Dataset columns (name: dtype, value statistics):

lang:          stringclasses (1 value)
license:       stringclasses (13 values)
stderr:        stringlengths (0 to 350)
commit:        stringlengths (40 to 40)
returncode:    int64 (0 to 128)
repos:         stringlengths (7 to 45.1k)
new_contents:  stringlengths (0 to 1.87M)
new_file:      stringlengths (6 to 292)
old_contents:  stringlengths (0 to 1.87M)
message:       stringlengths (6 to 9.26k)
old_file:      stringlengths (6 to 292)
subject:       stringlengths (0 to 4.45k)
Java
apache-2.0
947c80b7b7bea4abf294010097c701688abb4a32
0
gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom,gchq/stroom
/* * Copyright 2016 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package stroom.pipeline.server.writer; import org.apache.commons.lang.StringEscapeUtils; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.xml.sax.SAXException; import stroom.pipeline.server.errorhandler.ErrorReceiverProxy; import stroom.pipeline.server.errorhandler.ProcessException; import stroom.pipeline.server.factory.ConfigurableElement; import stroom.pipeline.server.factory.ElementIcons; import stroom.pipeline.server.factory.PipelineProperty; import stroom.pipeline.shared.data.PipelineElementType; import stroom.pipeline.shared.data.PipelineElementType.Category; import stroom.util.spring.StroomScope; import javax.inject.Inject; import java.io.IOException; /** * Joins text instances into a single text instance. */ @Component @Scope(StroomScope.PROTOTYPE) @ConfigurableElement(type = "TextWriter", category = Category.WRITER, roles = {PipelineElementType.ROLE_TARGET, PipelineElementType.ROLE_HAS_TARGETS, PipelineElementType.ROLE_WRITER, PipelineElementType.VISABILITY_STEPPING}, icon = ElementIcons.TEXT) public class TextWriter extends AbstractWriter { private byte[] header; private byte[] footer; public TextWriter() { } @Inject public TextWriter(final ErrorReceiverProxy errorReceiverProxy) { super(errorReceiverProxy); } @Override public void endDocument() throws SAXException { try { // We return destinations here even though they would be returned // anyway in end processing because we want stepping mode to see // flushed output. returnDestinations(); } finally { super.endDocument(); } } /** * Writes characters. * * @param ch An array of characters. * @param start The starting position in the array. * @param length The number of characters to use from the array. * @throws org.xml.sax.SAXException The client may throw an exception during processing. 
* @see stroom.pipeline.server.filter.AbstractXMLFilter#characters(char[], * int, int) */ @Override public void characters(final char[] ch, final int start, final int length) throws SAXException { super.characters(ch, start, length); try { int lastStart = start; for (int i = start; i < start + length; i++) { final char c = ch[i]; if (c == '\n') { borrowDestinations(header, footer); getWriter().write(ch, lastStart, i - start); lastStart = i; returnDestinations(); } } if (lastStart < start + length) { borrowDestinations(header, footer); getWriter().write(ch, lastStart, length - lastStart); } } catch (final IOException e) { throw ProcessException.wrap(e.getMessage(), e); } } @PipelineProperty(description = "Header text that can be added to the output at the start.") public void setHeader(final String header) { try { if (header == null) { this.header = null; } else { this.header = StringEscapeUtils.unescapeJava(header).getBytes(); } } catch (final Exception e) { throw ProcessException.wrap(e.getMessage(), e); } } @PipelineProperty(description = "Footer text that can be added to the output at the end.") public void setFooter(final String footer) { try { if (footer == null) { this.footer = null; } else { this.footer = StringEscapeUtils.unescapeJava(footer).getBytes(); } } catch (final Exception e) { throw ProcessException.wrap(e.getMessage(), e); } } @Override @PipelineProperty(description = "The output character encoding to use.", defaultValue = "UTF-8") public void setEncoding(final String encoding) { super.setEncoding(encoding); } }
stroom-pipeline/src/main/java/stroom/pipeline/server/writer/TextWriter.java
/* * Copyright 2016 Crown Copyright * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package stroom.pipeline.server.writer; import java.io.IOException; import javax.inject.Inject; import org.springframework.context.annotation.Scope; import org.springframework.stereotype.Component; import org.xml.sax.SAXException; import stroom.pipeline.server.errorhandler.ErrorReceiverProxy; import stroom.pipeline.server.errorhandler.ProcessException; import stroom.pipeline.server.factory.ConfigurableElement; import stroom.pipeline.server.factory.ElementIcons; import stroom.pipeline.server.factory.PipelineProperty; import stroom.pipeline.shared.data.PipelineElementType; import stroom.pipeline.shared.data.PipelineElementType.Category; import stroom.util.spring.StroomScope; /** * Joins text instances into a single text instance. */ @Component @Scope(StroomScope.PROTOTYPE) @ConfigurableElement(type = "TextWriter", category = Category.WRITER, roles = { PipelineElementType.ROLE_TARGET, PipelineElementType.ROLE_HAS_TARGETS, PipelineElementType.ROLE_WRITER, PipelineElementType.VISABILITY_STEPPING }, icon = ElementIcons.TEXT) public class TextWriter extends AbstractWriter { private byte[] header; private byte[] footer; public TextWriter() { } @Inject public TextWriter(final ErrorReceiverProxy errorReceiverProxy) { super(errorReceiverProxy); } @Override public void endDocument() throws SAXException { try { // We return destinations here even though they would be returned // anyway in end processing because we want stepping mode to see // flushed output. returnDestinations(); } finally { super.endDocument(); } } /** * Writes characters. * * @param ch * An array of characters. * @param start * The starting position in the array. * @param length * The number of characters to use from the array. * @exception org.xml.sax.SAXException * The client may throw an exception during processing. 
* * @see stroom.pipeline.server.filter.AbstractXMLFilter#characters(char[], * int, int) */ @Override public void characters(final char[] ch, final int start, final int length) throws SAXException { super.characters(ch, start, length); try { int lastStart = start; for (int i = start; i < start + length; i++) { final char c = ch[i]; if (c == '\n') { borrowDestinations(header, footer); getWriter().write(ch, lastStart, i - start); lastStart = i; returnDestinations(); } } if (lastStart < start + length) { borrowDestinations(header, footer); getWriter().write(ch, lastStart, length - lastStart); } } catch (final IOException e) { throw ProcessException.wrap(e.getMessage(), e); } } @PipelineProperty(description = "Header text that can be added to the output at the start.") public void setHeader(final String header) { if (header == null) { this.header = null; } else { this.header = header.getBytes(); } } @PipelineProperty(description = "Footer text that can be added to the output at the end.") public void setFooter(final String footer) { if (footer == null) { this.footer = null; } else { this.footer = footer.getBytes(); } } @Override @PipelineProperty(description = "The output character encoding to use.", defaultValue = "UTF-8") public void setEncoding(final String encoding) { super.setEncoding(encoding); } }
Issue #24 : Header and footer strings are now unescaped so that character sequences such as '\n' are translated into single characters as with standard Java strings, e.g. '\n' will become a new line and '\t' a tab.
stroom-pipeline/src/main/java/stroom/pipeline/server/writer/TextWriter.java
Issue #24 : Header and footer strings are now unescaped so that character sequences such as '\n' are translated into single characters as with standard Java strings, e.g. '\n' will become a new line and '\t' a tab.
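The unescaping described in that commit is done with Commons Lang's StringEscapeUtils.unescapeJava, which the new TextWriter code calls before converting the header/footer to bytes. A minimal standalone sketch of that behaviour (the variable names and the sample value are illustrative, not taken from Stroom):

import org.apache.commons.lang.StringEscapeUtils;

public class HeaderUnescapeSketch {
    public static void main(String[] args) {
        // A header as a user might type it into a pipeline property:
        // literal backslash-t and backslash-n, not real control characters.
        String raw = "col1\\tcol2\\n";
        String unescaped = StringEscapeUtils.unescapeJava(raw);
        // After unescaping, "\t" is a real tab and "\n" a real newline,
        // exactly as they would be in a standard Java string literal.
        byte[] header = unescaped.getBytes();
        System.out.println(unescaped.contains("\n")); // true
        System.out.println(header.length);            // 10 bytes, versus 12 characters before unescaping
    }
}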
Java
apache-2.0
881d9c35c19b44048a529eb9215f3ff2be241266
0
Bigkoo/Android-PickerView
package com.bigkoo.pickerview.view; import android.app.Activity; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.graphics.Color; import android.view.Gravity; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.FrameLayout; import com.bigkoo.pickerview.R; import com.bigkoo.pickerview.listener.OnDismissListener; import com.bigkoo.pickerview.utils.PickerViewAnimateUtil; /** * Created by Sai on 15/11/22. * 精仿iOSPickerViewController控件 */ public class BasePickerView { private final FrameLayout.LayoutParams params = new FrameLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, Gravity.BOTTOM ); private Context context; protected ViewGroup contentContainer; private ViewGroup decorView;//activity的根View private ViewGroup rootView;//附加View 的 根View private ViewGroup dialogView;//附加Dialog 的 根View protected int pickerview_timebtn_nor = 0xFF057dff; protected int pickerview_timebtn_pre = 0xFFc2daf5; protected int pickerview_bg_topbar = 0xFFf5f5f5; protected int pickerview_topbar_title = 0xFF000000; protected int bgColor_default = 0xFFFFFFFF; private OnDismissListener onDismissListener; private boolean dismissing; private Animation outAnim; private Animation inAnim; private boolean isShowing; private int gravity = Gravity.BOTTOM; private Dialog mDialog; private boolean cancelable;//是否能取消 protected View clickView;//是通过哪个View弹出的 public BasePickerView(Context context) { this.context = context; /*initViews(); init(); initEvents();*/ } protected void initViews() { LayoutInflater layoutInflater = LayoutInflater.from(context); if (isDialog()) { //如果是对话框模式 dialogView = (ViewGroup) layoutInflater.inflate(R.layout.layout_basepickerview, null, false); //设置界面的背景为透明 dialogView.setBackgroundColor(Color.TRANSPARENT); //这个是真正要加载时间选取器的父布局 contentContainer = (ViewGroup) dialogView.findViewById(R.id.content_container); //设置对话框 左右间距屏幕30 this.params.leftMargin = 30; this.params.rightMargin = 30; contentContainer.setLayoutParams(this.params); //创建对话框 createDialog(); //给背景设置点击事件,这样当点击内容以外的地方会关闭界面 dialogView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { dismiss(); } }); } else { //如果只是要显示在屏幕的下方 //decorView是activity的根View decorView = (ViewGroup) ((Activity) context).getWindow().getDecorView().findViewById(android.R.id.content); //将控件添加到decorView中 rootView = (ViewGroup) layoutInflater.inflate(R.layout.layout_basepickerview, decorView, false); rootView.setLayoutParams(new FrameLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT )); //这个是真正要加载时间选取器的父布局 contentContainer = (ViewGroup) rootView.findViewById(R.id.content_container); contentContainer.setLayoutParams(params); } setKeyBackCancelable(true); } protected void init() { inAnim = getInAnimation(); outAnim = getOutAnimation(); } protected void initEvents() { } /** * show的时候调用 * * @param view 这个View */ private void onAttached(View view) { decorView.addView(view); contentContainer.startAnimation(inAnim); } /** * 添加这个View到Activity的根视图 */ public void show() { if (isDialog()) { showDialog(); } else { if (isShowing()) { return; } isShowing = true; onAttached(rootView); rootView.requestFocus(); } } /** * 添加这个View到Activity的根视图 * * @param v (是通过哪个View弹出的) */ public void show(View v) { 
this.clickView = v; show(); } /** * 检测该View是不是已经添加到根视图 * * @return 如果视图已经存在该View返回true */ public boolean isShowing() { if (isDialog()) { return false; } else { return rootView.getParent() != null || isShowing; } } public void dismiss() { if (isDialog()) { dismissDialog(); } else { if (dismissing) { return; } dismissing = true; //消失动画 outAnim.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { decorView.post(new Runnable() { @Override public void run() { dismissImmediately(); } }); } @Override public void onAnimationRepeat(Animation animation) { } }); contentContainer.startAnimation(outAnim); } } public void dismissImmediately() { //从activity根视图移除 decorView.removeView(rootView); isShowing = false; dismissing = false; if (onDismissListener != null) { onDismissListener.onDismiss(BasePickerView.this); } } public Animation getInAnimation() { int res = PickerViewAnimateUtil.getAnimationResource(this.gravity, true); return AnimationUtils.loadAnimation(context, res); } public Animation getOutAnimation() { int res = PickerViewAnimateUtil.getAnimationResource(this.gravity, false); return AnimationUtils.loadAnimation(context, res); } public BasePickerView setOnDismissListener(OnDismissListener onDismissListener) { this.onDismissListener = onDismissListener; return this; } public BasePickerView setKeyBackCancelable(boolean isCancelable) { ViewGroup View; if (isDialog()){ View = dialogView; }else { View = rootView; } View.setFocusable(isCancelable); View.setFocusableInTouchMode(isCancelable); if (isCancelable) { View.setOnKeyListener(onKeyBackListener); } else{ View.setOnKeyListener(null); } return this; } private View.OnKeyListener onKeyBackListener = new View.OnKeyListener() { @Override public boolean onKey(View v, int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK && event.getAction() == MotionEvent.ACTION_DOWN && isShowing()){ dismiss(); return true; } return false; } } ; protected BasePickerView setOutSideCancelable(boolean isCancelable) { if (rootView != null) { View view = rootView.findViewById(R.id.outmost_container); if (isCancelable) { view.setOnTouchListener(onCancelableTouchListener); } else { view.setOnTouchListener(null); } } return this; } /** * 设置对话框模式是否可以点击外部取消 * @param cancelable */ public void setDialogOutSideCancelable(boolean cancelable) { this.cancelable = cancelable; } /** * Called when the user touch on black overlay in order to dismiss the dialog */ private final View.OnTouchListener onCancelableTouchListener = new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { if (event.getAction() == MotionEvent.ACTION_DOWN) { dismiss(); } return false; } }; public View findViewById(int id) { return contentContainer.findViewById(id); } public void createDialog() { if (dialogView != null) { mDialog = new Dialog(context, R.style.custom_dialog2); mDialog.setCancelable(cancelable);//不能点外面取消,也不 能点back取消 mDialog.setContentView(dialogView); mDialog.getWindow().setWindowAnimations(R.style.pickerview_dialogAnim); mDialog.setOnDismissListener(new DialogInterface.OnDismissListener() { @Override public void onDismiss(DialogInterface dialog) { if (onDismissListener != null) { onDismissListener.onDismiss(BasePickerView.this); } } }); } } public void showDialog() { if (mDialog != null) { mDialog.show(); } } public void dismissDialog() { if (mDialog != null) { mDialog.dismiss(); } } public boolean isDialog() { return false; } 
}
pickerview/src/main/java/com/bigkoo/pickerview/view/BasePickerView.java
package com.bigkoo.pickerview.view; import android.app.Activity; import android.app.Dialog; import android.content.Context; import android.content.DialogInterface; import android.graphics.Color; import android.view.Gravity; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.animation.Animation; import android.view.animation.AnimationUtils; import android.widget.FrameLayout; import com.bigkoo.pickerview.R; import com.bigkoo.pickerview.listener.OnDismissListener; import com.bigkoo.pickerview.utils.PickerViewAnimateUtil; /** * Created by Sai on 15/11/22. * 精仿iOSPickerViewController控件 */ public class BasePickerView { private final FrameLayout.LayoutParams params = new FrameLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.WRAP_CONTENT, Gravity.BOTTOM ); private Context context; protected ViewGroup contentContainer; private ViewGroup decorView;//activity的根View private ViewGroup rootView;//附加View 的 根View private ViewGroup dialogView;//附加Dialog 的 根View protected int pickerview_timebtn_nor = 0xFF057dff; protected int pickerview_timebtn_pre = 0xFFc2daf5; protected int pickerview_bg_topbar = 0xFFf5f5f5; protected int pickerview_topbar_title = 0xFF000000; protected int bgColor_default = 0xFFFFFFFF; private OnDismissListener onDismissListener; private boolean dismissing; private Animation outAnim; private Animation inAnim; private boolean isShowing; private int gravity = Gravity.BOTTOM; private Dialog mDialog; private boolean cancelable;//是否能取消 protected View clickView;//是通过哪个View弹出的 public BasePickerView(Context context) { this.context = context; /*initViews(); init(); initEvents();*/ } protected void initViews() { LayoutInflater layoutInflater = LayoutInflater.from(context); if (isDialog()) { //如果是对话框模式 dialogView = (ViewGroup) layoutInflater.inflate(R.layout.layout_basepickerview, null, false); //设置界面的背景为透明 dialogView.setBackgroundColor(Color.TRANSPARENT); //这个是真正要加载时间选取器的父布局 contentContainer = (ViewGroup) dialogView.findViewById(R.id.content_container); //设置对话框 左右间距屏幕30 this.params.leftMargin = 30; this.params.rightMargin = 30; contentContainer.setLayoutParams(this.params); //创建对话框 createDialog(); //给背景设置点击事件,这样当点击内容以外的地方会关闭界面 dialogView.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { dismiss(); } }); } else { //如果只是要显示在屏幕的下方 //decorView是activity的根View decorView = (ViewGroup) ((Activity) context).getWindow().getDecorView().findViewById(android.R.id.content); //将控件添加到decorView中 rootView = (ViewGroup) layoutInflater.inflate(R.layout.layout_basepickerview, decorView, false); rootView.setLayoutParams(new FrameLayout.LayoutParams( ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT )); //这个是真正要加载时间选取器的父布局 contentContainer = (ViewGroup) rootView.findViewById(R.id.content_container); contentContainer.setLayoutParams(params); } setKeyBackCancelable(true); } protected void init() { inAnim = getInAnimation(); outAnim = getOutAnimation(); } protected void initEvents() { } /** * show的时候调用 * * @param view 这个View */ private void onAttached(View view) { decorView.addView(view); contentContainer.startAnimation(inAnim); } /** * 添加这个View到Activity的根视图 */ public void show() { if (isDialog()) { showDialog(); } else { if (isShowing()) { return; } isShowing = true; onAttached(rootView); rootView.requestFocus(); } } /** * 添加这个View到Activity的根视图 * * @param v (是通过哪个View弹出的) */ public void show(View v) { 
this.clickView = v; show(); } /** * 检测该View是不是已经添加到根视图 * * @return 如果视图已经存在该View返回true */ public boolean isShowing() { if (isDialog()) { return false; } else { return rootView.getParent() != null || isShowing; } } public void dismiss() { if (isDialog()) { dismissDialog(); } else { if (dismissing) { return; } dismissing = true; //消失动画 outAnim.setAnimationListener(new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { decorView.post(new Runnable() { @Override public void run() { dismissImmediately(); } }); } @Override public void onAnimationRepeat(Animation animation) { } }); contentContainer.startAnimation(outAnim); } } public void dismissImmediately() { //从activity根视图移除 decorView.removeView(rootView); isShowing = false; dismissing = false; if (onDismissListener != null) { onDismissListener.onDismiss(BasePickerView.this); } } public Animation getInAnimation() { int res = PickerViewAnimateUtil.getAnimationResource(this.gravity, true); return AnimationUtils.loadAnimation(context, res); } public Animation getOutAnimation() { int res = PickerViewAnimateUtil.getAnimationResource(this.gravity, false); return AnimationUtils.loadAnimation(context, res); } public BasePickerView setOnDismissListener(OnDismissListener onDismissListener) { this.onDismissListener = onDismissListener; return this; } public BasePickerView setKeyBackCancelable(boolean isCancelable) { ViewGroup View; if (isDialog()){ View = dialogView; }else { View = rootView; } View.setFocusable(isCancelable); View.setFocusableInTouchMode(isCancelable); if (isCancelable) { View.setOnKeyListener(onKeyBackListener); } else{ View.setOnKeyListener(null); } return this; } private View.OnKeyListener onKeyBackListener = new View.OnKeyListener() { @Override public boolean onKey(View v, int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_BACK && event.getAction() == MotionEvent.ACTION_DOWN && isShowing()){ dismiss(); return true; } return false; } } ; protected BasePickerView setOutSideCancelable(boolean isCancelable) { if (rootView != null) { View view = rootView.findViewById(R.id.outmost_container); if (isCancelable) { view.setOnTouchListener(onCancelableTouchListener); } else { view.setOnTouchListener(null); } } return this; } /** * 设置对话框模式是否可以点击外部取消 * @param cancelable */ public void setDialogOutSideCancelable(boolean cancelable) { this.cancelable = cancelable; } /** * Called when the user touch on black overlay in order to dismiss the dialog */ private final View.OnTouchListener onCancelableTouchListener = new View.OnTouchListener() { @Override public boolean onTouch(View v, MotionEvent event) { if (event.getAction() == MotionEvent.ACTION_DOWN) { dismiss(); } return false; } }; public View findViewById(int id) { return contentContainer.findViewById(id); } public void createDialog() { if (dialogView != null) { mDialog = new Dialog(context, R.style.custom_dialog2); mDialog.setCancelable(cancelable);//不能点外面取消,也不 能点back取消 mDialog.setContentView(dialogView); mDialog.getWindow().setWindowAnimations(R.style.dialogAnim); mDialog.setOnDismissListener(new DialogInterface.OnDismissListener() { @Override public void onDismiss(DialogInterface dialog) { if (onDismissListener != null) { onDismissListener.onDismiss(BasePickerView.this); } } }); } } public void showDialog() { if (mDialog != null) { mDialog.show(); } } public void dismissDialog() { if (mDialog != null) { mDialog.dismiss(); } } public boolean isDialog() { return false; } }
Update BasePickerView.java
pickerview/src/main/java/com/bigkoo/pickerview/view/BasePickerView.java
Update BasePickerView.java
Java
apache-2.0
740a51a70dfe63025685193561104543c688e78b
0
bkosawa/android-app-recommendation
package br.com.kosawalabs.apprecommendation.data.pojo; import android.support.annotation.NonNull; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; public class App { @SerializedName("id") @Expose private Integer id; @SerializedName("package_name") @Expose private String packageName; @SerializedName("name") @Expose private String name; @SerializedName("icon_url") @Expose private String iconUrl; @SerializedName("description") @Expose private String description; @SerializedName("category_key") @Expose private String categoryKey; @SerializedName("category_name") @Expose private String categoryName; @SerializedName("developer_name") @Expose private String developerName; @SerializedName("version") @Expose private String version; @SerializedName("content_rating") @Expose private String contentRating; @SerializedName("size") @Expose private String size; public Integer getId() { return id; } public String getPackageName() { return packageName; } @NonNull public String getName() { return name != null ? name : ""; } @NonNull public String getIconUrl() { return iconUrl != null ? iconUrl : ""; } public String getDescription() { return description; } public String getCategoryKey() { return categoryKey; } @NonNull public String getCategoryName() { return categoryName != null ? categoryName : ""; } @NonNull public String getDeveloperName() { return developerName != null ? developerName : ""; } public String getVersion() { return version; } public String getContentRating() { return contentRating; } public String getSize() { return size; } }
app/src/main/java/br/com/kosawalabs/apprecommendation/data/pojo/App.java
package br.com.kosawalabs.apprecommendation.data.pojo; import com.google.gson.annotations.Expose; import com.google.gson.annotations.SerializedName; public class App { @SerializedName("id") @Expose private Integer id; @SerializedName("package_name") @Expose private String packageName; @SerializedName("name") @Expose private String name; @SerializedName("icon_url") @Expose private String iconUrl; @SerializedName("description") @Expose private String description; @SerializedName("category_key") @Expose private String categoryKey; @SerializedName("category_name") @Expose private String categoryName; @SerializedName("developer_name") @Expose private String developerName; @SerializedName("version") @Expose private String version; @SerializedName("content_rating") @Expose private String contentRating; @SerializedName("size") @Expose private String size; public Integer getId() { return id; } public String getPackageName() { return packageName; } public String getName() { return name; } public String getIconUrl() { return iconUrl; } public String getDescription() { return description; } public String getCategoryKey() { return categoryKey; } public String getCategoryName() { return categoryName; } public String getDeveloperName() { return developerName; } public String getVersion() { return version; } public String getContentRating() { return contentRating; } public String getSize() { return size; } }
Making App pojo gets non-nullable
app/src/main/java/br/com/kosawalabs/apprecommendation/data/pojo/App.java
Making App pojo gets non-nullable
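The commit above makes selected getters return a non-null value by annotating them with android.support.annotation.NonNull and falling back to an empty string when Gson left the field null. The same pattern in isolation (the class and field below are made-up examples, not part of the original app):

import android.support.annotation.NonNull;

public class LabelHolder {
    // Gson leaves this null when the corresponding JSON field is missing.
    private String name;

    @NonNull
    public String getName() {
        // Callers never see null; the empty string is the neutral default.
        return name != null ? name : "";
    }
}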
Java
apache-2.0
b703e5dc72c04adbc113d0e4f80cad37063d8e3c
0
GhostRealms/Support-Ticket
package net.ghostrealms.ticket; /** * Created by River on 2/23/2015. * Ticket Manager for Loading, caching, and updating tickets throughout the database system* */ public class TicketManager { private final DataManager manager; public TicketManager(DataManager manager) { this.manager = manager; } }
src/main/java/net/ghostrealms/ticket/TicketManager.java
package net.ghostrealms.ticket; /** * Created by River on 2/23/2015. * Ticket Manager for Loading, caching, and updating tickets throughout the database system* */ public class TicketManager { }
create the TicketManager class to manage loading, caching, and editing of tickets.
src/main/java/net/ghostrealms/ticket/TicketManager.java
create the TicketManager class to manage loading, caching, and editing of tickets.
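The new class takes its DataManager collaborator through the constructor, plain constructor injection. A rough standalone sketch of that shape (the DataManager interface and its load method here are stand-ins, not the project's real API):

// Stand-in for the project's DataManager dependency.
interface DataManager {
    String load(int ticketId);
}

class TicketManager {
    private final DataManager manager;

    // The collaborator is handed in by the caller, which keeps the class testable.
    TicketManager(DataManager manager) {
        this.manager = manager;
    }

    String ticketSummary(int ticketId) {
        return manager.load(ticketId);
    }
}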
Java
apache-2.0
5a23548f7023a0b71eca65b20a21c92d9a09776a
0
apache/lenya,apache/lenya,apache/lenya,apache/lenya
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.lenya.cms.linking; /** * A link to a document. */ public class Link { private String uuid; private String language; private String revision; private String area; private String pubId; /** * Ctor. */ public Link() { } /** * @return The area. */ public String getArea() { return area; } /** * @param area The area. */ public void setArea(String area) { this.area = area; } /** * @return The language. */ public String getLanguage() { return language; } /** * @param language The language. */ public void setLanguage(String language) { this.language = language; } /** * @return The publication ID. */ public String getPubId() { return pubId; } /** * @param pubId The publication ID. */ public void setPubId(String pubId) { this.pubId = pubId; } /** * @return The revision. */ public String getRevision() { return revision; } /** * @param revision The revision. */ public void setRevision(String revision) { this.revision = revision; } /** * @return The UUID. */ public String getUuid() { return uuid; } /** * @param uuid The UUID. */ public void setUuid(String uuid) { this.uuid = uuid; } /** * @return The link URI. */ public String getUri() { String uri = LinkResolver.SCHEME + ":"; if (this.uuid != null) { uri = uri + this.uuid; } if (this.language != null) { uri = uri + ",lang=" + this.language; } if (this.area != null) { uri = uri + ",area=" + this.area; } if (this.pubId != null) { uri = uri + ",pub=" + this.pubId; } if (this.revision != null) { uri = uri + ",rev=" + this.revision; } return uri; } }
src/modules-core/linking/java/src/org/apache/lenya/cms/linking/Link.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.lenya.cms.linking; /** * A link to a document. */ public class Link { private String uuid; private String language; private String revision; private String area; private String pubId; public Link() { } public String getArea() { return area; } public void setArea(String area) { this.area = area; } public String getLanguage() { return language; } public void setLanguage(String language) { this.language = language; } public String getPubId() { return pubId; } public void setPubId(String pubId) { this.pubId = pubId; } public String getRevision() { return revision; } public void setRevision(String revision) { this.revision = revision; } public String getUuid() { return uuid; } public void setUuid(String uuid) { this.uuid = uuid; } public String getUri() { String uri = LinkResolver.SCHEME + ":"; if (this.uuid != null) { uri = uri + this.uuid; } if (this.language != null) { uri = uri + ",lang=" + this.language; } if (this.area != null) { uri = uri + ",area=" + this.area; } if (this.pubId != null) { uri = uri + ",pub=" + this.pubId; } if (this.revision != null) { uri = uri + ",rev=" + this.revision; } return uri; } }
[minor change] added some javadocs git-svn-id: c334bb69c16d150e1b06e84516f7aa90b3181ca2@474697 13f79535-47bb-0310-9956-ffa450edef68
src/modules-core/linking/java/src/org/apache/lenya/cms/linking/Link.java
[minor change] added some javadocs
Java
apache-2.0
7d100fc329af741d4a38c8fe9920a9c3bbed5149
0
boundary/boundary-event-sdk,boundary/boundary-event-sdk,boundary/boundary-event-sdk,boundary/boundary-event-sdk,boundary/boundary-event-sdk,boundary/boundary-event-sdk,boundary/boundary-event-sdk
package com.boundary.sdk.event.service; import static org.junit.Assert.*; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelSpringTestSupport; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; import org.springframework.context.support.AbstractApplicationContext; import com.boundary.camel.component.ping.PingConfiguration; import com.boundary.camel.component.port.PortConfiguration; public class ServiceCheckRouterTest extends CamelSpringTestSupport { @BeforeClass public static void setUpBeforeClass() throws Exception { } @AfterClass public static void tearDownAfterClass() throws Exception { } @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } @Ignore @Test public void test() { MockEndpoint mock = getMockEndpoint("mock:service-checks-router-out"); } @Ignore @Test public void testGetEndPointsFromRequest() { ServiceCheckRouter router = new ServiceCheckRouter(); ServiceCheckRequest request = new ServiceCheckRequest(); PingConfiguration pingConfiguration = new PingConfiguration(); PortConfiguration portConfiguration = new PortConfiguration(); ServiceTest<PingConfiguration> pingTest = new ServiceTest<PingConfiguration>("ping",request.getRequestId(),pingConfiguration); ServiceTest<PortConfiguration> portTest = new ServiceTest<PortConfiguration>("port",request.getRequestId(),portConfiguration); request.addServiceTest(pingTest); request.addServiceTest(portTest); // String endPoints = router.getEndPointsFromRequest(request); // // assertEquals("check test names","ping,port",endPoints); } @Override protected AbstractApplicationContext createApplicationContext() { // TODO Auto-generated method stub return null; } }
src/test/java/com/boundary/sdk/event/service/ServiceCheckRouterTest.java
package com.boundary.sdk.event.service; import static org.junit.Assert.*; import org.apache.camel.component.mock.MockEndpoint; import org.apache.camel.test.junit4.CamelSpringTestSupport; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; import org.springframework.context.support.AbstractApplicationContext; import com.boundary.camel.component.ping.PingConfiguration; import com.boundary.camel.component.port.PortConfiguration; public class ServiceCheckRouterTest extends CamelSpringTestSupport { @BeforeClass public static void setUpBeforeClass() throws Exception { } @AfterClass public static void tearDownAfterClass() throws Exception { } @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } @Test public void test() { MockEndpoint mock = getMockEndpoint("mock:service-checks-router-out"); } @Test public void testGetEndPointsFromRequest() { ServiceCheckRouter router = new ServiceCheckRouter(); ServiceCheckRequest request = new ServiceCheckRequest(); PingConfiguration pingConfiguration = new PingConfiguration(); PortConfiguration portConfiguration = new PortConfiguration(); ServiceTest<PingConfiguration> pingTest = new ServiceTest<PingConfiguration>("ping",request.getRequestId(),pingConfiguration); ServiceTest<PortConfiguration> portTest = new ServiceTest<PortConfiguration>("port",request.getRequestId(),portConfiguration); request.addServiceTest(pingTest); request.addServiceTest(portTest); // String endPoints = router.getEndPointsFromRequest(request); // // assertEquals("check test names","ping,port",endPoints); } @Override protected AbstractApplicationContext createApplicationContext() { // TODO Auto-generated method stub return null; } }
Ignore stubbed out tests Former-commit-id: 53a2ce9d892fa2e648f88e9666f02b194d7368e6
src/test/java/com/boundary/sdk/event/service/ServiceCheckRouterTest.java
Ignore stubbed out tests
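The @Ignore annotation added in that commit is JUnit 4's way of skipping a test method while keeping it compiled; the runner reports it as ignored instead of executing it. A small self-contained illustration, assuming JUnit 4 on the classpath (the class and method names are invented):

import static org.junit.Assert.assertTrue;

import org.junit.Ignore;
import org.junit.Test;

public class StubbedOutTest {
    @Ignore // stubbed out: the failing assertion below is never run
    @Test
    public void notImplementedYet() {
        assertTrue("still a stub", false);
    }

    @Test
    public void realTest() {
        assertTrue(true); // this one still executes
    }
}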
Java
apache-2.0
0e0298d8a33d8c4e2d688b1b6fa17c634efbba3a
0
cuba-platform/cuba,cuba-platform/cuba,dimone-kun/cuba,cuba-platform/cuba,dimone-kun/cuba,dimone-kun/cuba
/* * Copyright (c) 2008-2016 Haulmont. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.haulmont.cuba.gui.app.core.bulk; import com.haulmont.chile.core.datatypes.impl.ByteArrayDatatype; import com.haulmont.chile.core.datatypes.impl.UUIDDatatype; import com.haulmont.chile.core.model.MetaClass; import com.haulmont.chile.core.model.MetaProperty; import com.haulmont.chile.core.model.MetaPropertyPath; import com.haulmont.cuba.core.entity.Entity; import com.haulmont.cuba.core.global.*; import com.haulmont.cuba.gui.AppConfig; import com.haulmont.cuba.gui.WindowParam; import com.haulmont.cuba.gui.components.*; import com.haulmont.cuba.gui.components.Action.Status; import com.haulmont.cuba.gui.components.DialogAction.Type; import com.haulmont.cuba.gui.data.DataSupplier; import com.haulmont.cuba.gui.data.Datasource; import com.haulmont.cuba.gui.data.NestedDatasource; import com.haulmont.cuba.gui.data.impl.DatasourceImpl; import com.haulmont.cuba.gui.data.impl.DsContextImpl; import com.haulmont.cuba.gui.data.impl.EmbeddedDatasourceImpl; import com.haulmont.cuba.gui.theme.ThemeConstants; import com.haulmont.cuba.gui.xml.layout.ComponentsFactory; import com.haulmont.cuba.security.entity.EntityAttrAccess; import com.haulmont.cuba.security.entity.EntityOp; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import java.util.*; import java.util.regex.Pattern; import java.util.stream.Collectors; import static com.haulmont.bali.util.Preconditions.checkNotNullArgument; public class BulkEditorWindow extends AbstractWindow { @Inject protected ViewRepository viewRepository; @Inject protected MetadataTools metadataTools; @Inject protected MessageTools messageTools; @Inject protected Metadata metadata; @Inject protected DataSupplier dataSupplier; @Inject protected ComponentsFactory componentsFactory; @Inject protected Configuration configuration; @Inject protected Security security; @Inject protected BoxLayout contentPane; @Inject protected Label infoLabel; @Inject protected Button applyButton; @Inject protected ThemeConstants themeConstants; @WindowParam(required = true) protected MetaClass metaClass; @WindowParam(required = true) protected Set<Entity> selected; @WindowParam protected String exclude; @WindowParam protected Map<String, Field.Validator> fieldValidators; @WindowParam protected List<Field.Validator> modelValidators; protected Pattern excludeRegex; protected DsContextImpl dsContext; protected Datasource<Entity> datasource; protected Map<String, Datasource<Entity>> datasources = new HashMap<>(); protected Map<String, ManagedField> managedFields = new LinkedHashMap<>(); protected Map<String, Field> dataFields = new LinkedHashMap<>(); protected BulkEditorFieldFactory fieldFactory = new BulkEditorFieldFactory(); protected List<Entity> items; protected List<String> managedEmbeddedProperties = new ArrayList<>(); @Override public void init(Map<String, Object> params) { super.init(params); int width = 
themeConstants.getInt("cuba.gui.BulkEditorWindow.width"); int height = themeConstants.getInt("cuba.gui.BulkEditorWindow.height"); getDialogOptions() .setWidth(width) .setHeight(height); checkNotNullArgument(metaClass); checkNotNullArgument(selected); if (StringUtils.isNotBlank(exclude)) { excludeRegex = Pattern.compile(exclude); } for (ManagedField managedField : getManagedFields(metaClass)) { managedFields.put(managedField.getFqn(), managedField); } View view = createView(metaClass); items = loadItems(view); dsContext = new DsContextImpl(dataSupplier); dsContext.setFrameContext(getDsContext().getFrameContext()); setDsContext(dsContext); datasource = new DatasourceImpl<>(); datasource.setup(dsContext, dataSupplier, metaClass.getName() + "Ds", metaClass, view); ((DatasourceImpl) datasource).valid(); dsContext.register(datasource); createNestedEmbeddedDatasources(datasource, metaClass, ""); Entity instance = metadata.create(metaClass); createEmbeddedFields(metaClass, instance, ""); datasource.setItem(instance); datasource.setAllowCommit(false); createDataComponents(); } protected void createDataComponents() { if (managedFields.isEmpty()) { infoLabel.setValue(getMessage("bulk.noEditableProperties")); applyButton.setVisible(false); return; } GridLayout grid = componentsFactory.createComponent(GridLayout.class); grid.setSpacing(true); grid.setColumns(4); grid.setRows((managedFields.size() + 1) / 2); grid.setStyleName("c-bulk-editor-grid"); contentPane.add(grid); grid.setFrame(frame); List<ManagedField> editFields = new ArrayList<>(managedFields.values()); editFields.sort((o1, o2) -> o1.getLocalizedName().compareTo(o2.getLocalizedName())); String fieldWidth = themeConstants.get("cuba.gui.BulkEditorWindow.field.width"); for (ManagedField field : editFields) { Label label = componentsFactory.createComponent(Label.class); label.setFrame(getFrame()); label.setValue(field.getLocalizedName()); label.setAlignment(Alignment.TOP_LEFT); label.setStyleName("field-label"); if (AppConfig.getClientType() == ClientType.DESKTOP) { label.setHeight("25px"); } grid.add(label); Datasource<Entity> fieldDs = datasource; // field owner metaclass is embeddable only if field domain embeddable, // so we can check field domain if (metadataTools.isEmbeddable(field.getMetaProperty().getDomain())) { fieldDs = datasources.get(field.getParentFqn()); } final Field editField = fieldFactory.createField(fieldDs, field.getMetaProperty()); if (editField != null) { editField.setFrame(getFrame()); editField.setWidth(fieldWidth); boolean required = editField.isRequired(); BoxLayout boxLayout = componentsFactory.createComponent(HBoxLayout.class); boxLayout.setFrame(getFrame()); boxLayout.setSpacing(true); boxLayout.add(editField); if (!required) { final Button clearButton = componentsFactory.createComponent(Button.class); clearButton.setFrame(getFrame()); Action action = new AbstractAction("actions.BulkClear") { @Override public void actionPerform(Component component) { editField.setEnabled(!editField.isEnabled()); if (!editField.isEnabled()) { editField.setValue(null); setIcon("icons/edit.png"); clearButton.setDescription(getMessage("bulk.editAttribute")); } else { setIcon("icons/trash.png"); clearButton.setDescription(getMessage("bulk.clearAttribute")); } } }; action.setCaption(""); action.setIcon("icons/trash.png"); clearButton.setAction(action); clearButton.setDescription(getMessage("bulk.clearAttribute")); boxLayout.add(clearButton); } editField.setRequired(false); editField.setValue(null); if (fieldValidators != null) { 
Field.Validator validator = fieldValidators.get(field.getFqn()); if (validator != null) { editField.addValidator(validator); } } grid.add(boxLayout); dataFields.put(field.getFqn(), editField); } else { Label unknownLabel = componentsFactory.createComponent(Label.class); unknownLabel.setFrame(getFrame()); grid.add(unknownLabel); } } if (!dataFields.isEmpty()) { dataFields.values().iterator().next().requestFocus(); } } protected boolean isByteArray(MetaProperty metaProperty) { return ByteArrayDatatype.NAME.equals(metaProperty.getRange().asDatatype().getName()); } protected boolean isUuid(MetaProperty metaProperty) { return UUIDDatatype.NAME.equals(metaProperty.getRange().asDatatype().getName()); } /** * Recursively instantiates the embedded properties. * E.g. embedded properties of the embedded property will also be instantiated. * * @param metaClass meta class of the entity * @param item entity instance */ protected void createEmbeddedFields(MetaClass metaClass, Entity item, String fqnPrefix) { for (MetaProperty metaProperty : metaClass.getProperties()) { String fqn = metaProperty.getName(); if (StringUtils.isNotEmpty(fqnPrefix)) { fqn = fqnPrefix + "." + fqn; } if (managedEmbeddedProperties.contains(fqn) && metadataTools.isEmbedded(metaProperty)) { MetaClass embeddedMetaClass = metaProperty.getRange().asClass(); Entity embedded = item.getValue(metaProperty.getName()); if (embedded == null) { embedded = metadata.create(embeddedMetaClass); item.setValue(metaProperty.getName(), embedded); } createEmbeddedFields(embeddedMetaClass, embedded, fqn); } } } protected void createNestedEmbeddedDatasources(Datasource masterDs, MetaClass metaClass, String fqnPrefix) { for (MetaProperty metaProperty : metaClass.getProperties()) { if (MetaProperty.Type.ASSOCIATION == metaProperty.getType() || MetaProperty.Type.COMPOSITION == metaProperty.getType()) { String fqn = metaProperty.getName(); if (StringUtils.isNotEmpty(fqnPrefix)) { fqn = fqnPrefix + "." + fqn; } if (managedEmbeddedProperties.contains(fqn) && metadataTools.isEmbedded(metaProperty)) { MetaClass propertyMetaClass = metaProperty.getRange().asClass(); @SuppressWarnings("unchecked") NestedDatasource<Entity> propertyDs = new EmbeddedDatasourceImpl(); propertyDs.setup(fqn + "Ds", masterDs, metaProperty.getName()); propertyDs.setAllowCommit(false); createNestedEmbeddedDatasources(propertyDs, propertyMetaClass, fqn); datasources.put(fqn, propertyDs); dsContext.register(propertyDs); } } } } /** * Creates a view, loading only neccessary properties. * Referenced entities will be loaded with a MINIMAL view. 
* * @param meta meta class * @return View instance */ protected View createView(MetaClass meta) { @SuppressWarnings("unchecked") View view = new View(meta.getJavaClass(), false); for (MetaProperty metaProperty : meta.getProperties()) { if (!managedFields.containsKey(metaProperty.getName()) && !managedEmbeddedProperties.contains(metaProperty.getName())) { continue; } switch (metaProperty.getType()) { case DATATYPE: case ENUM: view.addProperty(metaProperty.getName()); break; case ASSOCIATION: case COMPOSITION: View propView; if (!metadataTools.isEmbedded(metaProperty)) { propView = viewRepository.getView(metaProperty.getRange().asClass(), View.MINIMAL); //in some cases JPA loads extended entities as instance of base class which leads to ClassCastException //loading property lazy prevents this from happening view.addProperty(metaProperty.getName(), propView, true); } else { // build view for embedded property propView = createEmbeddedView(metaProperty.getRange().asClass(), metaProperty.getName()); view.addProperty(metaProperty.getName(), propView, false); } break; default: throw new IllegalStateException("unknown property type"); } } return view; } protected View createEmbeddedView(MetaClass meta, String fqnPrefix) { @SuppressWarnings("unchecked") View view = new View(meta.getJavaClass(), false); for (MetaProperty metaProperty : meta.getProperties()) { String fqn = fqnPrefix + "." + metaProperty.getName(); if (!managedFields.containsKey(fqn)) { continue; } switch (metaProperty.getType()) { case DATATYPE: case ENUM: view.addProperty(metaProperty.getName()); break; case ASSOCIATION: case COMPOSITION: View propView; if (!metadataTools.isEmbedded(metaProperty)) { propView = viewRepository.getView(metaProperty.getRange().asClass(), View.MINIMAL); } else { // build view for embedded property propView = createEmbeddedView(metaProperty.getRange().asClass(), fqn); } //in some cases JPA loads extended entities as instance of base class which leads to ClassCastException //loading property lazy prevents this from happening view.addProperty(metaProperty.getName(), propView, true); break; default: throw new IllegalStateException("unknown property type"); } } return view; } protected boolean isPermitted(MetaClass metaClass, MetaProperty metaProperty) { return security.isEntityAttrPermitted(metaClass, metaProperty.getName(), EntityAttrAccess.MODIFY); } protected boolean isManagedAttribute(MetaClass metaClass, MetaProperty metaProperty) { if (metadataTools.isSystem(metaProperty) || metadataTools.isTransient(metaProperty) || metadataTools.isSystemLevel(metaProperty) || metaProperty.getRange().getCardinality().isMany() || !isPermitted(metaClass, metaProperty)) { return false; } if (metaProperty.getRange().isDatatype() && (isByteArray(metaProperty) || isUuid(metaProperty))) { return false; } if (metaProperty.getRange().isClass()) { MetaClass propertyMetaClass = metaProperty.getRange().asClass(); if (metadataTools.isSystemLevel(propertyMetaClass)) { return false; } if (!security.isEntityOpPermitted(propertyMetaClass, EntityOp.READ)) { return false; } } return !(excludeRegex != null && excludeRegex.matcher(metaProperty.getName()).matches()); } protected List<ManagedField> getManagedFields(MetaClass metaClass) { List<ManagedField> managedFields = new ArrayList<>(); // sort Fields for (MetaProperty metaProperty : metaClass.getProperties()) { if (isManagedAttribute(metaClass, metaProperty)) { String propertyCaption = messageTools.getPropertyCaption(metaClass, metaProperty.getName()); if 
(!metadataTools.isEmbedded(metaProperty)) { managedFields.add(new ManagedField(metaProperty.getName(), metaProperty, propertyCaption, null)); } else { List<ManagedField> nestedFields = getManagedFields(metaProperty, metaProperty.getName(), propertyCaption); if (nestedFields.size() > 0) { managedEmbeddedProperties.add(metaProperty.getName()); } managedFields.addAll(nestedFields); } } } return managedFields; } protected List<ManagedField> getManagedFields(MetaProperty embeddedProperty, String fqnPrefix, String localePrefix) { List<ManagedField> managedFields = new ArrayList<>(); MetaClass metaClass = embeddedProperty.getRange().asClass(); for (MetaProperty metaProperty : metaClass.getProperties()) { if (isManagedAttribute(metaClass, metaProperty)) { String fqn = fqnPrefix + "." + metaProperty.getName(); String localeName = localePrefix + " " + messageTools.getPropertyCaption(metaClass, metaProperty.getName()); if (!metadataTools.isEmbedded(metaProperty)) { managedFields.add(new ManagedField(fqn, metaProperty, localeName, fqnPrefix)); } else { List<ManagedField> nestedFields = getManagedFields(metaProperty, fqn, localeName); if (nestedFields.size() > 0) { managedEmbeddedProperties.add(fqn); } managedFields.addAll(nestedFields); } } } return managedFields; } @Override protected boolean preClose(String actionId) { if (actionId.equals(CLOSE_ACTION_ID)) { cancelChanges(); return false; } return super.preClose(actionId); } public void cancelChanges() { if (hasChanges()) { showOptionDialog(messages.getMainMessage("closeUnsaved.caption"), messages.getMainMessage("closeUnsaved"), MessageType.CONFIRMATION, new Action[]{ new DialogAction(Type.YES) { @Override public void actionPerform(Component component) { close(CLOSE_ACTION_ID, true); } }, new DialogAction(Type.NO, Status.PRIMARY) }); } else { close(CLOSE_ACTION_ID, true); } } private boolean hasChanges() { for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (field.getValue() != null || !field.isEnabled()) { return true; } } return false; } public void applyChanges() { if (validateAll()) { StringBuilder sb = new StringBuilder(); if (modelValidators != null) { for (Field.Validator moduleValidator : modelValidators) { try { moduleValidator.validate(datasource); } catch (ValidationException e) { sb.append(e.getMessage()); sb.append("\n"); } } } if (sb.length() == 0) { List<String> fields = new ArrayList<>(); for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (field.getValue() != null || !field.isEnabled()) { String localizedName = managedFields.get(fieldEntry.getKey()).getLocalizedName(); fields.add("- " + localizedName); } } if (!fields.isEmpty()) { showOptionDialog(getMessage("bulk.confirmation"), formatMessage("bulk.applyConfirmation", items.size(), StringUtils.join(fields, "\n")), MessageType.CONFIRMATION, new Action[]{ new AbstractAction("actions.Apply") { { icon = themeConstants.get("actions.dialog.Ok.icon"); } @Override public void actionPerform(Component component) { commitChanges(); } }, new DialogAction(Type.CANCEL, Status.PRIMARY) }); } else { showNotification(getMessage("bulk.noChanges"), NotificationType.HUMANIZED); } } else { showNotification(sb.toString(), NotificationType.TRAY); } } } protected List<Entity> loadItems(View view) { LoadContext.Query query = new LoadContext.Query(String.format("select e from %s e where e.%s in :ids", metaClass, metadataTools.getPrimaryKeyName(metaClass))); List<Object> ids = selected.stream() 
.map(Entity::getId) .collect(Collectors.toList()); query.setParameter("ids", ids); LoadContext<Entity> lc = new LoadContext<>(metaClass); lc.setSoftDeletion(false); lc.setQuery(query); lc.setView(view); return dataSupplier.loadList(lc); } protected void commitChanges() { List<String> fields = new ArrayList<>(); for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (field.getValue() != null || !field.isEnabled()) { fields.add(managedFields.get(fieldEntry.getKey()).getFqn()); } } for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (!field.isEnabled()) { for (Entity item : items) { ensureEmbeddedPropertyCreated(item, fieldEntry.getKey()); item.setValueEx(fieldEntry.getKey(), null); } } else if (field.getValue() != null) { for (Entity item : items) { ensureEmbeddedPropertyCreated(item, fieldEntry.getKey()); item.setValueEx(fieldEntry.getKey(), field.getValue()); } } } Set<Entity> commited = dataSupplier.commit(new CommitContext(items)); Logger logger = LoggerFactory.getLogger(BulkEditorWindow.class); logger.info("Applied bulk editing for {} entries of {}. Changed properties: {}", commited.size(), metaClass, StringUtils.join(fields, ", ")); showNotification(formatMessage("bulk.successMessage", commited.size()), NotificationType.HUMANIZED); close(COMMIT_ACTION_ID); } protected void ensureEmbeddedPropertyCreated(Entity item, String propertyPath) { if (!StringUtils.contains(propertyPath, ".")) { return; } MetaPropertyPath path = metaClass.getPropertyPath(propertyPath); if (path != null) { Entity currentItem = item; for (MetaProperty property : path.getMetaProperties()) { if (metadataTools.isEmbedded(property)) { Object currentItemValue = currentItem.getValue(property.getName()); if (currentItemValue == null) { Entity newItem = metadata.create(property.getRange().asClass()); currentItem.setValue(property.getName(), newItem); currentItem = newItem; } else { currentItem = (Entity) currentItemValue; } } else { break; } } } } protected static class ManagedField { protected final String fqn; protected final String parentFqn; protected final String localizedName; protected final MetaProperty metaProperty; public ManagedField(String fqn, MetaProperty metaProperty, String localizedName, String parentFqn) { this.fqn = fqn; this.metaProperty = metaProperty; this.localizedName = localizedName; this.parentFqn = parentFqn; } public String getFqn() { return fqn; } public String getParentFqn() { return parentFqn; } public String getLocalizedName() { return localizedName; } public MetaProperty getMetaProperty() { return metaProperty; } } }
modules/gui/src/com/haulmont/cuba/gui/app/core/bulk/BulkEditorWindow.java
/* * Copyright (c) 2008-2016 Haulmont. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.haulmont.cuba.gui.app.core.bulk; import com.haulmont.chile.core.datatypes.impl.ByteArrayDatatype; import com.haulmont.chile.core.datatypes.impl.UUIDDatatype; import com.haulmont.chile.core.model.MetaClass; import com.haulmont.chile.core.model.MetaProperty; import com.haulmont.chile.core.model.MetaPropertyPath; import com.haulmont.cuba.core.entity.BaseUuidEntity; import com.haulmont.cuba.core.entity.Entity; import com.haulmont.cuba.core.global.*; import com.haulmont.cuba.gui.AppConfig; import com.haulmont.cuba.gui.WindowParam; import com.haulmont.cuba.gui.components.*; import com.haulmont.cuba.gui.components.Action.Status; import com.haulmont.cuba.gui.components.DialogAction.Type; import com.haulmont.cuba.gui.data.DataSupplier; import com.haulmont.cuba.gui.data.Datasource; import com.haulmont.cuba.gui.data.NestedDatasource; import com.haulmont.cuba.gui.data.impl.DatasourceImpl; import com.haulmont.cuba.gui.data.impl.DsContextImpl; import com.haulmont.cuba.gui.data.impl.EmbeddedDatasourceImpl; import com.haulmont.cuba.gui.theme.ThemeConstants; import com.haulmont.cuba.gui.xml.layout.ComponentsFactory; import com.haulmont.cuba.security.entity.EntityAttrAccess; import com.haulmont.cuba.security.entity.EntityOp; import org.apache.commons.lang.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.inject.Inject; import java.util.*; import java.util.regex.Pattern; import static com.haulmont.bali.util.Preconditions.checkNotNullArgument; public class BulkEditorWindow extends AbstractWindow { @Inject protected ViewRepository viewRepository; @Inject protected MetadataTools metadataTools; @Inject protected MessageTools messageTools; @Inject protected Metadata metadata; @Inject protected DataSupplier dataSupplier; @Inject protected ComponentsFactory componentsFactory; @Inject protected Configuration configuration; @Inject protected Security security; @Inject protected BoxLayout contentPane; @Inject protected Label infoLabel; @Inject protected Button applyButton; @Inject protected ThemeConstants themeConstants; @WindowParam(required = true) protected MetaClass metaClass; @WindowParam(required = true) protected Set<Entity> selected; @WindowParam protected String exclude; @WindowParam protected Map<String, Field.Validator> fieldValidators; @WindowParam protected List<Field.Validator> modelValidators; protected Pattern excludeRegex; protected DsContextImpl dsContext; protected Datasource<Entity> datasource; protected Map<String, Datasource<Entity>> datasources = new HashMap<>(); protected Map<String, ManagedField> managedFields = new LinkedHashMap<>(); protected Map<String, Field> dataFields = new LinkedHashMap<>(); protected BulkEditorFieldFactory fieldFactory = new BulkEditorFieldFactory(); protected List<Entity> items; protected List<String> managedEmbeddedProperties = new ArrayList<>(); @Override public void init(Map<String, Object> params) { super.init(params); int width = 
themeConstants.getInt("cuba.gui.BulkEditorWindow.width"); int height = themeConstants.getInt("cuba.gui.BulkEditorWindow.height"); getDialogOptions() .setWidth(width) .setHeight(height); checkNotNullArgument(metaClass); checkNotNullArgument(selected); if (StringUtils.isNotBlank(exclude)) { excludeRegex = Pattern.compile(exclude); } for (ManagedField managedField : getManagedFields(metaClass)) { managedFields.put(managedField.getFqn(), managedField); } View view = createView(metaClass); items = loadItems(view); dsContext = new DsContextImpl(dataSupplier); dsContext.setFrameContext(getDsContext().getFrameContext()); setDsContext(dsContext); datasource = new DatasourceImpl<>(); datasource.setup(dsContext, dataSupplier, metaClass.getName() + "Ds", metaClass, view); ((DatasourceImpl) datasource).valid(); dsContext.register(datasource); createNestedEmbeddedDatasources(datasource, metaClass, ""); Entity instance = metadata.create(metaClass); createEmbeddedFields(metaClass, instance, ""); datasource.setItem(instance); datasource.setAllowCommit(false); createDataComponents(); } protected void createDataComponents() { if (managedFields.isEmpty()) { infoLabel.setValue(getMessage("bulk.noEditableProperties")); applyButton.setVisible(false); return; } GridLayout grid = componentsFactory.createComponent(GridLayout.class); grid.setSpacing(true); grid.setColumns(4); grid.setRows((managedFields.size() + 1) / 2); grid.setStyleName("c-bulk-editor-grid"); contentPane.add(grid); grid.setFrame(frame); List<ManagedField> editFields = new ArrayList<>(managedFields.values()); editFields.sort((o1, o2) -> o1.getLocalizedName().compareTo(o2.getLocalizedName())); String fieldWidth = themeConstants.get("cuba.gui.BulkEditorWindow.field.width"); for (ManagedField field : editFields) { Label label = componentsFactory.createComponent(Label.class); label.setFrame(getFrame()); label.setValue(field.getLocalizedName()); label.setAlignment(Alignment.TOP_LEFT); label.setStyleName("field-label"); if (AppConfig.getClientType() == ClientType.DESKTOP) { label.setHeight("25px"); } grid.add(label); Datasource<Entity> fieldDs = datasource; // field owner metaclass is embeddable only if field domain embeddable, // so we can check field domain if (metadataTools.isEmbeddable(field.getMetaProperty().getDomain())) { fieldDs = datasources.get(field.getParentFqn()); } final Field editField = fieldFactory.createField(fieldDs, field.getMetaProperty()); if (editField != null) { editField.setFrame(getFrame()); editField.setWidth(fieldWidth); boolean required = editField.isRequired(); BoxLayout boxLayout = componentsFactory.createComponent(HBoxLayout.class); boxLayout.setFrame(getFrame()); boxLayout.setSpacing(true); boxLayout.add(editField); if (!required) { final Button clearButton = componentsFactory.createComponent(Button.class); clearButton.setFrame(getFrame()); Action action = new AbstractAction("actions.BulkClear") { @Override public void actionPerform(Component component) { editField.setEnabled(!editField.isEnabled()); if (!editField.isEnabled()) { editField.setValue(null); setIcon("icons/edit.png"); clearButton.setDescription(getMessage("bulk.editAttribute")); } else { setIcon("icons/trash.png"); clearButton.setDescription(getMessage("bulk.clearAttribute")); } } }; action.setCaption(""); action.setIcon("icons/trash.png"); clearButton.setAction(action); clearButton.setDescription(getMessage("bulk.clearAttribute")); boxLayout.add(clearButton); } editField.setRequired(false); editField.setValue(null); if (fieldValidators != null) { 
Field.Validator validator = fieldValidators.get(field.getFqn()); if (validator != null) { editField.addValidator(validator); } } grid.add(boxLayout); dataFields.put(field.getFqn(), editField); } else { Label unknownLabel = componentsFactory.createComponent(Label.class); unknownLabel.setFrame(getFrame()); grid.add(unknownLabel); } } if (!dataFields.isEmpty()) { dataFields.values().iterator().next().requestFocus(); } } protected boolean isByteArray(MetaProperty metaProperty) { return ByteArrayDatatype.NAME.equals(metaProperty.getRange().asDatatype().getName()); } protected boolean isUuid(MetaProperty metaProperty) { return UUIDDatatype.NAME.equals(metaProperty.getRange().asDatatype().getName()); } /** * Recursively instantiates the embedded properties. * E.g. embedded properties of the embedded property will also be instantiated. * * @param metaClass meta class of the entity * @param item entity instance */ protected void createEmbeddedFields(MetaClass metaClass, Entity item, String fqnPrefix) { for (MetaProperty metaProperty : metaClass.getProperties()) { String fqn = metaProperty.getName(); if (StringUtils.isNotEmpty(fqnPrefix)) { fqn = fqnPrefix + "." + fqn; } if (managedEmbeddedProperties.contains(fqn) && metadataTools.isEmbedded(metaProperty)) { MetaClass embeddedMetaClass = metaProperty.getRange().asClass(); Entity embedded = item.getValue(metaProperty.getName()); if (embedded == null) { embedded = metadata.create(embeddedMetaClass); item.setValue(metaProperty.getName(), embedded); } createEmbeddedFields(embeddedMetaClass, embedded, fqn); } } } protected void createNestedEmbeddedDatasources(Datasource masterDs, MetaClass metaClass, String fqnPrefix) { for (MetaProperty metaProperty : metaClass.getProperties()) { if (MetaProperty.Type.ASSOCIATION == metaProperty.getType() || MetaProperty.Type.COMPOSITION == metaProperty.getType()) { String fqn = metaProperty.getName(); if (StringUtils.isNotEmpty(fqnPrefix)) { fqn = fqnPrefix + "." + fqn; } if (managedEmbeddedProperties.contains(fqn) && metadataTools.isEmbedded(metaProperty)) { MetaClass propertyMetaClass = metaProperty.getRange().asClass(); @SuppressWarnings("unchecked") NestedDatasource<Entity> propertyDs = new EmbeddedDatasourceImpl(); propertyDs.setup(fqn + "Ds", masterDs, metaProperty.getName()); propertyDs.setAllowCommit(false); createNestedEmbeddedDatasources(propertyDs, propertyMetaClass, fqn); datasources.put(fqn, propertyDs); dsContext.register(propertyDs); } } } } /** * Creates a view, loading only neccessary properties. * Referenced entities will be loaded with a MINIMAL view. 
* * @param meta meta class * @return View instance */ protected View createView(MetaClass meta) { @SuppressWarnings("unchecked") View view = new View(meta.getJavaClass(), false); for (MetaProperty metaProperty : meta.getProperties()) { if (!managedFields.containsKey(metaProperty.getName()) && !managedEmbeddedProperties.contains(metaProperty.getName())) { continue; } switch (metaProperty.getType()) { case DATATYPE: case ENUM: view.addProperty(metaProperty.getName()); break; case ASSOCIATION: case COMPOSITION: View propView; if (!metadataTools.isEmbedded(metaProperty)) { propView = viewRepository.getView(metaProperty.getRange().asClass(), View.MINIMAL); //in some cases JPA loads extended entities as instance of base class which leads to ClassCastException //loading property lazy prevents this from happening view.addProperty(metaProperty.getName(), propView, true); } else { // build view for embedded property propView = createEmbeddedView(metaProperty.getRange().asClass(), metaProperty.getName()); view.addProperty(metaProperty.getName(), propView, false); } break; default: throw new IllegalStateException("unknown property type"); } } return view; } protected View createEmbeddedView(MetaClass meta, String fqnPrefix) { @SuppressWarnings("unchecked") View view = new View(meta.getJavaClass(), false); for (MetaProperty metaProperty : meta.getProperties()) { String fqn = fqnPrefix + "." + metaProperty.getName(); if (!managedFields.containsKey(fqn)) { continue; } switch (metaProperty.getType()) { case DATATYPE: case ENUM: view.addProperty(metaProperty.getName()); break; case ASSOCIATION: case COMPOSITION: View propView; if (!metadataTools.isEmbedded(metaProperty)) { propView = viewRepository.getView(metaProperty.getRange().asClass(), View.MINIMAL); } else { // build view for embedded property propView = createEmbeddedView(metaProperty.getRange().asClass(), fqn); } //in some cases JPA loads extended entities as instance of base class which leads to ClassCastException //loading property lazy prevents this from happening view.addProperty(metaProperty.getName(), propView, true); break; default: throw new IllegalStateException("unknown property type"); } } return view; } protected boolean isPermitted(MetaClass metaClass, MetaProperty metaProperty) { return security.isEntityAttrPermitted(metaClass, metaProperty.getName(), EntityAttrAccess.MODIFY); } protected boolean isManagedAttribute(MetaClass metaClass, MetaProperty metaProperty) { if (metadataTools.isSystem(metaProperty) || metadataTools.isTransient(metaProperty) || metadataTools.isSystemLevel(metaProperty) || metaProperty.getRange().getCardinality().isMany() || !isPermitted(metaClass, metaProperty)) { return false; } if (metaProperty.getRange().isDatatype() && (isByteArray(metaProperty) || isUuid(metaProperty))) { return false; } if (metaProperty.getRange().isClass()) { MetaClass propertyMetaClass = metaProperty.getRange().asClass(); if (metadataTools.isSystemLevel(propertyMetaClass)) { return false; } if (!security.isEntityOpPermitted(propertyMetaClass, EntityOp.READ)) { return false; } } return !(excludeRegex != null && excludeRegex.matcher(metaProperty.getName()).matches()); } protected List<ManagedField> getManagedFields(MetaClass metaClass) { List<ManagedField> managedFields = new ArrayList<>(); // sort Fields for (MetaProperty metaProperty : metaClass.getProperties()) { if (isManagedAttribute(metaClass, metaProperty)) { String propertyCaption = messageTools.getPropertyCaption(metaClass, metaProperty.getName()); if 
(!metadataTools.isEmbedded(metaProperty)) { managedFields.add(new ManagedField(metaProperty.getName(), metaProperty, propertyCaption, null)); } else { List<ManagedField> nestedFields = getManagedFields(metaProperty, metaProperty.getName(), propertyCaption); if (nestedFields.size() > 0) { managedEmbeddedProperties.add(metaProperty.getName()); } managedFields.addAll(nestedFields); } } } return managedFields; } protected List<ManagedField> getManagedFields(MetaProperty embeddedProperty, String fqnPrefix, String localePrefix) { List<ManagedField> managedFields = new ArrayList<>(); MetaClass metaClass = embeddedProperty.getRange().asClass(); for (MetaProperty metaProperty : metaClass.getProperties()) { if (isManagedAttribute(metaClass, metaProperty)) { String fqn = fqnPrefix + "." + metaProperty.getName(); String localeName = localePrefix + " " + messageTools.getPropertyCaption(metaClass, metaProperty.getName()); if (!metadataTools.isEmbedded(metaProperty)) { managedFields.add(new ManagedField(fqn, metaProperty, localeName, fqnPrefix)); } else { List<ManagedField> nestedFields = getManagedFields(metaProperty, fqn, localeName); if (nestedFields.size() > 0) { managedEmbeddedProperties.add(fqn); } managedFields.addAll(nestedFields); } } } return managedFields; } @Override protected boolean preClose(String actionId) { if (actionId.equals(CLOSE_ACTION_ID)) { cancelChanges(); return false; } return super.preClose(actionId); } public void cancelChanges() { if (hasChanges()) { showOptionDialog(messages.getMainMessage("closeUnsaved.caption"), messages.getMainMessage("closeUnsaved"), MessageType.CONFIRMATION, new Action[]{ new DialogAction(Type.YES) { @Override public void actionPerform(Component component) { close(CLOSE_ACTION_ID, true); } }, new DialogAction(Type.NO, Status.PRIMARY) }); } else { close(CLOSE_ACTION_ID, true); } } private boolean hasChanges() { for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (field.getValue() != null || !field.isEnabled()) { return true; } } return false; } public void applyChanges() { if (validateAll()) { StringBuilder sb = new StringBuilder(); if (modelValidators != null) { for (Field.Validator moduleValidator : modelValidators) { try { moduleValidator.validate(datasource); } catch (ValidationException e) { sb.append(e.getMessage()); sb.append("\n"); } } } if (sb.length() == 0) { List<String> fields = new ArrayList<>(); for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (field.getValue() != null || !field.isEnabled()) { String localizedName = managedFields.get(fieldEntry.getKey()).getLocalizedName(); fields.add("- " + localizedName); } } if (!fields.isEmpty()) { showOptionDialog(getMessage("bulk.confirmation"), formatMessage("bulk.applyConfirmation", items.size(), StringUtils.join(fields, "\n")), MessageType.CONFIRMATION, new Action[]{ new AbstractAction("actions.Apply") { { icon = themeConstants.get("actions.dialog.Ok.icon"); } @Override public void actionPerform(Component component) { commitChanges(); } }, new DialogAction(Type.CANCEL, Status.PRIMARY) }); } else { showNotification(getMessage("bulk.noChanges"), NotificationType.HUMANIZED); } } else { showNotification(sb.toString(), NotificationType.TRAY); } } } protected List<Entity> loadItems(View view) { LoadContext<Entity> lc = new LoadContext<>(metaClass); lc.setSoftDeletion(false); List<UUID> ids = new ArrayList<>(); for (Entity item : selected) { ids.add(((BaseUuidEntity) item).getId()); } 
LoadContext.Query query = new LoadContext.Query(String.format("select e from %s e where e.id in :ids", metaClass)); query.setParameter("ids", ids); lc.setQuery(query); lc.setView(view); return dataSupplier.loadList(lc); } protected void commitChanges() { List<String> fields = new ArrayList<>(); for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (field.getValue() != null || !field.isEnabled()) { fields.add(managedFields.get(fieldEntry.getKey()).getFqn()); } } for (Map.Entry<String, Field> fieldEntry : dataFields.entrySet()) { Field field = fieldEntry.getValue(); if (!field.isEnabled()) { for (Entity item : items) { ensureEmbeddedPropertyCreated(item, fieldEntry.getKey()); item.setValueEx(fieldEntry.getKey(), null); } } else if (field.getValue() != null) { for (Entity item : items) { ensureEmbeddedPropertyCreated(item, fieldEntry.getKey()); item.setValueEx(fieldEntry.getKey(), field.getValue()); } } } Set<Entity> commited = dataSupplier.commit(new CommitContext(items)); Logger logger = LoggerFactory.getLogger(BulkEditorWindow.class); logger.info("Applied bulk editing for {} entries of {}. Changed properties: {}", commited.size(), metaClass, StringUtils.join(fields, ", ")); showNotification(formatMessage("bulk.successMessage", commited.size()), NotificationType.HUMANIZED); close(COMMIT_ACTION_ID); } protected void ensureEmbeddedPropertyCreated(Entity item, String propertyPath) { if (!StringUtils.contains(propertyPath, ".")) { return; } MetaPropertyPath path = metaClass.getPropertyPath(propertyPath); if (path != null) { Entity currentItem = item; for (MetaProperty property : path.getMetaProperties()) { if (metadataTools.isEmbedded(property)) { Object currentItemValue = currentItem.getValue(property.getName()); if (currentItemValue == null) { Entity newItem = metadata.create(property.getRange().asClass()); currentItem.setValue(property.getName(), newItem); currentItem = newItem; } else { currentItem = (Entity) currentItemValue; } } else { break; } } } } protected static class ManagedField { protected final String fqn; protected final String parentFqn; protected final String localizedName; protected final MetaProperty metaProperty; public ManagedField(String fqn, MetaProperty metaProperty, String localizedName, String parentFqn) { this.fqn = fqn; this.metaProperty = metaProperty; this.localizedName = localizedName; this.parentFqn = parentFqn; } public String getFqn() { return fqn; } public String getParentFqn() { return parentFqn; } public String getLocalizedName() { return localizedName; } public MetaProperty getMetaProperty() { return metaProperty; } } }
PL-8858 Support BaseGenericIdEntity with natural keys in BulkEditor
modules/gui/src/com/haulmont/cuba/gui/app/core/bulk/BulkEditorWindow.java
PL-8858 Support BaseGenericIdEntity with natural keys in BulkEditor
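The PL-8858 record above changes how the bulk editor identifies the selected rows: the old_contents shown still cast every selected item to BaseUuidEntity inside loadItems() before building the "select e from %s e where e.id in :ids" query, which breaks for entities that are not keyed by a UUID. Below is a minimal, framework-free sketch of collecting ids through an extractor instead of a concrete cast; the AppUser class and collectIds helper are hypothetical illustrations, not the actual CUBA change.

// Sketch only: ids are gathered via a caller-supplied extractor, so the same
// bulk-load query shape works for UUID keys, numeric keys, or natural keys.
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.function.Function;

public class SelectedIdsSketch {

    static final class AppUser {
        private final Long id;
        AppUser(Long id) { this.id = id; }
        Long getId() { return id; }
    }

    // Nothing here is cast to a concrete base entity class.
    static <E, K> List<K> collectIds(Collection<E> selected, Function<E, K> idExtractor) {
        List<K> ids = new ArrayList<>(selected.size());
        for (E item : selected) {
            ids.add(idExtractor.apply(item));
        }
        return ids;
    }

    public static void main(String[] args) {
        List<AppUser> selected = List.of(new AppUser(1L), new AppUser(2L));
        List<Long> ids = collectIds(selected, AppUser::getId);

        // Same query shape as loadItems() in the record above.
        String jpql = String.format("select e from %s e where e.id in :ids", "app$User");
        System.out.println(jpql + "   ids=" + ids);
    }
}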
Java
apache-2.0
9e8fc84e21800d3703f62a1f6748061fe96a98df
0
140293816/Hawkular-fork,tsegismont/hawkular-metrics,jotak/hawkular-metrics,pilhuhn/rhq-metrics,ppalaga/hawkular-metrics,jshaughn/hawkular-metrics,spadgett/hawkular-metrics,spadgett/hawkular-metrics,tsegismont/hawkular-metrics,burmanm/hawkular-metrics,hawkular/hawkular-metrics,140293816/Hawkular-fork,burmanm/hawkular-metrics,140293816/Hawkular-fork,pilhuhn/rhq-metrics,140293816/Hawkular-fork,tsegismont/hawkular-metrics,spadgett/hawkular-metrics,hawkular/hawkular-metrics,tsegismont/hawkular-metrics,jotak/hawkular-metrics,jshaughn/hawkular-metrics,jshaughn/hawkular-metrics,hawkular/hawkular-metrics,ppalaga/hawkular-metrics,jotak/hawkular-metrics,mwringe/hawkular-metrics,ppalaga/hawkular-metrics,hawkular/hawkular-metrics,jotak/hawkular-metrics,spadgett/hawkular-metrics,mwringe/hawkular-metrics,mwringe/hawkular-metrics,pilhuhn/rhq-metrics,ppalaga/hawkular-metrics,pilhuhn/rhq-metrics,burmanm/hawkular-metrics,mwringe/hawkular-metrics,burmanm/hawkular-metrics,spadgett/hawkular-metrics,jshaughn/hawkular-metrics
/* * Copyright 2014-2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.metrics.tasks.impl; import static java.util.Arrays.asList; import static org.joda.time.DateTime.now; import static org.joda.time.Duration.standardSeconds; import static org.testng.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import org.hawkular.metrics.tasks.BaseTest; import org.hawkular.metrics.tasks.api.TaskType; import org.joda.time.DateTime; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; /** * @author jsanda */ public class TaskSchedulerTest extends BaseTest { private LeaseService leaseService; @BeforeClass public void initClass() { leaseService = new LeaseService(session, queries); } @Test public void startScheduler() throws Exception { List<TaskType> taskTypes = asList(new TaskType().setName("test").setSegments(1).setSegmentOffsets(1)); List<DateTime> actualTimeSlices = new ArrayList<>(); List<DateTime> expectedTimeSlices = asList( dateTimeService.getTimeSlice(now().plusSeconds(2), standardSeconds(1)), dateTimeService.getTimeSlice(now().plusSeconds(3), standardSeconds(1)), dateTimeService.getTimeSlice(now().plusSeconds(4), standardSeconds(1)) ); TaskServiceImpl taskService = new TaskServiceImpl(session, queries, leaseService, taskTypes) { @Override public void executeTasks(DateTime timeSlice) { actualTimeSlices.add(timeSlice); } }; taskService.setTimeUnit(TimeUnit.SECONDS); taskService.start(); Thread.sleep(5000); taskService.shutdown(); assertTrue(actualTimeSlices.size() >= 3, "Expected task execution to be scheduled at least 3 times but it " + "was scheduled " + actualTimeSlices.size() + " times."); assertTrue(actualTimeSlices.size() <= 6, "Expected no more that 5 task executions to be scheduled since " + "shutdown was called, but it was scheduled " + actualTimeSlices.size() + " times."); assertTaskExecutionScheduleForTimeSlices(actualTimeSlices, expectedTimeSlices); } private void assertTaskExecutionScheduleForTimeSlices(List<DateTime> actualTimeSlices, List<DateTime> expectedTimeSlices) { expectedTimeSlices.forEach(timeSlice -> assertTrue(actualTimeSlices.contains(timeSlice), "Expected task execution to be scheduled for time slice " + timeSlice)); } }
task-queue/src/test/java/org/hawkular/metrics/tasks/impl/TaskSchedulerTest.java
/* * Copyright 2014-2015 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.metrics.tasks.impl; import static java.util.Arrays.asList; import static org.joda.time.DateTime.now; import static org.joda.time.Duration.standardSeconds; import static org.testng.Assert.assertTrue; import java.util.ArrayList; import java.util.List; import java.util.concurrent.TimeUnit; import org.hawkular.metrics.tasks.BaseTest; import org.hawkular.metrics.tasks.api.TaskType; import org.joda.time.DateTime; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; /** * @author jsanda */ public class TaskSchedulerTest extends BaseTest { private LeaseService leaseService; @BeforeClass public void initClass() { leaseService = new LeaseService(session, queries); } @Test public void startScheduler() throws Exception { List<TaskType> taskTypes = asList(new TaskType().setName("test").setSegments(1).setSegmentOffsets(1)); List<DateTime> actualTimeSlices = new ArrayList<>(); List<DateTime> expectedTimeSlices = asList( dateTimeService.getTimeSlice(now().plusSeconds(1), standardSeconds(2)), dateTimeService.getTimeSlice(now().plusSeconds(2), standardSeconds(3)), dateTimeService.getTimeSlice(now().plusSeconds(2), standardSeconds(3)) ); TaskServiceImpl taskService = new TaskServiceImpl(session, queries, leaseService, taskTypes) { @Override public void executeTasks(DateTime timeSlice) { actualTimeSlices.add(timeSlice); } }; taskService.setTimeUnit(TimeUnit.SECONDS); taskService.start(); Thread.sleep(4000); taskService.shutdown(); assertTrue(actualTimeSlices.size() >= 3, "Expected task execution to be scheduled at least 3 times but it " + "was scheduled " + actualTimeSlices.size() + " times."); assertTrue(actualTimeSlices.size() <= 5, "Exepected no more that 5 task executions to be scheduled since " + "shutdown was called, but it was scheduled " + actualTimeSlices.size() + " times."); assertTaskExecutionScheduleForTimeSlices(actualTimeSlices, expectedTimeSlices); } private void assertTaskExecutionScheduleForTimeSlices(List<DateTime> actualTimeSlices, List<DateTime> expectedTimeSlices) { expectedTimeSlices.forEach(timeSlice -> assertTrue(actualTimeSlices.contains(timeSlice), "Expected task execution to be scheduled for time slice " + timeSlice)); } }
[HWKMETRICS-52] try to make test less prone to failure on CI builds
task-queue/src/test/java/org/hawkular/metrics/tasks/impl/TaskSchedulerTest.java
[HWKMETRICS-52] try to make test less prone to failure on CI builds
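The HWKMETRICS-52 change above does not touch scheduler behaviour; it widens the test's tolerances (a 5 s observation window instead of 4 s, and an upper bound of 6 scheduled executions instead of 5) so timing jitter on CI machines cannot fail the run. A minimal, framework-free sketch of that testing pattern, using plain java.util.concurrent rather than the Hawkular TaskServiceImpl:

// Sketch only: assert on a tolerant range of executions rather than an exact count.
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

public class TolerantSchedulerCheck {

    public static void main(String[] args) throws InterruptedException {
        AtomicInteger executions = new AtomicInteger();
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleAtFixedRate(executions::incrementAndGet, 1, 1, TimeUnit.SECONDS);

        Thread.sleep(5000);       // longer observation window, as in the updated test
        scheduler.shutdownNow();

        int count = executions.get();
        // The lower bound proves the scheduler really ran; the upper bound proves
        // shutdown was honoured, with a tick of slack on each side for jitter.
        if (count < 3 || count > 6) {
            throw new AssertionError("expected 3..6 executions but got " + count);
        }
        System.out.println("executions within tolerated range: " + count);
    }
}

The exact bounds are a trade-off: too tight and CI noise fails the build, too loose and a real scheduling regression slips through unnoticed.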
Java
apache-2.0
938cd139701953dcd53de223865a876633572252
0
wikimedia/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia,wikimedia/apps-android-wikipedia,wikimedia/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia,dbrant/apps-android-wikipedia
package org.wikipedia.feed.aggregated; import android.content.Context; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import org.wikipedia.dataclient.WikiSite; import org.wikipedia.dataclient.retrofit.RetrofitFactory; import org.wikipedia.feed.dataclient.FeedClient; import org.wikipedia.feed.featured.FeaturedArticleCard; import org.wikipedia.feed.image.FeaturedImageCard; import org.wikipedia.feed.model.Card; import org.wikipedia.feed.model.UtcDate; import org.wikipedia.feed.mostread.MostReadListCard; import org.wikipedia.feed.news.NewsListCard; import org.wikipedia.feed.onthisday.OnThisDayCard; import org.wikipedia.settings.Prefs; import org.wikipedia.util.DateUtil; import org.wikipedia.util.log.L; import java.util.ArrayList; import java.util.List; import java.util.Locale; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.http.GET; import retrofit2.http.Headers; import retrofit2.http.Path; import static org.wikipedia.Constants.ACCEPT_HEADER_PREFIX; public class AggregatedFeedContentClient { @Nullable private Call<AggregatedFeedContent> call; @Nullable private WikiSite wiki; @Nullable private AggregatedFeedContent aggregatedResponse; private int aggregatedResponseAge = -1; public static class OnThisDayFeed extends BaseClient { public OnThisDayFeed(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.onthisday() != null && !content.onthisday().isEmpty()) { outCards.add(new OnThisDayCard(content.onthisday(), wiki, age)); } } } public static class InTheNews extends BaseClient { public InTheNews(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { // todo: remove age check when news endpoint provides dated content, T139481. 
if (age == 0 && content.news() != null) { outCards.add(new NewsListCard(content.news(), age, wiki)); } } } public static class FeaturedArticle extends BaseClient { public FeaturedArticle(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.tfa() != null) { outCards.add(new FeaturedArticleCard(content.tfa(), age, wiki)); } } } public static class TrendingArticles extends BaseClient { public TrendingArticles(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.mostRead() != null) { outCards.add(new MostReadListCard(content.mostRead(), wiki)); } } } public static class FeaturedImage extends BaseClient { public FeaturedImage(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.potd() != null) { outCards.add(new FeaturedImageCard(content.potd(), age, wiki)); } } } void setAggregatedResponse(@Nullable AggregatedFeedContent content, int age, @Nullable WikiSite wiki) { aggregatedResponse = content; this.aggregatedResponseAge = age; this.wiki = wiki; } void requestAggregated(@NonNull WikiSite wiki, int age, @NonNull retrofit2.Callback<AggregatedFeedContent> cb) { cancel(); UtcDate date = DateUtil.getUtcRequestDateFor(age); String endpoint = String.format(Locale.ROOT, Prefs.getRestbaseUriFormat(), wiki.scheme(), wiki.authority()); Retrofit retrofit = RetrofitFactory.newInstance(endpoint, wiki); AggregatedFeedContentClient.Service service = retrofit.create(Service.class); call = service.get(date.year(), date.month(), date.date()); call.enqueue(cb); } public void cancel() { if (call == null) { return; } call.cancel(); call = null; } private interface Service { /** * Gets aggregated content for the feed for the date provided. 
* * @param year four-digit year * @param month two-digit month * @param day two-digit day */ @NonNull @Headers(ACCEPT_HEADER_PREFIX + "aggregated-feed/0.5.0\"") @GET("feed/featured/{year}/{month}/{day}") Call<AggregatedFeedContent> get(@Path("year") String year, @Path("month") String month, @Path("day") String day); } private abstract static class BaseClient implements FeedClient, retrofit2.Callback<AggregatedFeedContent> { @NonNull private AggregatedFeedContentClient aggregatedClient; @Nullable private Callback cb; private WikiSite wiki; private int age; BaseClient(@NonNull AggregatedFeedContentClient aggregatedClient) { this.aggregatedClient = aggregatedClient; } abstract void getCardFromResponse(@NonNull AggregatedFeedContent response, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards); @Override public void request(@NonNull Context context, @NonNull WikiSite wiki, int age, @NonNull Callback cb) { this.cb = cb; this.age = age; this.wiki = wiki; if (aggregatedClient.aggregatedResponseAge == age && aggregatedClient.aggregatedResponse != null && wiki.equals(aggregatedClient.wiki)) { List<Card> cards = new ArrayList<>(); getCardFromResponse(aggregatedClient.aggregatedResponse, wiki, age, cards); cb.success(cards); } else { aggregatedClient.requestAggregated(wiki, age, this); } } @Override public void cancel() { } @Override public void onResponse(@NonNull Call<AggregatedFeedContent> call, @NonNull Response<AggregatedFeedContent> response) { AggregatedFeedContent content = response.body(); if (content == null) { if (cb != null) { cb.error(new RuntimeException("Aggregated response was not in the correct format.")); } return; } aggregatedClient.setAggregatedResponse(content, age, wiki); List<Card> cards = new ArrayList<>(); if (aggregatedClient.aggregatedResponse != null) { getCardFromResponse(aggregatedClient.aggregatedResponse, wiki, age, cards); } if (cb != null) { cb.success(cards); } } @Override public void onFailure(@NonNull Call<AggregatedFeedContent> call, @NonNull Throwable caught) { L.v(caught); if (cb != null) { cb.error(caught); } } } }
app/src/main/java/org/wikipedia/feed/aggregated/AggregatedFeedContentClient.java
package org.wikipedia.feed.aggregated; import android.content.Context; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import org.wikipedia.dataclient.WikiSite; import org.wikipedia.dataclient.retrofit.RetrofitFactory; import org.wikipedia.feed.dataclient.FeedClient; import org.wikipedia.feed.featured.FeaturedArticleCard; import org.wikipedia.feed.image.FeaturedImageCard; import org.wikipedia.feed.model.Card; import org.wikipedia.feed.model.UtcDate; import org.wikipedia.feed.mostread.MostReadListCard; import org.wikipedia.feed.news.NewsListCard; import org.wikipedia.feed.onthisday.OnThisDayCard; import org.wikipedia.settings.Prefs; import org.wikipedia.util.DateUtil; import org.wikipedia.util.log.L; import java.util.ArrayList; import java.util.List; import java.util.Locale; import retrofit2.Call; import retrofit2.Response; import retrofit2.Retrofit; import retrofit2.http.GET; import retrofit2.http.Headers; import retrofit2.http.Path; import static org.wikipedia.Constants.ACCEPT_HEADER_PREFIX; public class AggregatedFeedContentClient { @Nullable private Call<AggregatedFeedContent> call; @Nullable private AggregatedFeedContent aggregatedResponse; private int aggregatedResponseAge = -1; public static class OnThisDayFeed extends BaseClient { public OnThisDayFeed(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.onthisday() != null && !content.onthisday().isEmpty()) { outCards.add(new OnThisDayCard(content.onthisday(), wiki, age)); } } } public static class InTheNews extends BaseClient { public InTheNews(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { // todo: remove age check when news endpoint provides dated content, T139481. 
if (age == 0 && content.news() != null) { outCards.add(new NewsListCard(content.news(), age, wiki)); } } } public static class FeaturedArticle extends BaseClient { public FeaturedArticle(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.tfa() != null) { outCards.add(new FeaturedArticleCard(content.tfa(), age, wiki)); } } } public static class TrendingArticles extends BaseClient { public TrendingArticles(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.mostRead() != null) { outCards.add(new MostReadListCard(content.mostRead(), wiki)); } } } public static class FeaturedImage extends BaseClient { public FeaturedImage(@NonNull AggregatedFeedContentClient aggregatedClient) { super(aggregatedClient); } @Override void getCardFromResponse(@NonNull AggregatedFeedContent content, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards) { if (content.potd() != null) { outCards.add(new FeaturedImageCard(content.potd(), age, wiki)); } } } void setAggregatedResponse(@Nullable AggregatedFeedContent content, int age) { aggregatedResponse = content; this.aggregatedResponseAge = age; } @Nullable AggregatedFeedContent getCurrentResponse() { return aggregatedResponse; } int getCurrentAge() { return aggregatedResponseAge; } void requestAggregated(@NonNull WikiSite wiki, int age, @NonNull retrofit2.Callback<AggregatedFeedContent> cb) { cancel(); UtcDate date = DateUtil.getUtcRequestDateFor(age); String endpoint = String.format(Locale.ROOT, Prefs.getRestbaseUriFormat(), wiki.scheme(), wiki.authority()); Retrofit retrofit = RetrofitFactory.newInstance(endpoint, wiki); AggregatedFeedContentClient.Service service = retrofit.create(Service.class); call = service.get(date.year(), date.month(), date.date()); call.enqueue(cb); } public void cancel() { if (call == null) { return; } call.cancel(); call = null; } private interface Service { /** * Gets aggregated content for the feed for the date provided. 
* * @param year four-digit year * @param month two-digit month * @param day two-digit day */ @NonNull @Headers(ACCEPT_HEADER_PREFIX + "aggregated-feed/0.5.0\"") @GET("feed/featured/{year}/{month}/{day}") Call<AggregatedFeedContent> get(@Path("year") String year, @Path("month") String month, @Path("day") String day); } private abstract static class BaseClient implements FeedClient, retrofit2.Callback<AggregatedFeedContent> { @NonNull private AggregatedFeedContentClient aggregatedClient; @Nullable private Callback cb; private WikiSite wiki; private int age; BaseClient(@NonNull AggregatedFeedContentClient aggregatedClient) { this.aggregatedClient = aggregatedClient; } abstract void getCardFromResponse(@NonNull AggregatedFeedContent response, @NonNull WikiSite wiki, int age, @NonNull List<Card> outCards); @Override public void request(@NonNull Context context, @NonNull WikiSite wiki, int age, @NonNull Callback cb) { this.cb = cb; this.age = age; if (aggregatedClient.getCurrentAge() == age && aggregatedClient.getCurrentResponse() != null && wiki.equals(this.wiki)) { List<Card> cards = new ArrayList<>(); getCardFromResponse(aggregatedClient.getCurrentResponse(), wiki, age, cards); cb.success(cards); } else { aggregatedClient.requestAggregated(wiki, age, this); } this.wiki = wiki; } @Override public void cancel() { } @Override public void onResponse(@NonNull Call<AggregatedFeedContent> call, @NonNull Response<AggregatedFeedContent> response) { AggregatedFeedContent content = response.body(); if (content == null) { if (cb != null) { cb.error(new RuntimeException("Aggregated response was not in the correct format.")); } return; } aggregatedClient.setAggregatedResponse(content, age); List<Card> cards = new ArrayList<>(); if (aggregatedClient.getCurrentResponse() != null) { getCardFromResponse(aggregatedClient.getCurrentResponse(), wiki, age, cards); } if (cb != null) { cb.success(cards); } } @Override public void onFailure(@NonNull Call<AggregatedFeedContent> call, @NonNull Throwable caught) { L.v(caught); if (cb != null) { cb.error(caught); } } } }
Fix multiple unnecessary requests to aggregated feed content. Due to a previous bug fix (relating to switching the wiki language), the logic was causing the AggregatedClient to make multiple requests to get the content (one request for each type of card), when in fact it's designed to make a single request and use it for all card types. Change-Id: Ibb5f4348ddd9033a1b78e8a12f393992d42fa17c
app/src/main/java/org/wikipedia/feed/aggregated/AggregatedFeedContentClient.java
Fix multiple unnecessary requests to aggregated feed content.
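The message above describes a cache that was never hit: each card client compared the requested WikiSite against its own wiki field, which was assigned only after the check and was private to that card, so every card type issued its own network request. The new_contents moves the wiki onto the shared AggregatedFeedContentClient next to the cached response. Below is a minimal stand-alone sketch of that design, one response cached per wiki/age pair and reused by all consumers; the class and field names are hypothetical stand-ins, not the Wikipedia app's API.

// Sketch only: cache the aggregated response together with the parameters it was
// fetched for, and let every consumer check that shared state before fetching again.
import java.util.Objects;
import java.util.concurrent.atomic.AtomicInteger;

public class SharedAggregatedCache {

    static final class CacheEntry {
        final String wiki;      // stands in for WikiSite
        final int age;
        final String content;   // stands in for AggregatedFeedContent
        CacheEntry(String wiki, int age, String content) {
            this.wiki = wiki;
            this.age = age;
            this.content = content;
        }
    }

    private CacheEntry cached;
    private final AtomicInteger networkCalls = new AtomicInteger();

    // One request per (wiki, age) pair; later callers reuse the stored response.
    synchronized String get(String wiki, int age) {
        if (cached != null && cached.age == age && Objects.equals(cached.wiki, wiki)) {
            return cached.content;                    // cache hit: no extra request
        }
        String fetched = "feed(" + wiki + "," + age + ")#" + networkCalls.incrementAndGet();
        cached = new CacheEntry(wiki, age, fetched);  // remember what it was fetched for
        return fetched;
    }

    public static void main(String[] args) {
        SharedAggregatedCache client = new SharedAggregatedCache();
        // Five card types asking for the same day's content share a single fetch.
        for (String card : new String[] {"news", "tfa", "mostread", "potd", "onthisday"}) {
            System.out.println(card + " -> " + client.get("en.wikipedia.org", 0));
        }
        // Switching the wiki invalidates the cached response, as in the fix above.
        System.out.println("de -> " + client.get("de.wikipedia.org", 0));
        System.out.println("total requests: " + client.networkCalls.get());
    }
}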
Java
apache-2.0
2dfd2c38d9bc821c5881b434aa0af5cd8a97eff1
0
apache/logging-log4j2,lqbweb/logging-log4j2,codescale/logging-log4j2,codescale/logging-log4j2,apache/logging-log4j2,codescale/logging-log4j2,lqbweb/logging-log4j2,xnslong/logging-log4j2,xnslong/logging-log4j2,xnslong/logging-log4j2,apache/logging-log4j2,lqbweb/logging-log4j2,GFriedrich/logging-log4j2,GFriedrich/logging-log4j2,GFriedrich/logging-log4j2
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.filter; import java.util.Arrays; import java.util.Iterator; import java.util.List; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Marker; import org.apache.logging.log4j.core.AbstractLifeCycle; import org.apache.logging.log4j.core.Filter; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.Logger; import org.apache.logging.log4j.core.config.Node; import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.config.plugins.PluginElement; import org.apache.logging.log4j.core.config.plugins.PluginFactory; import org.apache.logging.log4j.core.util.ObjectArrayIterator; import org.apache.logging.log4j.message.Message; /** * Composes and invokes one or more filters. */ @Plugin(name = "filters", category = Node.CATEGORY, printObject = true) public final class CompositeFilter extends AbstractLifeCycle implements Iterable<Filter>, Filter { private static final Filter[] EMPTY_FILTERS = new Filter[0]; private final Filter[] filters; private CompositeFilter() { this.filters = EMPTY_FILTERS; } private CompositeFilter(final Filter[] filters) { this.filters = filters == null ? EMPTY_FILTERS : filters; } public CompositeFilter addFilter(final Filter filter) { if (filter == null) { // null does nothing return this; } final Filter[] copy = Arrays.copyOf(this.filters, this.filters.length + 1); copy[this.filters.length] = filter; return new CompositeFilter(copy); } public CompositeFilter removeFilter(final Filter filter) { if (filter == null) { // null does nothing return this; } // This is not a great implementation but simpler than copying Apache Commons // Lang ArrayUtils.removeElement() and associated bits (MutableInt), // which is OK since removing a filter should not be on the critical path. final List<Filter> filterList = Arrays.asList(this.filters); filterList.remove(filter); return new CompositeFilter(filterList.toArray(new Filter[this.filters.length - 1])); } @Override public Iterator<Filter> iterator() { return new ObjectArrayIterator<>(filters); } /** * Gets a new list over the internal filter array. * * @return a new list over the internal filter array * @deprecated Use {@link #getFiltersArray()} */ @Deprecated public List<Filter> getFilters() { return Arrays.asList(filters); } public Filter[] getFiltersArray() { return filters; } /** * Returns whether this composite contains any filters. * * @return whether this composite contains any filters. 
*/ public boolean isEmpty() { return this.filters.length == 0; } public int size() { return filters.length; } @Override public void start() { this.setStarting(); for (final Filter filter : filters) { filter.start(); } this.setStarted(); } @Override public void stop() { this.setStopping(); for (final Filter filter : filters) { filter.stop(); } this.setStopped(); } /** * Returns the result that should be returned when the filter does not match the event. * * @return the Result that should be returned when the filter does not match the event. */ @Override public Result getOnMismatch() { return Result.NEUTRAL; } /** * Returns the result that should be returned when the filter matches the event. * * @return the Result that should be returned when the filter matches the event. */ @Override public Result getOnMatch() { return Result.NEUTRAL; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param params * An array of parameters or null. * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object... params) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, params); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. 
* @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @param p4 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3, p4); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @param p4 the message parameters * @param p5 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3, p4, p5); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @param p4 the message parameters * @param p5 the message parameters * @param p6 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5, final Object p6) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3, p4, p5, p6); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. 
* @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @param p4 the message parameters * @param p5 the message parameters * @param p6 the message parameters * @param p7 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5, final Object p6, final Object p7) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3, p4, p5, p6, p7); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @param p4 the message parameters * @param p5 the message parameters * @param p6 the message parameters * @param p7 the message parameters * @param p8 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5, final Object p6, final Object p7, final Object p8) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3, p4, p5, p6, p7, p8); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param p0 the message parameters * @param p1 the message parameters * @param p2 the message parameters * @param p3 the message parameters * @param p4 the message parameters * @param p5 the message parameters * @param p6 the message parameters * @param p7 the message parameters * @param p8 the message parameters * @param p9 the message parameters * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object p0, final Object p1, final Object p2, final Object p3, final Object p4, final Object p5, final Object p6, final Object p7, final Object p8, final Object p9) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, p0, p1, p2, p3, p4, p5, p6, p7, p8, p9); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * Any Object. * @param t * A Throwable or null. * @return the Result. 
*/ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final Object msg, final Throwable t) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, t); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * The Message * @param t * A Throwable or null. * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final Message msg, final Throwable t) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, t); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param event * The Event to filter on. * @return the Result. */ @Override public Result filter(final LogEvent event) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(event); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); for (int i = 0; i < filters.length; i++) { if (sb.length() == 0) { sb.append('{'); } else { sb.append(", "); } sb.append(filters[i].toString()); } if (sb.length() > 0) { sb.append('}'); } return sb.toString(); } /** * Create a CompositeFilter. * * @param filters * An array of Filters to call. * @return The CompositeFilter. */ @PluginFactory public static CompositeFilter createFilters(@PluginElement("Filters") final Filter[] filters) { return new CompositeFilter(filters); } }
log4j-core/src/main/java/org/apache/logging/log4j/core/filter/CompositeFilter.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache license, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the license for the specific language governing permissions and * limitations under the license. */ package org.apache.logging.log4j.core.filter; import java.util.Arrays; import java.util.Iterator; import java.util.List; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.Marker; import org.apache.logging.log4j.core.AbstractLifeCycle; import org.apache.logging.log4j.core.Filter; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.core.Logger; import org.apache.logging.log4j.core.config.Node; import org.apache.logging.log4j.core.config.plugins.Plugin; import org.apache.logging.log4j.core.config.plugins.PluginElement; import org.apache.logging.log4j.core.config.plugins.PluginFactory; import org.apache.logging.log4j.core.util.ObjectArrayIterator; import org.apache.logging.log4j.message.Message; /** * Composes and invokes one or more filters. */ @Plugin(name = "filters", category = Node.CATEGORY, printObject = true) public final class CompositeFilter extends AbstractLifeCycle implements Iterable<Filter>, Filter { private static final Filter[] EMPTY_FILTERS = new Filter[0]; private final Filter[] filters; private CompositeFilter() { this.filters = EMPTY_FILTERS; } private CompositeFilter(final Filter[] filters) { this.filters = filters == null ? EMPTY_FILTERS : filters; } public CompositeFilter addFilter(final Filter filter) { if (filter == null) { // null does nothing return this; } final Filter[] copy = Arrays.copyOf(this.filters, this.filters.length + 1); copy[this.filters.length] = filter; return new CompositeFilter(copy); } public CompositeFilter removeFilter(final Filter filter) { if (filter == null) { // null does nothing return this; } // This is not a great implementation but simpler than copying Apache Commons // Lang ArrayUtils.removeElement() and associated bits (MutableInt), // which is OK since removing a filter should not be on the critical path. final List<Filter> filterList = Arrays.asList(this.filters); filterList.remove(filter); return new CompositeFilter(filterList.toArray(new Filter[this.filters.length - 1])); } @Override public Iterator<Filter> iterator() { return new ObjectArrayIterator<>(filters); } /** * Gets a new list over the internal filter array. * * @return a new list over the internal filter array * @deprecated Use {@link #getFiltersArray()} */ @Deprecated public List<Filter> getFilters() { return Arrays.asList(filters); } public Filter[] getFiltersArray() { return filters; } /** * Returns whether this composite contains any filters. * * @return whether this composite contains any filters. 
*/ public boolean isEmpty() { return this.filters.length == 0; } public int size() { return filters.length; } @Override public void start() { this.setStarting(); for (final Filter filter : filters) { filter.start(); } this.setStarted(); } @Override public void stop() { this.setStopping(); for (final Filter filter : filters) { filter.stop(); } this.setStopped(); } /** * Returns the result that should be returned when the filter does not match the event. * * @return the Result that should be returned when the filter does not match the event. */ @Override public Result getOnMismatch() { return Result.NEUTRAL; } /** * Returns the result that should be returned when the filter matches the event. * * @return the Result that should be returned when the filter matches the event. */ @Override public Result getOnMatch() { return Result.NEUTRAL; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * String text to filter on. * @param params * An array of parameters or null. * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final String msg, final Object... params) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, params); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * Any Object. * @param t * A Throwable or null. * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final Object msg, final Throwable t) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, t); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param logger * The Logger. * @param level * The event logging Level. * @param marker * The Marker for the event or null. * @param msg * The Message * @param t * A Throwable or null. * @return the Result. */ @Override public Result filter(final Logger logger, final Level level, final Marker marker, final Message msg, final Throwable t) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(logger, level, marker, msg, t); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } /** * Filter an event. * * @param event * The Event to filter on. * @return the Result. */ @Override public Result filter(final LogEvent event) { Result result = Result.NEUTRAL; for (int i = 0; i < filters.length; i++) { result = filters[i].filter(event); if (result == Result.ACCEPT || result == Result.DENY) { return result; } } return result; } @Override public String toString() { final StringBuilder sb = new StringBuilder(); for (int i = 0; i < filters.length; i++) { if (sb.length() == 0) { sb.append('{'); } else { sb.append(", "); } sb.append(filters[i].toString()); } if (sb.length() > 0) { sb.append('}'); } return sb.toString(); } /** * Create a CompositeFilter. * * @param filters * An array of Filters to call. * @return The CompositeFilter. 
*/ @PluginFactory public static CompositeFilter createFilters(@PluginElement("Filters") final Filter[] filters) { return new CompositeFilter(filters); } }
LOG4J2-1278 added methods with unrolled varargs to CompositeFilter
log4j-core/src/main/java/org/apache/logging/log4j/core/filter/CompositeFilter.java
LOG4J2-1278 added methods with unrolled varargs to CompositeFilter
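The commit title names the technique: "unrolled varargs" means adding fixed-arity overloads alongside an existing filter(..., Object... params) style method, so call sites with a small, known number of parameters do not pay for the implicit Object[] allocation that a varargs call requires. A minimal sketch of the pattern follows; the class and method names are illustrative, not the actual Log4j 2 API surface.

// Sketch of the "unrolled varargs" pattern: fixed-arity overloads sit next to the
// varargs method, so low-arity callers avoid the implicit Object[] allocation.
// Names are illustrative stand-ins, not the Log4j 2 Filter API.
public final class UnrolledVarargsSketch {

    // Varargs entry point: the compiler boxes the arguments into an Object[].
    public static String format(final String msg, final Object... params) {
        final StringBuilder sb = new StringBuilder(msg);
        for (final Object p : params) {
            sb.append(' ').append(p);
        }
        return sb.toString();
    }

    // Unrolled one-argument overload: no array allocation at the call site.
    public static String format(final String msg, final Object p0) {
        return new StringBuilder(msg).append(' ').append(p0).toString();
    }

    // Unrolled two-argument overload.
    public static String format(final String msg, final Object p0, final Object p1) {
        return new StringBuilder(msg).append(' ').append(p0).append(' ').append(p1).toString();
    }

    public static void main(final String[] args) {
        System.out.println(format("values:", 1));       // resolves to the p0 overload
        System.out.println(format("values:", 1, 2));    // resolves to the p0, p1 overload
        System.out.println(format("values:", 1, 2, 3)); // falls back to the varargs method
    }
}

Because Java overload resolution prefers an applicable fixed-arity method over the varargs one, existing callers keep working while low-arity calls become allocation-free.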
Java
bsd-2-clause
9045cdf926ed477eab8ae7c6b2e1813fd6b742e6
0
biovoxxel/imagej,TehSAUCE/imagej,TehSAUCE/imagej,biovoxxel/imagej,TehSAUCE/imagej,biovoxxel/imagej
// // SwingNumberSpinnerWidget.java // /* ImageJ software for multidimensional image processing and analysis. Copyright (c) 2010, ImageJDev.org. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the names of the ImageJDev.org developers nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package imagej.plugin.ui.swing; import imagej.plugin.ui.ParamDetails; import java.awt.BorderLayout; import java.awt.Dimension; import javax.swing.JSpinner; import javax.swing.SpinnerNumberModel; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; /** * Swing implementation of number chooser widget, using a spinner. * * @author Curtis Rueden */ public class SwingNumberSpinnerWidget extends SwingNumberWidget implements ChangeListener { private final JSpinner spinner; public SwingNumberSpinnerWidget(final ParamDetails details, final Number min, final Number max, final Number stepSize) { super(details); final SpinnerNumberModel spinnerModel = new SpinnerNumberModel(min, (Comparable<?>) min, (Comparable<?>) max, stepSize); spinner = new JSpinner(spinnerModel); add(spinner, BorderLayout.CENTER); limitWidth(250); spinner.addChangeListener(this); refresh(); } // -- NumberWidget methods -- @Override public Number getValue() { return (Number) spinner.getValue(); } // -- InputWidget methods -- @Override public void refresh() { final Object value = details.getValue(); if (value != null) spinner.setValue(value); } // -- ChangeListener methods -- @Override public void stateChanged(final ChangeEvent e) { details.setValue(spinner.getValue()); } // -- Helper methods -- /** * Limit component width to a certain maximum. This is a HACK to work around * an issue with Double-based spinners that attempt to size themselves very * large (presumably to match Double.MAX_VALUE). */ private void limitWidth(final int maxWidth) { final Dimension spinnerSize = spinner.getPreferredSize(); if (spinnerSize.width > maxWidth) { spinnerSize.width = maxWidth; spinner.setPreferredSize(spinnerSize); spinner.setMaximumSize(spinnerSize); final Dimension widgetSize = getPreferredSize(); widgetSize.width = spinnerSize.width; setPreferredSize(widgetSize); setMaximumSize(widgetSize); } } }
ui/plugin-swing/src/main/java/imagej/plugin/ui/swing/SwingNumberSpinnerWidget.java
// // SwingNumberSpinnerWidget.java // /* ImageJ software for multidimensional image processing and analysis. Copyright (c) 2010, ImageJDev.org. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the names of the ImageJDev.org developers nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package imagej.plugin.ui.swing; import imagej.plugin.ui.ParamDetails; import java.awt.BorderLayout; import java.awt.Dimension; import javax.swing.JComponent; import javax.swing.JSpinner; import javax.swing.SpinnerNumberModel; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; /** * Swing implementation of number chooser widget, using a spinner. * * @author Curtis Rueden */ public class SwingNumberSpinnerWidget extends SwingNumberWidget implements ChangeListener { private final JSpinner spinner; public SwingNumberSpinnerWidget(final ParamDetails details, final Number min, final Number max, final Number stepSize) { super(details); final SpinnerNumberModel spinnerModel = new SpinnerNumberModel(min, (Comparable<?>) min, (Comparable<?>) max, stepSize); spinner = new JSpinner(spinnerModel); limitWidth(spinner, 300); add(spinner, BorderLayout.CENTER); spinner.addChangeListener(this); refresh(); } // -- NumberWidget methods -- @Override public Number getValue() { return (Number) spinner.getValue(); } // -- InputWidget methods -- @Override public void refresh() { final Object value = details.getValue(); if (value != null) spinner.setValue(value); } // -- ChangeListener methods -- @Override public void stateChanged(final ChangeEvent e) { details.setValue(spinner.getValue()); } // -- Helper methods -- /** * Limit component width to a certain maximum. This is a HACK to work around * an issue with Double-based spinners that attempt to size themselves very * large (presumably to match Double.MAX_VALUE). */ private void limitWidth(final JComponent c, final int maxWidth) { final Dimension prefSize = c.getPreferredSize(); if (prefSize.width > maxWidth) prefSize.width = maxWidth; c.setMaximumSize(prefSize); } }
Fix spinner max width hack to actually work. This used to be revision r2736.
ui/plugin-swing/src/main/java/imagej/plugin/ui/swing/SwingNumberSpinnerWidget.java
Fix spinner max width hack to actually work.
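The fixed new_contents above caps the width by shrinking the spinner's preferred size (which BorderLayout and most layout managers actually consult) and mirrors that cap onto the enclosing widget, instead of only setting a maximum size as the old_contents did. A standalone sketch of the same idea, assuming a plain JPanel in place of the project's SwingNumberWidget base class:

import java.awt.BorderLayout;
import java.awt.Dimension;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JSpinner;
import javax.swing.SpinnerNumberModel;
import javax.swing.SwingUtilities;

// Sketch: cap a JSpinner's width. Double-based spinner models can report a huge
// preferred width, so the cap has to be applied to the preferred size (what layout
// managers use), not only to the maximum size.
public class SpinnerWidthCapSketch {

    static void limitWidth(final JSpinner spinner, final JPanel container, final int maxWidth) {
        final Dimension spinnerSize = spinner.getPreferredSize();
        if (spinnerSize.width > maxWidth) {
            spinnerSize.width = maxWidth;
            spinner.setPreferredSize(spinnerSize);
            spinner.setMaximumSize(spinnerSize);
            // Mirror the cap onto the enclosing container as well.
            final Dimension panelSize = container.getPreferredSize();
            panelSize.width = spinnerSize.width;
            container.setPreferredSize(panelSize);
            container.setMaximumSize(panelSize);
        }
    }

    public static void main(final String[] args) {
        SwingUtilities.invokeLater(() -> {
            final JPanel panel = new JPanel(new BorderLayout());
            final JSpinner spinner =
                new JSpinner(new SpinnerNumberModel(0.0, -Double.MAX_VALUE, Double.MAX_VALUE, 0.1));
            panel.add(spinner, BorderLayout.CENTER);
            limitWidth(spinner, panel, 250); // 250 px cap, mirroring the committed fix

            final JFrame frame = new JFrame("spinner width cap");
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.add(panel);
            frame.pack();
            frame.setVisible(true);
        });
    }
}

Running it shows the spinner held to the cap even though the Double-based SpinnerNumberModel would otherwise request a very wide field, which is the behaviour the original HACK comment describes.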
Java
bsd-3-clause
4d6e5811ec830bf8c0e471f4020228ef563310e4
0
ianhanniballake/ContractionTimer,ianhanniballake/ContractionTimer
package com.ianhanniballake.contractiontimer.ui; import android.annotation.TargetApi; import android.database.Cursor; import android.os.Bundle; import android.provider.BaseColumns; import android.util.Log; import android.view.ActionMode; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.widget.AbsListView; import android.widget.AbsListView.MultiChoiceModeListener; import android.widget.Button; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.PopupMenu; import android.widget.PopupMenu.OnMenuItemClickListener; import com.ianhanniballake.contractiontimer.BuildConfig; import com.ianhanniballake.contractiontimer.R; import com.ianhanniballake.contractiontimer.analytics.AnalyticsManagerService; import com.ianhanniballake.contractiontimer.provider.ContractionContract; /** * Fragment to list contractions entered by the user */ @TargetApi(11) public class ContractionListFragmentV11 extends ContractionListFragment implements OnClickListener { /** * Helper class used to store temporary information to aid in handling * PopupMenu item selection */ static class PopupHolder { /** * A contraction's note, if any */ String existingNote; /** * Cursor id for the contraction */ long id; } /** * Key used to store the selected item note in the bundle */ private final static String SELECTED_ITEM_NOTE_KEY = "com.ianhanniballake.contractiontimer.SELECTED_ITEM_NOTE_KEY"; /** * Note associated with the currently selected item */ private String selectedItemNote = null; @Override protected void bindView(final View view, final Cursor cursor) { final Button showPopup = (Button) view.getTag(R.id.show_popup); final Object showPopupTag = showPopup.getTag(); PopupHolder popupHolder; if (showPopupTag == null) { popupHolder = new PopupHolder(); showPopup.setTag(popupHolder); } else popupHolder = (PopupHolder) showPopupTag; final int idColumnIndex = cursor.getColumnIndex(BaseColumns._ID); popupHolder.id = cursor.getLong(idColumnIndex); final int noteColumnIndex = cursor .getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE); final String note = cursor.getString(noteColumnIndex); popupHolder.existingNote = note; // Don't allow popup menu while the Contextual Action Bar is // present showPopup.setEnabled(getListView().getCheckedItemCount() == 0); } @Override public void onActivityCreated(final Bundle savedInstanceState) { if (savedInstanceState != null) selectedItemNote = savedInstanceState .getString(ContractionListFragmentV11.SELECTED_ITEM_NOTE_KEY); super.onActivityCreated(savedInstanceState); } @Override public void onClick(final View v) { final PopupMenu popup = new PopupMenu(getActivity(), v); final MenuInflater inflater = popup.getMenuInflater(); inflater.inflate(R.menu.list_context, popup.getMenu()); final PopupHolder popupHolder = (PopupHolder) v.getTag(); final MenuItem noteItem = popup.getMenu().findItem( R.id.menu_context_note); if (popupHolder.existingNote.equals("")) noteItem.setTitle(R.string.note_dialog_title_add); else noteItem.setTitle(R.string.note_dialog_title_edit); final MenuItem deleteItem = popup.getMenu().findItem( R.id.menu_context_delete); deleteItem.setTitle(getResources().getQuantityText( R.plurals.menu_context_delete, 1)); popup.setOnMenuItemClickListener(new OnMenuItemClickListener() { @Override public boolean onMenuItemClick(final MenuItem item) { switch (item.getItemId()) { case R.id.menu_context_view: if (BuildConfig.DEBUG) 
Log.d(getClass().getSimpleName(), "Popup Menu selected view"); AnalyticsManagerService.trackEvent(getActivity(), "PopupMenu", "View"); viewContraction(popupHolder.id); return true; case R.id.menu_context_note: if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Popup Menu selected " + (popupHolder.existingNote .equals("") ? "Add Note" : "Edit Note")); AnalyticsManagerService.trackEvent(getActivity(), "PopupMenu", "Note", popupHolder.existingNote .equals("") ? "Add Note" : "Edit Note"); showNoteDialog(popupHolder.id, popupHolder.existingNote); return true; case R.id.menu_context_delete: if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Popup Menu selected delete"); AnalyticsManagerService.trackEvent(getActivity(), "PopupMenu", "Delete"); deleteContraction(popupHolder.id); return true; default: return false; } } }); popup.show(); } @Override public void onSaveInstanceState(final Bundle outState) { super.onSaveInstanceState(outState); outState.putString(ContractionListFragmentV11.SELECTED_ITEM_NOTE_KEY, selectedItemNote); } /** * Sets up the ListView for multiple item selection with the Contextual * Action Bar */ @Override protected void setupListView() { final ListView listView = getListView(); listView.setDrawSelectorOnTop(true); listView.setChoiceMode(AbsListView.CHOICE_MODE_MULTIPLE_MODAL); listView.setMultiChoiceModeListener(new MultiChoiceModeListener() { @Override public boolean onActionItemClicked(final ActionMode mode, final MenuItem item) { final long contractionId = listView.getCheckedItemIds()[0]; switch (item.getItemId()) { case R.id.menu_context_view: if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Context Action Mode selected view"); AnalyticsManagerService.trackEvent(getActivity(), "ContextActionBar", "View"); viewContraction(contractionId); return true; case R.id.menu_context_note: final int position = listView.getCheckedItemPositions() .keyAt(0); final Cursor cursor = (Cursor) listView.getAdapter() .getItem(position); final int noteColumnIndex = cursor .getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE); final String existingNote = cursor .getString(noteColumnIndex); if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Context Action Mode selected " + (existingNote.equals("") ? "Add Note" : "Edit Note")); AnalyticsManagerService.trackEvent(getActivity(), "ContextActionBar", "Note", existingNote .equals("") ? 
"Add Note" : "Edit Note", position); showNoteDialog(contractionId, existingNote); mode.finish(); return true; case R.id.menu_context_delete: final long[] selectedIds = getListView() .getCheckedItemIds(); if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Context Action Mode selected delete"); AnalyticsManagerService.trackEvent(getActivity(), "ContextActionBar", "Delete", "", selectedIds.length); for (final long id : selectedIds) deleteContraction(id); mode.finish(); return true; default: return false; } } @Override public boolean onCreateActionMode(final ActionMode mode, final Menu menu) { final MenuInflater inflater = mode.getMenuInflater(); inflater.inflate(R.menu.list_context, menu); return true; } @Override public void onDestroyActionMode(final ActionMode mode) { // Nothing to do } @Override public void onItemCheckedStateChanged(final ActionMode mode, final int position, final long id, final boolean checked) { final int selectedItemsSize = listView.getCheckedItemCount(); if (selectedItemsSize == 0) return; // This is called in the middle of the ListView's selected items // being refreshed (in a state where the getCheckedItemCount // call returns the new number of items, but the // getCheckedItemPositions() call returns the old items. // Therefore to give the ListView some time to stabilize, we // post this call to invalidate getView().post(new Runnable() { @Override public void run() { mode.invalidate(); } }); } @Override public boolean onPrepareActionMode(final ActionMode mode, final Menu menu) { final int selectedItemsSize = listView.getCheckedItemCount(); // Show or hide the view menu item final MenuItem viewItem = menu.findItem(R.id.menu_context_view); final boolean showViewItem = selectedItemsSize == 1; viewItem.setVisible(showViewItem); // Set whether to display the note menu item final MenuItem noteItem = menu.findItem(R.id.menu_context_note); final boolean showNoteItem = selectedItemsSize == 1; // Set the title of the note menu item if (showNoteItem) { final int position = listView.getCheckedItemPositions() .keyAt(0); final ListAdapter adapter = listView.getAdapter(); final Cursor cursor = position < adapter.getCount() ? 
(Cursor) adapter .getItem(position) : null; // The cursor will be null when first resuming the Fragment // so we'll used the selectedItemNote loaded from the Bundle if (cursor != null) { final int noteColumnIndex = cursor .getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE); selectedItemNote = cursor.getString(noteColumnIndex); } if ("".equals(selectedItemNote)) noteItem.setTitle(R.string.note_dialog_title_add); else noteItem.setTitle(R.string.note_dialog_title_edit); } noteItem.setVisible(showNoteItem); // Set the title of the delete menu item final MenuItem deleteItem = menu .findItem(R.id.menu_context_delete); final CharSequence currentTitle = deleteItem.getTitle(); final CharSequence newTitle = getResources().getQuantityText( R.plurals.menu_context_delete, selectedItemsSize); deleteItem.setTitle(newTitle); // Set the Contextual Action Bar title with the new item // size final CharSequence modeTitle = mode.getTitle(); final CharSequence newModeTitle = String.format( getString(R.string.menu_context_action_mode_title), selectedItemsSize); mode.setTitle(newModeTitle); return !newModeTitle.equals(modeTitle) || !newTitle.equals(currentTitle); } }); } @Override protected void setupNewView(final View view) { final Button showPopup = (Button) view.findViewById(R.id.show_popup); view.setTag(R.id.show_popup, showPopup); showPopup.setOnClickListener(this); } }
src/com/ianhanniballake/contractiontimer/ui/ContractionListFragmentV11.java
package com.ianhanniballake.contractiontimer.ui; import android.annotation.TargetApi; import android.database.Cursor; import android.os.Bundle; import android.provider.BaseColumns; import android.util.Log; import android.view.ActionMode; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.widget.AbsListView; import android.widget.AbsListView.MultiChoiceModeListener; import android.widget.Button; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.PopupMenu; import android.widget.PopupMenu.OnMenuItemClickListener; import com.ianhanniballake.contractiontimer.BuildConfig; import com.ianhanniballake.contractiontimer.R; import com.ianhanniballake.contractiontimer.analytics.AnalyticsManagerService; import com.ianhanniballake.contractiontimer.provider.ContractionContract; /** * Fragment to list contractions entered by the user */ @TargetApi(11) public class ContractionListFragmentV11 extends ContractionListFragment implements OnClickListener { /** * Helper class used to store temporary information to aid in handling * PopupMenu item selection */ static class PopupHolder { /** * A contraction's note, if any */ String existingNote; /** * Cursor id for the contraction */ long id; } /** * Key used to store the selected item note in the bundle */ private final static String SELECTED_ITEM_NOTE_KEY = "com.ianhanniballake.contractiontimer.SELECTED_ITEM_NOTE_KEY"; /** * Note associated with the currently selected item */ private String selectedItemNote = null; @Override protected void bindView(final View view, final Cursor cursor) { final Button showPopup = (Button) view.getTag(R.id.show_popup); final Object showPopupTag = showPopup.getTag(); PopupHolder popupHolder; if (showPopupTag == null) { popupHolder = new PopupHolder(); showPopup.setTag(popupHolder); } else popupHolder = (PopupHolder) showPopupTag; final int idColumnIndex = cursor.getColumnIndex(BaseColumns._ID); popupHolder.id = cursor.getLong(idColumnIndex); final int noteColumnIndex = cursor .getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE); final String note = cursor.getString(noteColumnIndex); popupHolder.existingNote = note; // Don't allow popup menu while the Contextual Action Bar is // present showPopup.setEnabled(getListView().getCheckedItemCount() == 0); } @Override public void onActivityCreated(final Bundle savedInstanceState) { if (savedInstanceState != null) selectedItemNote = savedInstanceState .getString(ContractionListFragmentV11.SELECTED_ITEM_NOTE_KEY); super.onActivityCreated(savedInstanceState); } @Override public void onClick(final View v) { final PopupMenu popup = new PopupMenu(getActivity(), v); final MenuInflater inflater = popup.getMenuInflater(); inflater.inflate(R.menu.list_context, popup.getMenu()); final PopupHolder popupHolder = (PopupHolder) v.getTag(); final MenuItem noteItem = popup.getMenu().findItem( R.id.menu_context_note); if (popupHolder.existingNote.equals("")) noteItem.setTitle(R.string.note_dialog_title_add); else noteItem.setTitle(R.string.note_dialog_title_edit); final MenuItem deleteItem = popup.getMenu().findItem( R.id.menu_context_delete); deleteItem.setTitle(getResources().getQuantityText( R.plurals.menu_context_delete, 1)); popup.setOnMenuItemClickListener(new OnMenuItemClickListener() { @Override public boolean onMenuItemClick(final MenuItem item) { switch (item.getItemId()) { case R.id.menu_context_view: if (BuildConfig.DEBUG) 
Log.d(getClass().getSimpleName(), "Popup Menu selected view"); AnalyticsManagerService.trackEvent(getActivity(), "PopupMenu", "View"); viewContraction(popupHolder.id); return true; case R.id.menu_context_note: if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Popup Menu selected " + (popupHolder.existingNote .equals("") ? "Add Note" : "Edit Note")); AnalyticsManagerService.trackEvent(getActivity(), "PopupMenu", "Note", popupHolder.existingNote .equals("") ? "Add Note" : "Edit Note"); showNoteDialog(popupHolder.id, popupHolder.existingNote); return true; case R.id.menu_context_delete: if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Popup Menu selected delete"); AnalyticsManagerService.trackEvent(getActivity(), "PopupMenu", "Delete"); deleteContraction(popupHolder.id); return true; default: return false; } } }); popup.show(); } @Override public void onSaveInstanceState(final Bundle outState) { super.onSaveInstanceState(outState); outState.putString(ContractionListFragmentV11.SELECTED_ITEM_NOTE_KEY, selectedItemNote); } /** * Sets up the ListView for multiple item selection with the Contextual * Action Bar */ @Override protected void setupListView() { final ListView listView = getListView(); listView.setDrawSelectorOnTop(true); listView.setChoiceMode(AbsListView.CHOICE_MODE_MULTIPLE_MODAL); listView.setMultiChoiceModeListener(new MultiChoiceModeListener() { @Override public boolean onActionItemClicked(final ActionMode mode, final MenuItem item) { final long contractionId = listView.getCheckedItemIds()[0]; switch (item.getItemId()) { case R.id.menu_context_view: if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Context Action Mode selected view"); AnalyticsManagerService.trackEvent(getActivity(), "ContextActionBar", "View"); viewContraction(contractionId); return true; case R.id.menu_context_note: final int position = listView.getCheckedItemPositions() .keyAt(0); final Cursor cursor = (Cursor) listView.getAdapter() .getItem(position); final int noteColumnIndex = cursor .getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE); final String existingNote = cursor .getString(noteColumnIndex); if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Context Action Mode selected " + (existingNote.equals("") ? "Add Note" : "Edit Note")); AnalyticsManagerService.trackEvent(getActivity(), "ContextActionBar", "Note", existingNote .equals("") ? 
"Add Note" : "Edit Note", position); showNoteDialog(contractionId, existingNote); mode.finish(); return true; case R.id.menu_context_delete: final long[] selectedIds = getListView() .getCheckedItemIds(); if (BuildConfig.DEBUG) Log.d(getClass().getSimpleName(), "Context Action Mode selected delete"); AnalyticsManagerService.trackEvent(getActivity(), "ContextActionBar", "Delete", "", selectedIds.length); for (final long id : selectedIds) deleteContraction(id); mode.finish(); return true; default: return false; } } @Override public boolean onCreateActionMode(final ActionMode mode, final Menu menu) { final MenuInflater inflater = mode.getMenuInflater(); inflater.inflate(R.menu.list_context, menu); return true; } @Override public void onDestroyActionMode(final ActionMode mode) { // Nothing to do } @Override public void onItemCheckedStateChanged(final ActionMode mode, final int position, final long id, final boolean checked) { final int selectedItemsSize = listView.getCheckedItemCount(); if (selectedItemsSize == 0) return; // This is called in the middle of the ListView's selected items // being refreshed (in a state where the getCheckedItemCount // call returns the new number of items, but the // getCheckedItemPositions() call returns the old items. // Therefore to give the ListView some time to stabilize, we // post this call to invalidate getView().post(new Runnable() { @Override public void run() { mode.invalidate(); } }); } @Override public boolean onPrepareActionMode(final ActionMode mode, final Menu menu) { final int selectedItemsSize = listView.getCheckedItemCount(); // Show or hide the view menu item final MenuItem viewItem = menu.findItem(R.id.menu_context_view); final boolean showViewItem = selectedItemsSize == 1; viewItem.setVisible(showViewItem); // Set whether to display the note menu item final MenuItem noteItem = menu.findItem(R.id.menu_context_note); final boolean showNoteItem = selectedItemsSize == 1; // Set the title of the note menu item if (showNoteItem) { final int position = listView.getCheckedItemPositions() .keyAt(0); final ListAdapter adapter = listView.getAdapter(); final Cursor cursor = adapter.getCount() == 0 ? 
null : (Cursor) listView.getAdapter().getItem(position); // The cursor will be null when first resuming the Fragment // so we'll used the selectedItemNote loaded from the Bundle if (cursor != null) { final int noteColumnIndex = cursor .getColumnIndex(ContractionContract.Contractions.COLUMN_NAME_NOTE); selectedItemNote = cursor.getString(noteColumnIndex); } if ("".equals(selectedItemNote)) noteItem.setTitle(R.string.note_dialog_title_add); else noteItem.setTitle(R.string.note_dialog_title_edit); } noteItem.setVisible(showNoteItem); // Set the title of the delete menu item final MenuItem deleteItem = menu .findItem(R.id.menu_context_delete); final CharSequence currentTitle = deleteItem.getTitle(); final CharSequence newTitle = getResources().getQuantityText( R.plurals.menu_context_delete, selectedItemsSize); deleteItem.setTitle(newTitle); // Set the Contextual Action Bar title with the new item // size final CharSequence modeTitle = mode.getTitle(); final CharSequence newModeTitle = String.format( getString(R.string.menu_context_action_mode_title), selectedItemsSize); mode.setTitle(newModeTitle); return !newModeTitle.equals(modeTitle) || !newTitle.equals(currentTitle); } }); } @Override protected void setupNewView(final View view) { final Button showPopup = (Button) view.findViewById(R.id.show_popup); view.setTag(R.id.show_popup, showPopup); showPopup.setOnClickListener(this); } }
Fixes GH-85 - Crash List ArrayIndexOutOfBoundsException. Now checks the position to ensure a valid getItem index. Signed-off-by: Ian Lake <0e02b6350c05034358d68ff2f908baca74d16e33@gmail.com>
src/com/ianhanniballake/contractiontimer/ui/ContractionListFragmentV11.java
Fixes GH-85 - Crash List ArrayIndexOutOfBoundsException
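The diff above replaces an unguarded adapter.getItem(position) with a count check (position < adapter.getCount() ? getItem(position) : null), falling back to the note saved in the Bundle when the checked position is stale. A minimal sketch of that guard on a plain JVM, with a small stand-in Adapter interface instead of android.widget.ListAdapter:

import java.util.Arrays;
import java.util.List;

// Sketch of the GH-85 guard: never call getItem(position) unless the position is
// still inside the adapter's current bounds; otherwise fall back to a previously
// saved value. Adapter here is a stand-in for android.widget.ListAdapter.
public class SafeAdapterAccessSketch {

    interface Adapter<T> {
        int getCount();
        T getItem(int position);
    }

    static <T> T getItemOrFallback(final Adapter<T> adapter, final int position, final T fallback) {
        // The position can be momentarily stale while the list refreshes its checked items.
        return position >= 0 && position < adapter.getCount() ? adapter.getItem(position) : fallback;
    }

    public static void main(final String[] args) {
        final List<String> notes = Arrays.asList("note A", "note B");
        final Adapter<String> adapter = new Adapter<String>() {
            @Override public int getCount() { return notes.size(); }
            @Override public String getItem(final int position) { return notes.get(position); }
        };
        System.out.println(getItemOrFallback(adapter, 1, "saved note")); // note B
        System.out.println(getItemOrFallback(adapter, 5, "saved note")); // saved note (stale position)
    }
}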
Java
bsd-3-clause
7cc720e6200e358bfac92f91d7d3917c486eeaba
0
lgwagner/jkind,lgwagner/jkind
package jkind.smv.visitors; import jkind.smv.SMVArrayAccessExpr; import jkind.smv.SMVArrayExpr; import jkind.smv.SMVArrayUpdateExpr; import jkind.smv.SMVBinaryExpr; import jkind.smv.SMVBoolExpr; import jkind.smv.SMVCaseExpr; import jkind.smv.SMVCastExpr; import jkind.smv.SMVFunctionCallExpr; import jkind.smv.SMVIdExpr; import jkind.smv.SMVInitIdExpr; import jkind.smv.SMVIntExpr; import jkind.smv.SMVNextIdExpr; import jkind.smv.SMVRealExpr; import jkind.smv.SMVUnaryExpr; public interface SMVExprVisitor<T> { public T visit(SMVArrayAccessExpr e); public T visit(SMVArrayExpr e); public T visit(SMVArrayUpdateExpr e); public T visit(SMVBinaryExpr e); public T visit(SMVBoolExpr e); public T visit(SMVCastExpr e); // public T visit(CondactExpr e); public T visit(SMVFunctionCallExpr e); public T visit(SMVIdExpr e); public T visit(SMVInitIdExpr e); public T visit(SMVNextIdExpr e); public T visit(SMVCaseExpr e); public T visit(SMVIntExpr e); // public T visit(SMVModuleCallExpr e); public T visit(SMVRealExpr e); // public T visit(RecordAccessExpr e); // public T visit(RecordExpr e); // public T visit(RecordUpdateExpr e); // public T visit(TupleExpr e); public T visit(SMVUnaryExpr e); }
jkind-common/src/jkind/smv/visitors/SMVExprVisitor.java
package jkind.smv.visitors; import jkind.smv.SMVBinaryExpr; import jkind.smv.SMVBoolExpr; import jkind.smv.SMVCaseExpr; import jkind.smv.SMVCastExpr; import jkind.smv.SMVFunctionCallExpr; import jkind.smv.SMVIdExpr; import jkind.smv.SMVInitIdExpr; import jkind.smv.SMVIntExpr; import jkind.smv.SMVNextIdExpr; import jkind.smv.SMVRealExpr; import jkind.smv.SMVUnaryExpr; public interface SMVExprVisitor<T> { // public T visit(ArrayAccessExpr e); // public T visit(ArrayExpr e); // public T visit(ArrayUpdateExpr e); public T visit(SMVBinaryExpr e); public T visit(SMVBoolExpr e); public T visit(SMVCastExpr e); // public T visit(CondactExpr e); public T visit(SMVFunctionCallExpr e); public T visit(SMVIdExpr e); public T visit(SMVInitIdExpr e); public T visit(SMVNextIdExpr e); public T visit(SMVCaseExpr e); public T visit(SMVIntExpr e); // public T visit(SMVNodeCallExpr e); public T visit(SMVRealExpr e); // public T visit(RecordAccessExpr e); // public T visit(RecordExpr e); // public T visit(RecordUpdateExpr e); // public T visit(TupleExpr e); public T visit(SMVUnaryExpr e); }
Changes related to array translation.
jkind-common/src/jkind/smv/visitors/SMVExprVisitor.java
Changes related to array translation.
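The diff adds visit methods for SMVArrayAccessExpr, SMVArrayExpr and SMVArrayUpdateExpr, so every concrete SMVExprVisitor implementation now has to supply cases for the array forms. A generic sketch of that visitor-pattern obligation, using made-up expression classes rather than jkind's SMV AST:

// Generic sketch of extending a visitor interface: once the interface gains a
// visit method for a new node type, every concrete visitor must implement it.
// IdExpr/ArrayAccessExpr here are stand-ins, not jkind's SMV classes.
public class VisitorExtensionSketch {

    interface Expr {
        <T> T accept(ExprVisitor<T> visitor);
    }

    interface ExprVisitor<T> {
        T visit(IdExpr e);
        T visit(ArrayAccessExpr e); // newly added: all visitors must now handle it
    }

    static final class IdExpr implements Expr {
        final String name;
        IdExpr(final String name) { this.name = name; }
        public <T> T accept(final ExprVisitor<T> visitor) { return visitor.visit(this); }
    }

    static final class ArrayAccessExpr implements Expr {
        final Expr array;
        final Expr index;
        ArrayAccessExpr(final Expr array, final Expr index) { this.array = array; this.index = index; }
        public <T> T accept(final ExprVisitor<T> visitor) { return visitor.visit(this); }
    }

    // A printer visitor now has to cover the array case as well.
    static final class Printer implements ExprVisitor<String> {
        public String visit(final IdExpr e) { return e.name; }
        public String visit(final ArrayAccessExpr e) {
            return e.array.accept(this) + "[" + e.index.accept(this) + "]";
        }
    }

    public static void main(final String[] args) {
        final Expr e = new ArrayAccessExpr(new IdExpr("mem"), new IdExpr("addr"));
        System.out.println(e.accept(new Printer())); // prints mem[addr]
    }
}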
Java
bsd-3-clause
06dfea138a5b64a60991f65c14a9bb1087ea1ecb
0
jjfiv/galago-git,jjfiv/galago-git,jjfiv/galago-git
// BSD License (http://lemurproject.org/galago-license) package org.lemurproject.galago.core.retrieval.query; import javax.annotation.Nonnull; import java.io.Serializable; import java.text.DecimalFormat; import java.util.*; /** * <p>Node represents a single node in a query parse tree.</p> * * <p>In Galago, queries are parsed into a tree of Nodes. The query tree can * then be modified using StructuredQuery.copy, or analyzed by using * StructuredQuery.walk. Once the query is in the proper form, the query is * converted into a tree of iterators that can be evaluated.</p> * * @author trevor, sjh */ public class Node extends AbstractList<Node> implements Serializable { private static final Set<String> defaultOmissionSet; static { defaultOmissionSet = new HashSet<>(); defaultOmissionSet.add("lengths"); defaultOmissionSet.add("passagelengths"); defaultOmissionSet.add("passagefilter"); defaultOmissionSet.add("part"); } /// The query operator represented by this node, like "combine", "weight", "syn", etc. private String operator; /// Child nodes of the operator, e.g. in #combine(a b), 'a' and 'b' are internal nodes of #combine. private List<Node> internalNodes; // Parent node - null if it is root private Node parent; // The position in the text string where this operator starts. Useful for parse error messages. private int position; /// Additional nodeParameters for this operator; usually these are term statistics and smoothing nodeParameters. private NodeParameters nodeParameters; private static final long serialVersionUID = 4553653651892088433L; public Node() { this("", new NodeParameters(), new ArrayList<Node>(), 0); } @Override public int size() { return this.internalNodes.size(); } @Override public Node get(int index) { return this.internalNodes.get(index); } @Override public boolean add(Node n) { this.addChild(n); return true; } @Override public boolean addAll(@Nonnull Collection<? extends Node> nchildren) { for (Node nchild : nchildren) { this.addChild(nchild); } return true; } public Node(String operator) { this(operator, new NodeParameters(), new ArrayList<Node>(), 0); } public Node(String operator, List<Node> internalNodes) { this(operator, new NodeParameters(), internalNodes, 0); } public Node(String operator, List<Node> internalNodes, int position) { this(operator, new NodeParameters(), internalNodes, position); } public Node(String operator, String argument) { this(operator, new NodeParameters(argument), new ArrayList<Node>(), 0); } public Node(String operator, String argument, int position) { this(operator, new NodeParameters(argument), new ArrayList<Node>(), position); } public Node(String operator, String argument, List<Node> internalNodes) { this(operator, new NodeParameters(argument), internalNodes, 0); } public Node(String operator, NodeParameters np) { this(operator, np, new ArrayList<Node>(), 0); } public Node(String operator, NodeParameters np, List<Node> internalNodes) { this(operator, np, internalNodes, 0); } public Node(String operator, String argument, List<Node> internalNodes, int position) { this(operator, new NodeParameters(argument), internalNodes, position); } public Node(String operator, NodeParameters nodeParameters, List<Node> internalNodes, int position) { this.operator = operator; this.position = position; this.nodeParameters = nodeParameters; this.parent = null; this.internalNodes = new ArrayList<>(); for (Node c : internalNodes) { addChild(c); } } /** * Deep-clones this Node. 
Be aware this clones the *entire* subtree rooted at * this node, therefore all descendants are also cloned. * */ @Override public Node clone() { return new Node(operator, nodeParameters.clone(), cloneNodeList(this.internalNodes), position); } public String getDefaultParameter() { return nodeParameters.get("default", null); } public String getOperator() { return operator; } public void setOperator(String op) { this.operator = op; } public void clearChildren() { internalNodes.clear(); } public void removeChildAt(int i) { Node child = internalNodes.remove(i); if (child != null) { assert (child.parent == this); child.parent = null; } } public void replaceChildAt(Node newChild, int i) { assert (i > -1 && i < internalNodes.size()); newChild.parent = this; Node oldChild = internalNodes.set(i, newChild); if (oldChild != newChild) { oldChild.parent = null; } } public void removeChild(Node child) { assert (child.parent == this); child.parent = null; internalNodes.remove(child); } public void addChild(Node child) { this.addChild(child, -1); } public void addChild(Node child, int pos) { // link to this parent child.parent = this; if (pos < internalNodes.size() && pos > -1) { internalNodes.add(pos, child); } else { internalNodes.add(child); } } public Node getChild(int index) { return internalNodes.get(index); } public int numChildren() { return this.internalNodes.size(); } public Iterator<Node> getChildIterator() { return internalNodes.iterator(); } public List<Node> getInternalNodes() { return Collections.unmodifiableList(this.internalNodes); } public int getPosition() { return position; } public NodeParameters getNodeParameters() { return nodeParameters; } public Node getParent() { return parent; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toString()); if (internalNodes.size() == 0) { builder.append("()"); } else { builder.append("( "); for (Node child : internalNodes) { builder.append(child.toString()); builder.append(' '); } builder.append(")"); } return builder.toString(); } public String toPrettyString() { return toPrettyString(""); } public String toPrettyString(String indent) { StringBuilder builder = new StringBuilder(); builder.append(indent); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toString()); if (internalNodes.size() == 0) { builder.append("()\n"); } else { builder.append("(\n"); for (Node child : internalNodes) { builder.append(child.toPrettyString(indent + " ")); } builder.append(indent).append(" ").append(")\n"); } return builder.toString(); } public String toSimplePrettyString() { return toSimplePrettyString("", defaultOmissionSet, ""); } public String toSimplePrettyString(Set<String> ignoreParams) { return toSimplePrettyString("", ignoreParams, ""); } public String toSimplePrettyString(String indent, Set<String> ignoreParams) { return toSimplePrettyString(indent, ignoreParams, ""); } public String toSimplePrettyString(String indent, Set<String> ignoreParams, String addOnString) { StringBuilder builder = new StringBuilder(); if (ignoreParams.contains(operator)) { if (internalNodes.size() != 0) { for (Node child : internalNodes) { String childString = child.toSimplePrettyString(indent, ignoreParams, addOnString); if 
(!childString.replaceAll("\\s", "").equals("")) { builder.append(childString); } } } } else { if (operator.equals("combine")) { builder.append(indent); builder.append(addOnString); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); ArrayList<String> combineWeightList = nodeParameters.collectCombineWeightList(); if (combineWeightList.size() == 0 && internalNodes.size() > 0) { DecimalFormat formatter = new DecimalFormat("###.#####"); String weightString = formatter.format(1.0 / internalNodes.size()); int firstNonZeroIndex = -1; for (int i = 0; i < weightString.length(); i++) { if (weightString.charAt(i) != '0' && weightString.charAt(i) != '.') { firstNonZeroIndex = i; break; } } if (firstNonZeroIndex != -1) { if (weightString.length() >= 5) { firstNonZeroIndex = firstNonZeroIndex >= 4 ? firstNonZeroIndex : 4; weightString = weightString.substring(0, firstNonZeroIndex + 1); } } for (Node internalNode : internalNodes) { combineWeightList.add(weightString); } } //builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); builder.append("(\n"); for (int i = 0; i < internalNodes.size(); i++) { Node child = internalNodes.get(i); String childString = child.toSimplePrettyString(indent + " ", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { if (i < combineWeightList.size()) { builder.append(child.toSimplePrettyString(indent + " ", ignoreParams, combineWeightList.get(i) + "\t")); } else { builder.append(child.toSimplePrettyString(indent + " ", ignoreParams, "")); } builder.append("\n"); } } builder.append(indent).append(")"); } else if (operator.equals("unordered") || operator.equals("ordered") || operator.equals("syn")) { builder.append(indent); builder.append(addOnString); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); builder.append("("); for (Node child : internalNodes) { String childString = child.toSimplePrettyString("", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { builder.append(childString).append(" "); } } builder.append(")"); } else if (operator.equals("extents") || operator.equals("counts")) { builder.append(indent); builder.append(addOnString); builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); for (Node child : internalNodes) { String childString = child.toSimplePrettyString(indent + " ", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { builder.append(childString); } } } else { builder.append(indent); builder.append(addOnString); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); if (internalNodes.size() > 0) { builder.append("("); for (Node child : internalNodes) { String childString = child.toSimplePrettyString(indent + " ", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { builder.append(childString); } } builder.append(indent).append(" ").append(")"); } } } return builder.toString(); } @Override public boolean equals(Object o) { if (!(o instanceof Node)) { return false; } if (o == this) { return true; } Node other = (Node) o; return Objects.equals(operator, other.getOperator()) && Objects.equals(this.getNodeParameters(), 
other.getNodeParameters()) && this.internalNodes.equals(other.internalNodes); } @Override public int hashCode() { int hash = 7; String defp = this.getNodeParameters().getAsString("default"); hash = 67 * hash + (this.operator != null ? this.operator.hashCode() : 0); hash = 67 * hash + (defp != null ? defp.hashCode() : 0); hash = 67 * hash + (this.internalNodes != null ? this.internalNodes.hashCode() : 0); return hash; } public static List<Node> cloneNodeList(List<Node> nodeList) { ArrayList<Node> newNodes = new ArrayList<>(); for (Node n : nodeList) { newNodes.add(n.clone()); } return newNodes; } public void addTerms(List<String> terms) { for (String term : terms) { addChild(Node.Text(term)); } } /** Build a text node at the first position */ public static Node Text(String text) { return Text(text, 0); } /** Build a text node at the given position */ public static Node Text(String text, int position) { return new Node("text", new NodeParameters(text), new ArrayList<Node>(), position); } public boolean isText() { return operator.equals("extents") || operator.equals("counts") || operator.equals("text"); } }
core/src/main/java/org/lemurproject/galago/core/retrieval/query/Node.java
// BSD License (http://lemurproject.org/galago-license) package org.lemurproject.galago.core.retrieval.query; import javax.annotation.Nonnull; import java.io.Serializable; import java.text.DecimalFormat; import java.util.*; /** * <p>Node represents a single node in a query parse tree.</p> * * <p>In Galago, queries are parsed into a tree of Nodes. The query tree can * then be modified using StructuredQuery.copy, or analyzed by using * StructuredQuery.walk. Once the query is in the proper form, the query is * converted into a tree of iterators that can be evaluated.</p> * * @author trevor, sjh */ public class Node extends AbstractList<Node> implements Serializable { private static final Set<String> defaultOmissionSet; static { defaultOmissionSet = new HashSet<>(); defaultOmissionSet.add("lengths"); defaultOmissionSet.add("passagelengths"); defaultOmissionSet.add("passagefilter"); defaultOmissionSet.add("part"); } /// The query operator represented by this node, like "combine", "weight", "syn", etc. private String operator; /// Child nodes of the operator, e.g. in #combine(a b), 'a' and 'b' are internal nodes of #combine. private List<Node> internalNodes; // Parent node - null if it is root private Node parent; // The position in the text string where this operator starts. Useful for parse error messages. private int position; /// Additional nodeParameters for this operator; usually these are term statistics and smoothing nodeParameters. private NodeParameters nodeParameters; private static final long serialVersionUID = 4553653651892088433L; public Node() { this("", new NodeParameters(), new ArrayList<Node>(), 0); } @Override public int size() { return this.internalNodes.size(); } @Override public Node get(int index) { return this.internalNodes.get(index); } @Override public boolean add(Node n) { this.addChild(n); return true; } @Override public boolean addAll(@Nonnull Collection<? extends Node> nchildren) { for (Node nchild : nchildren) { this.addChild(nchild); } return true; } public Node(String operator) { this(operator, new NodeParameters(), new ArrayList<Node>(), 0); } public Node(String operator, List<Node> internalNodes) { this(operator, new NodeParameters(), internalNodes, 0); } public Node(String operator, List<Node> internalNodes, int position) { this(operator, new NodeParameters(), internalNodes, position); } public Node(String operator, String argument) { this(operator, new NodeParameters(argument), new ArrayList<Node>(), 0); } public Node(String operator, String argument, int position) { this(operator, new NodeParameters(argument), new ArrayList<Node>(), position); } public Node(String operator, String argument, List<Node> internalNodes) { this(operator, new NodeParameters(argument), internalNodes, 0); } public Node(String operator, NodeParameters np) { this(operator, np, new ArrayList<Node>(), 0); } public Node(String operator, NodeParameters np, List<Node> internalNodes) { this(operator, np, internalNodes, 0); } public Node(String operator, String argument, List<Node> internalNodes, int position) { this(operator, new NodeParameters(argument), internalNodes, position); } public Node(String operator, NodeParameters nodeParameters, List<Node> internalNodes, int position) { this.operator = operator; this.position = position; this.nodeParameters = nodeParameters; this.parent = null; this.internalNodes = new ArrayList<>(); for (Node c : internalNodes) { addChild(c); } } /** * Deep-clones this Node. 
Be aware this clones the *entire* subtree rooted at * this node, therefore all descendants are also cloned. * */ @Override public Node clone() { return new Node(operator, nodeParameters.clone(), cloneNodeList(this.internalNodes), position); } public String getDefaultParameter() { return nodeParameters.get("default", null); } public String getOperator() { return operator; } public void setOperator(String op) { this.operator = op; } public void clearChildren() { internalNodes.clear(); } public void removeChildAt(int i) { Node child = internalNodes.remove(i); if (child != null) { assert (child.parent == this); child.parent = null; } } public void replaceChildAt(Node newChild, int i) { assert (i > -1 && i < internalNodes.size()); newChild.parent = this; Node oldChild = internalNodes.set(i, newChild); if (oldChild != newChild) { oldChild.parent = null; } } public void removeChild(Node child) { assert (child.parent == this); child.parent = null; internalNodes.remove(child); } public void addChild(Node child) { this.addChild(child, -1); } public void addChild(Node child, int pos) { // link to this parent child.parent = this; if (pos < internalNodes.size() && pos > -1) { internalNodes.add(pos, child); } else { internalNodes.add(child); } } public Node getChild(int index) { return internalNodes.get(index); } public int numChildren() { return this.internalNodes.size(); } public Iterator<Node> getChildIterator() { return internalNodes.iterator(); } public List<Node> getInternalNodes() { return Collections.unmodifiableList(this.internalNodes); } public int getPosition() { return position; } public NodeParameters getNodeParameters() { return nodeParameters; } public Node getParent() { return parent; } @Override public String toString() { StringBuilder builder = new StringBuilder(); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toString()); if (internalNodes.size() == 0) { builder.append("()"); } else { builder.append("( "); for (Node child : internalNodes) { builder.append(child.toString()); builder.append(' '); } builder.append(")"); } return builder.toString(); } public String toPrettyString() { return toPrettyString(""); } public String toPrettyString(String indent) { StringBuilder builder = new StringBuilder(); builder.append(indent); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toString()); if (internalNodes.size() == 0) { builder.append("()\n"); } else { builder.append("(\n"); for (Node child : internalNodes) { builder.append(child.toPrettyString(indent + " ")); } builder.append(indent).append(" ").append(")\n"); } return builder.toString(); } public String toSimplePrettyString() { return toSimplePrettyString("", defaultOmissionSet, ""); } public String toSimplePrettyString(Set<String> ignoreParams) { return toSimplePrettyString("", ignoreParams, ""); } public String toSimplePrettyString(String indent, Set<String> ignoreParams) { return toSimplePrettyString(indent, ignoreParams, ""); } public String toSimplePrettyString(String indent, Set<String> ignoreParams, String addOnString) { StringBuilder builder = new StringBuilder(); if (ignoreParams.contains(operator)) { if (internalNodes.size() != 0) { for (Node child : internalNodes) { String childString = child.toSimplePrettyString(indent, ignoreParams, addOnString); if 
(!childString.replaceAll("\\s", "").equals("")) { builder.append(childString); } } } } else { if (operator.equals("combine")) { builder.append(indent); builder.append(addOnString); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); ArrayList<String> combineWeightList = nodeParameters.collectCombineWeightList(); if (combineWeightList.size() == 0 && internalNodes.size() > 0) { DecimalFormat formatter = new DecimalFormat("###.#####"); String weightString = formatter.format(1.0 / internalNodes.size()); int firstNonZeroIndex = -1; for (int i = 0; i < weightString.length(); i++) { if (weightString.charAt(i) != '0' && weightString.charAt(i) != '.') { firstNonZeroIndex = i; break; } } if (firstNonZeroIndex != -1) { if (weightString.length() >= 5) { firstNonZeroIndex = firstNonZeroIndex >= 4 ? firstNonZeroIndex : 4; weightString = weightString.substring(0, firstNonZeroIndex + 1); } } for (Node internalNode : internalNodes) { combineWeightList.add(weightString); } } //builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); builder.append("(\n"); for (int i = 0; i < internalNodes.size(); i++) { Node child = internalNodes.get(i); String childString = child.toSimplePrettyString(indent + " ", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { if (i < combineWeightList.size()) { builder.append(child.toSimplePrettyString(indent + " ", ignoreParams, combineWeightList.get(i) + "\t")); } else { builder.append(child.toSimplePrettyString(indent + " ", ignoreParams, "")); } builder.append("\n"); } } builder.append(indent).append(")"); } else if (operator.equals("unordered") || operator.equals("ordered") || operator.equals("syn")) { builder.append(indent); builder.append(addOnString); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); builder.append("("); for (Node child : internalNodes) { String childString = child.toSimplePrettyString("", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { builder.append(childString).append(" "); } } builder.append(")"); } else if (operator.equals("extents") || operator.equals("counts")) { builder.append(indent); builder.append(addOnString); builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); for (Node child : internalNodes) { String childString = child.toSimplePrettyString(indent + " ", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { builder.append(childString); } } } else { builder.append(indent); builder.append(addOnString); builder.append('#'); assert !operator.contains(":") && !operator.contains("(") : "Operator can not contain ':' or '('."; builder.append(operator); builder.append(nodeParameters.toSimpleString(ignoreParams, operator)); if (internalNodes.size() > 0) { builder.append("("); for (Node child : internalNodes) { String childString = child.toSimplePrettyString(indent + " ", ignoreParams, ""); if (!childString.replaceAll("\\s", "").equals("")) { builder.append(childString); } } builder.append(indent).append(" ").append(")"); } } } return builder.toString(); } @Override public boolean equals(Object o) { if (!(o instanceof Node)) { return false; } if (o == this) { return true; } Node other = (Node) o; return Objects.equals(operator, other.getOperator()) && Objects.equals(this.getNodeParameters(), 
other.getNodeParameters()) && this.internalNodes.equals(other.internalNodes); } @Override public int hashCode() { int hash = 7; hash = 67 * hash + (this.operator != null ? this.operator.hashCode() : 0); hash = 67 * hash + (this.internalNodes != null ? this.internalNodes.hashCode() : 0); return hash; } public static List<Node> cloneNodeList(List<Node> nodeList) { ArrayList<Node> newNodes = new ArrayList<>(); for (Node n : nodeList) { newNodes.add(n.clone()); } return newNodes; } public void addTerms(List<String> terms) { for (String term : terms) { addChild(Node.Text(term)); } } /** Build a text node at the first position */ public static Node Text(String text) { return Text(text, 0); } /** Build a text node at the given position */ public static Node Text(String text, int position) { return new Node("text", new NodeParameters(text), new ArrayList<Node>(), position); } public boolean isText() { return operator.equals("extents") || operator.equals("counts") || operator.equals("text"); } }
Make Node.hashCode be a lot less terrible by including its default parameter, for text terms especially
core/src/main/java/org/lemurproject/galago/core/retrieval/query/Node.java
Make Node.hashCode be a lot less terrible by including its default parameter, for text terms especially
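The old hashCode mixed in only the operator and the child list; since every text leaf shares the operator "text" and has no children (see the Text factory in the source above), all distinct query terms hashed to the same value. The new version also folds in the "default" node parameter, which for a text node is the term itself. A simplified sketch of the before/after behaviour, using a stand-in class rather than Galago's Node:

import java.util.Objects;

// Sketch: why hashing only the operator (children omitted; text leaves have none)
// makes every #text(term) node collide, and how mixing in the default parameter
// (the term string) spreads them out. SimpleTextNode is a stand-in, not Galago's Node.
public class NodeHashSketch {

    static final class SimpleTextNode {
        final String operator = "text";
        final String defaultParameter; // the term, e.g. #text(galago)

        SimpleTextNode(final String term) {
            this.defaultParameter = term;
        }

        int oldHash() {
            // Old scheme: operator only -- identical for every text leaf.
            return 67 * 7 + operator.hashCode();
        }

        int newHash() {
            // New scheme: also mix in the default parameter (the term).
            int hash = 67 * 7 + operator.hashCode();
            hash = 67 * hash + Objects.hashCode(defaultParameter);
            return hash;
        }
    }

    public static void main(final String[] args) {
        final SimpleTextNode a = new SimpleTextNode("retrieval");
        final SimpleTextNode b = new SimpleTextNode("galago");
        System.out.println(a.oldHash() == b.oldHash()); // true: every text term collides
        System.out.println(a.newHash());                // depends on the term...
        System.out.println(b.newHash());                // ...so distinct terms spread across buckets
    }
}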
Java
bsd-3-clause
b85a7a9b17b0f10a8d2208dafd20379257a00ae0
0
eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j,eclipse/rdf4j
/******************************************************************************* * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/org/documents/edl-v10.php. *******************************************************************************/ package org.eclipse.rdf4j.rio.ntriples; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.Charset; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.eclipse.rdf4j.model.BNode; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Literal; import org.eclipse.rdf4j.model.Statement; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.util.Literals; import org.eclipse.rdf4j.model.vocabulary.XMLSchema; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFWriter; import org.eclipse.rdf4j.rio.RioSetting; import org.eclipse.rdf4j.rio.helpers.AbstractRDFWriter; import org.eclipse.rdf4j.rio.helpers.BasicWriterSettings; import org.eclipse.rdf4j.rio.helpers.NTriplesWriterSettings; /** * An implementation of the RDFWriter interface that writes RDF documents in N-Triples format. The N-Triples * format is defined in <a href="http://www.w3.org/TR/rdf-testcases/#ntriples">this section</a> of the RDF * Test Cases document. */ public class NTriplesWriter extends AbstractRDFWriter implements RDFWriter { /*-----------* * Variables * *-----------*/ protected final Writer writer; protected boolean writingStarted; private boolean xsdStringToPlainLiteral = true; private boolean escapeUnicode; /*--------------* * Constructors * *--------------*/ /** * Creates a new NTriplesWriter that will write to the supplied OutputStream. * * @param out * The OutputStream to write the N-Triples document to. */ public NTriplesWriter(OutputStream out) { this(new OutputStreamWriter(out, Charset.forName("UTF-8"))); } /** * Creates a new NTriplesWriter that will write to the supplied Writer. * * @param writer * The Writer to write the N-Triples document to. */ public NTriplesWriter(Writer writer) { this.writer = writer; writingStarted = false; } /*---------* * Methods * *---------*/ @Override public RDFFormat getRDFFormat() { return RDFFormat.NTRIPLES; } @Override public void startRDF() throws RDFHandlerException { if (writingStarted) { throw new RuntimeException("Document writing has already started"); } writingStarted = true; xsdStringToPlainLiteral = getWriterConfig().get(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL); escapeUnicode = getWriterConfig().get(NTriplesWriterSettings.ESCAPE_UNICODE); } @Override public void endRDF() throws RDFHandlerException { if (!writingStarted) { throw new RuntimeException("Document writing has not yet started"); } try { writer.flush(); } catch (IOException e) { throw new RDFHandlerException(e); } finally { writingStarted = false; } } @Override public void handleNamespace(String prefix, String name) { // N-Triples does not support namespace prefixes. 
} @Override public void handleStatement(Statement st) throws RDFHandlerException { if (!writingStarted) { throw new RuntimeException("Document writing has not yet been started"); } try { writeValue(st.getSubject()); writer.write(" "); writeIRI(st.getPredicate()); writer.write(" "); writeValue(st.getObject()); writer.write(" .\n"); } catch (IOException e) { throw new RDFHandlerException(e); } } @Override public void handleComment(String comment) throws RDFHandlerException { try { writer.write("# "); writer.write(comment); writer.write("\n"); } catch (IOException e) { throw new RDFHandlerException(e); } } @Override public final Collection<RioSetting<?>> getSupportedSettings() { Set<RioSetting<?>> result = new HashSet<RioSetting<?>>(super.getSupportedSettings()); result.add(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL); result.add(NTriplesWriterSettings.ESCAPE_UNICODE); return result; } /** * Writes the N-Triples representation of the given {@link Value}. * * @param value * The value to write. * @throws IOException */ protected void writeValue(Value value) throws IOException { if (value instanceof IRI) { writeIRI((IRI)value); } else if (value instanceof BNode) { writeBNode((BNode)value); } else if (value instanceof Literal) { writeLiteral((Literal)value); } else { throw new IllegalArgumentException("Unknown value type: " + value.getClass()); } } private void writeIRI(IRI iri) throws IOException { writer.append("<"); writeString(iri.stringValue()); writer.append(">"); } private void writeBNode(BNode bNode) throws IOException { String nextId = bNode.getID(); writer.append("_:"); if (nextId.isEmpty()) { writer.append("genid"); writer.append(Integer.toHexString(bNode.hashCode())); } else { if (!NTriplesUtil.isLetter(nextId.charAt(0))) { writer.append("genid"); writer.append(Integer.toHexString(nextId.charAt(0))); } for (int i = 0; i < nextId.length(); i++) { if (NTriplesUtil.isLetterOrNumber(nextId.charAt(i))) { writer.append(nextId.charAt(i)); } else { // Append the character as its hex representation writer.append(Integer.toHexString(nextId.charAt(i))); } } } } /** * Write the N-Triples representation of the given {@link Literal}, optionally ignoring the xsd:string * datatype as it is implied for RDF-1.1. * * @param lit * The literal to write. * @throws IOException */ private void writeLiteral(Literal lit) throws IOException { // Do some character escaping on the label: writer.append("\""); writeString(lit.getLabel()); writer.append("\""); if (Literals.isLanguageLiteral(lit)) { // Append the literal's language writer.append("@"); writer.append(lit.getLanguage().get()); } else { // SES-1917 : In RDF-1.1, all literals have a type, and if they are not // language literals we display the type for backwards compatibility IRI datatype = lit.getDatatype(); if (!datatype.equals(XMLSchema.STRING) || !xsdStringToPlainLiteral) { writer.append("^^"); writeIRI(lit.getDatatype()); } } } /** * Writes a Unicode string to an N-Triples compatible character sequence. Any special characters are * escaped using backslashes (<tt>"</tt> becomes <tt>\"</tt>, etc.), and non-ascii/non-printable * characters are escaped using Unicode escapes (<tt>&#x5C;uxxxx</tt> and <tt>&#x5C;Uxxxxxxxx</tt>) if the * writer config is enabled. * * @throws IOException */ private void writeString(String label) throws IOException { NTriplesUtil.escapeString(label, writer, escapeUnicode); } }
core/rio/ntriples/src/main/java/org/eclipse/rdf4j/rio/ntriples/NTriplesWriter.java
/******************************************************************************* * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Distribution License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/org/documents/edl-v10.php. *******************************************************************************/ package org.eclipse.rdf4j.rio.ntriples; import java.io.IOException; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.nio.charset.Charset; import java.util.Collection; import java.util.HashSet; import java.util.Set; import org.eclipse.rdf4j.model.BNode; import org.eclipse.rdf4j.model.IRI; import org.eclipse.rdf4j.model.Literal; import org.eclipse.rdf4j.model.Statement; import org.eclipse.rdf4j.model.Value; import org.eclipse.rdf4j.model.util.Literals; import org.eclipse.rdf4j.model.vocabulary.XMLSchema; import org.eclipse.rdf4j.rio.RDFFormat; import org.eclipse.rdf4j.rio.RDFHandlerException; import org.eclipse.rdf4j.rio.RDFWriter; import org.eclipse.rdf4j.rio.RioSetting; import org.eclipse.rdf4j.rio.helpers.AbstractRDFWriter; import org.eclipse.rdf4j.rio.helpers.BasicWriterSettings; import org.eclipse.rdf4j.rio.helpers.NTriplesWriterSettings; /** * An implementation of the RDFWriter interface that writes RDF documents in N-Triples format. The N-Triples * format is defined in <a href="http://www.w3.org/TR/rdf-testcases/#ntriples">this section</a> of the RDF * Test Cases document. */ public class NTriplesWriter extends AbstractRDFWriter implements RDFWriter { /*-----------* * Variables * *-----------*/ protected final Writer writer; protected boolean writingStarted; private Boolean xsdStringToPlainLiteral; private Boolean escapeUnicode; /*--------------* * Constructors * *--------------*/ /** * Creates a new NTriplesWriter that will write to the supplied OutputStream. * * @param out * The OutputStream to write the N-Triples document to. */ public NTriplesWriter(OutputStream out) { this(new OutputStreamWriter(out, Charset.forName("UTF-8"))); } /** * Creates a new NTriplesWriter that will write to the supplied Writer. * * @param writer * The Writer to write the N-Triples document to. */ public NTriplesWriter(Writer writer) { this.writer = writer; writingStarted = false; } /*---------* * Methods * *---------*/ @Override public RDFFormat getRDFFormat() { return RDFFormat.NTRIPLES; } @Override public void startRDF() throws RDFHandlerException { if (writingStarted) { throw new RuntimeException("Document writing has already started"); } writingStarted = true; xsdStringToPlainLiteral = getWriterConfig().get(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL); escapeUnicode = getWriterConfig().get(NTriplesWriterSettings.ESCAPE_UNICODE); } @Override public void endRDF() throws RDFHandlerException { if (!writingStarted) { throw new RuntimeException("Document writing has not yet started"); } try { writer.flush(); } catch (IOException e) { throw new RDFHandlerException(e); } finally { writingStarted = false; } } @Override public void handleNamespace(String prefix, String name) { // N-Triples does not support namespace prefixes. 
} @Override public void handleStatement(Statement st) throws RDFHandlerException { if (!writingStarted) { throw new RuntimeException("Document writing has not yet been started"); } try { writeValue(st.getSubject()); writer.write(" "); writeIRI(st.getPredicate()); writer.write(" "); writeValue(st.getObject()); writer.write(" .\n"); } catch (IOException e) { throw new RDFHandlerException(e); } } @Override public void handleComment(String comment) throws RDFHandlerException { try { writer.write("# "); writer.write(comment); writer.write("\n"); } catch (IOException e) { throw new RDFHandlerException(e); } } @Override public final Collection<RioSetting<?>> getSupportedSettings() { Set<RioSetting<?>> result = new HashSet<RioSetting<?>>(super.getSupportedSettings()); result.add(BasicWriterSettings.XSD_STRING_TO_PLAIN_LITERAL); result.add(NTriplesWriterSettings.ESCAPE_UNICODE); return result; } /** * Writes the N-Triples representation of the given {@link Value}. * * @param value * The value to write. * @throws IOException */ protected void writeValue(Value value) throws IOException { if (value instanceof IRI) { writeIRI((IRI)value); } else if (value instanceof BNode) { writeBNode((BNode)value); } else if (value instanceof Literal) { writeLiteral((Literal)value); } else { throw new IllegalArgumentException("Unknown value type: " + value.getClass()); } } private void writeIRI(IRI Iri) throws IOException { writer.append("<"); writeString(Iri.stringValue()); writer.append(">"); } private void writeBNode(BNode bNode) throws IOException { String nextId = bNode.getID(); writer.append("_:"); if (nextId.isEmpty()) { writer.append("genid"); writer.append(Integer.toHexString(bNode.hashCode())); } else { if (!NTriplesUtil.isLetter(nextId.charAt(0))) { writer.append("genid"); writer.append(Integer.toHexString(nextId.charAt(0))); } for (int i = 0; i < nextId.length(); i++) { if (NTriplesUtil.isLetterOrNumber(nextId.charAt(i))) { writer.append(nextId.charAt(i)); } else { // Append the character as its hex representation writer.append(Integer.toHexString(nextId.charAt(i))); } } } } /** * Write the N-Triples representation of the given {@link Literal}, optionally ignoring the xsd:string * datatype as it is implied for RDF-1.1. * * @param lit * The literal to write. * @throws IOException */ private void writeLiteral(Literal lit) throws IOException { // Do some character escaping on the label: writer.append("\""); writeString(lit.getLabel()); writer.append("\""); if (Literals.isLanguageLiteral(lit)) { // Append the literal's language writer.append("@"); writer.append(lit.getLanguage().get()); } else { // SES-1917 : In RDF-1.1, all literals have a type, and if they are not // language literals we display the type for backwards compatibility IRI datatype = lit.getDatatype(); if (!datatype.equals(XMLSchema.STRING) || !xsdStringToPlainLiteral) { writer.append("^^"); writeIRI(lit.getDatatype()); } } } /** * Writes a Unicode string to an N-Triples compatible character sequence. Any special characters are * escaped using backslashes (<tt>"</tt> becomes <tt>\"</tt>, etc.), and non-ascii/non-printable * characters are escaped using Unicode escapes (<tt>&#x5C;uxxxx</tt> and <tt>&#x5C;Uxxxxxxxx</tt>) if the * writer config is enabled. * * @throws IOException */ private void writeString(String label) throws IOException { NTriplesUtil.escapeString(label, writer, escapeUnicode); } }
Issue #850: Use primitive bools Signed-off-by: James Leigh <0f1692401631e7c7273a0e49d197134b31ac8865@ontotext.com>
core/rio/ntriples/src/main/java/org/eclipse/rdf4j/rio/ntriples/NTriplesWriter.java
Issue #850: Use primitive bools
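The change recorded above, reduced to the two NTriplesWriter fields it touches; both full versions appear in the record's old and new contents. The boxed Boolean fields start out null and are only assigned in startRDF(), while the primitive replacements carry a usable default from construction.

// Before (from the record's old contents):
private Boolean xsdStringToPlainLiteral;
private Boolean escapeUnicode;

// After (from the record's new contents): primitive fields with a default,
// so reading them before startRDF() cannot throw a NullPointerException on unboxing.
private boolean xsdStringToPlainLiteral = true;
private boolean escapeUnicode;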
Java
mit
691118c20694665c3683e9c0e37b9573424b76e1
0
DanilaFe/abacus,DanilaFe/abacus
package org.nwapw.abacus.plugin; import org.nwapw.abacus.function.Function; import org.nwapw.abacus.function.Operator; import org.nwapw.abacus.number.NaiveNumber; import org.nwapw.abacus.number.NumberInterface; import java.lang.reflect.InvocationTargetException; import java.util.*; /** * A class that controls instances of plugins, allowing for them * to interact with each other and the calculator. */ public class PluginManager { /** * List of classes loaded by this manager. */ private Set<Class<?>> loadedPluginClasses; /** * A list of loaded plugins. */ private Set<Plugin> plugins; /** * List of functions that have been cached, * that is, found in a plugin and returned. */ private Map<String, Function> cachedFunctions; /** * List of operators that have been cached, * that is, found in a plugin and returned. */ private Map<String, Operator> cachedOperators; /** * List of registered number implementations that have * been cached, that is, found in a plugin and returned. */ private Map<String, Class<? extends NumberInterface>> cachedNumbers; /** * List of registered constant providers for every * number class. */ private Map<Class<?>, java.util.function.Function<String, NaiveNumber>> cachedConstantProviders; /** * List of all functions loaded by the plugins. */ private Set<String> allFunctions; /** * List of all operators loaded by the plugins. */ private Set<String> allOperators; /** * List of all numbers loaded by the plugins. */ private Set<String> allNumbers; /** * The list of plugin listeners attached to this instance. */ private Set<PluginListener> listeners; /** * Creates a new plugin manager. */ public PluginManager() { loadedPluginClasses = new HashSet<>(); plugins = new HashSet<>(); cachedFunctions = new HashMap<>(); cachedOperators = new HashMap<>(); cachedNumbers = new HashMap<>(); cachedConstantProviders = new HashMap<>(); allFunctions = new HashSet<>(); allOperators = new HashSet<>(); allNumbers = new HashSet<>(); listeners = new HashSet<>(); } /** * Searches the plugin list for a certain value, retrieving the Plugin's * list of items of the type using the setFunction and getting the value * of it is available via getFunction. If the value is contained * in the cache, it returns the cached value instead. * * @param plugins the plugin list to search. * @param cache the cache to use * @param setFunction the function to retrieve a set of available T's from the plugin * @param getFunction the function to get the T value under the given name * @param name the name to search for * @param <T> the type of element being search * @return the retrieved element, or null if it was not found. */ private static <T, K> T searchCached(Collection<Plugin> plugins, Map<K, T> cache, java.util.function.Function<Plugin, Set<K>> setFunction, java.util.function.BiFunction<Plugin, K, T> getFunction, K name) { if (cache.containsKey(name)) return cache.get(name); T loadedValue = null; for (Plugin plugin : plugins) { if (setFunction.apply(plugin).contains(name)) { loadedValue = getFunction.apply(plugin, name); break; } } cache.put(name, loadedValue); return loadedValue; } /** * Gets a function under the given name. * * @param name the name of the function * @return the function under the given name. */ public Function functionFor(String name) { return searchCached(plugins, cachedFunctions, Plugin::providedFunctions, Plugin::getFunction, name); } /** * Gets an operator under the given name. * * @param name the name of the operator. * @return the operator under the given name. 
*/ public Operator operatorFor(String name) { return searchCached(plugins, cachedOperators, Plugin::providedOperators, Plugin::getOperator, name); } /** * Gets a numer implementation under the given name. * * @param name the name of the implementation. * @return the implementation class */ public Class<? extends NumberInterface> numberFor(String name) { return searchCached(plugins, cachedNumbers, Plugin::providedNumbers, Plugin::getNumber, name); } /** * Adds an instance of Plugin that already has been instantiated. * * @param plugin the plugin to add. */ public void addInstantiated(Plugin plugin) { if (loadedPluginClasses.contains(plugin.getClass())) return; plugins.add(plugin); loadedPluginClasses.add(plugin.getClass()); } /** * Instantiates a class of plugin, and adds it to this * plugin manager. * * @param newClass the new class to instantiate. */ public void addClass(Class<?> newClass) { if (!Plugin.class.isAssignableFrom(newClass) || newClass == Plugin.class) return; try { addInstantiated((Plugin) newClass.getConstructor(PluginManager.class).newInstance(this)); } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { e.printStackTrace(); } } /** * Loads all the plugins in the PluginManager. */ public void load() { for (Plugin plugin : plugins) plugin.enable(); for (Plugin plugin : plugins) { allFunctions.addAll(plugin.providedFunctions()); allOperators.addAll(plugin.providedOperators()); allNumbers.addAll(plugin.providedNumbers()); } listeners.forEach(e -> e.onLoad(this)); } /** * Unloads all the plugins in the PluginManager. */ public void unload() { for (Plugin plugin : plugins) plugin.disable(); allFunctions.clear(); allOperators.clear(); allNumbers.clear(); listeners.forEach(e -> e.onUnload(this)); } /** * Reloads all the plugins in the PluginManager. */ public void reload() { unload(); reload(); } /** * Gets all the functions loaded by the Plugin Manager. * * @return the set of all functions that were loaded. */ public Set<String> getAllFunctions() { return allFunctions; } /** * Gets all the operators loaded by the Plugin Manager. * * @return the set of all operators that were loaded. */ public Set<String> getAllOperators() { return allOperators; } /** * Gets all the number implementations loaded by the Plugin Manager * * @return the set of all implementations that were loaded */ public Set<String> getAllNumbers() { return allNumbers; } /** * Adds a plugin change listener to this plugin manager. * * @param listener the listener to add. */ public void addListener(PluginListener listener) { listeners.add(listener); } /** * Remove the plugin change listener from this plugin manager. * * @param listener the listener to remove. */ public void removeListener(PluginListener listener) { listeners.remove(listener); } }
src/main/java/org/nwapw/abacus/plugin/PluginManager.java
package org.nwapw.abacus.plugin; import org.nwapw.abacus.function.Function; import org.nwapw.abacus.function.Operator; import org.nwapw.abacus.number.NumberInterface; import java.lang.reflect.InvocationTargetException; import java.util.*; /** * A class that controls instances of plugins, allowing for them * to interact with each other and the calculator. */ public class PluginManager { /** * List of classes loaded by this manager. */ private Set<Class<?>> loadedPluginClasses; /** * A list of loaded plugins. */ private Set<Plugin> plugins; /** * List of functions that have been cached, * that is, found in a plugin and returned. */ private Map<String, Function> cachedFunctions; /** * List of operators that have been cached, * that is, found in a plugin and returned. */ private Map<String, Operator> cachedOperators; /** * List of registered number implementations that have * been cached, that is, found in a plugin and returned. */ private Map<String, Class<? extends NumberInterface>> cachedNumbers; /** * List of all functions loaded by the plugins. */ private Set<String> allFunctions; /** * List of all operators loaded by the plugins. */ private Set<String> allOperators; /** * List of all numbers loaded by the plugins. */ private Set<String> allNumbers; /** * The list of plugin listeners attached to this instance. */ private Set<PluginListener> listeners; /** * Creates a new plugin manager. */ public PluginManager() { loadedPluginClasses = new HashSet<>(); plugins = new HashSet<>(); cachedFunctions = new HashMap<>(); cachedOperators = new HashMap<>(); cachedNumbers = new HashMap<>(); allFunctions = new HashSet<>(); allOperators = new HashSet<>(); allNumbers = new HashSet<>(); listeners = new HashSet<>(); } /** * Searches the plugin list for a certain value, retrieving the Plugin's * list of items of the type using the setFunction and getting the value * of it is available via getFunction. If the value is contained * in the cache, it returns the cached value instead. * * @param plugins the plugin list to search. * @param cache the cache to use * @param setFunction the function to retrieve a set of available T's from the plugin * @param getFunction the function to get the T value under the given name * @param name the name to search for * @param <T> the type of element being search * @return the retrieved element, or null if it was not found. */ private static <T, K> T searchCached(Collection<Plugin> plugins, Map<K, T> cache, java.util.function.Function<Plugin, Set<K>> setFunction, java.util.function.BiFunction<Plugin, K, T> getFunction, K name) { if (cache.containsKey(name)) return cache.get(name); T loadedValue = null; for (Plugin plugin : plugins) { if (setFunction.apply(plugin).contains(name)) { loadedValue = getFunction.apply(plugin, name); break; } } cache.put(name, loadedValue); return loadedValue; } /** * Gets a function under the given name. * * @param name the name of the function * @return the function under the given name. */ public Function functionFor(String name) { return searchCached(plugins, cachedFunctions, Plugin::providedFunctions, Plugin::getFunction, name); } /** * Gets an operator under the given name. * * @param name the name of the operator. * @return the operator under the given name. */ public Operator operatorFor(String name) { return searchCached(plugins, cachedOperators, Plugin::providedOperators, Plugin::getOperator, name); } /** * Gets a numer implementation under the given name. * * @param name the name of the implementation. 
* @return the implementation class */ public Class<? extends NumberInterface> numberFor(String name) { return searchCached(plugins, cachedNumbers, Plugin::providedNumbers, Plugin::getNumber, name); } /** * Adds an instance of Plugin that already has been instantiated. * * @param plugin the plugin to add. */ public void addInstantiated(Plugin plugin) { if (loadedPluginClasses.contains(plugin.getClass())) return; plugins.add(plugin); loadedPluginClasses.add(plugin.getClass()); } /** * Instantiates a class of plugin, and adds it to this * plugin manager. * * @param newClass the new class to instantiate. */ public void addClass(Class<?> newClass) { if (!Plugin.class.isAssignableFrom(newClass) || newClass == Plugin.class) return; try { addInstantiated((Plugin) newClass.getConstructor(PluginManager.class).newInstance(this)); } catch (InstantiationException | IllegalAccessException | NoSuchMethodException | InvocationTargetException e) { e.printStackTrace(); } } /** * Loads all the plugins in the PluginManager. */ public void load() { for (Plugin plugin : plugins) plugin.enable(); for (Plugin plugin : plugins) { allFunctions.addAll(plugin.providedFunctions()); allOperators.addAll(plugin.providedOperators()); allNumbers.addAll(plugin.providedNumbers()); } listeners.forEach(e -> e.onLoad(this)); } /** * Unloads all the plugins in the PluginManager. */ public void unload() { for (Plugin plugin : plugins) plugin.disable(); allFunctions.clear(); allOperators.clear(); allNumbers.clear(); listeners.forEach(e -> e.onUnload(this)); } /** * Reloads all the plugins in the PluginManager. */ public void reload() { unload(); reload(); } /** * Gets all the functions loaded by the Plugin Manager. * * @return the set of all functions that were loaded. */ public Set<String> getAllFunctions() { return allFunctions; } /** * Gets all the operators loaded by the Plugin Manager. * * @return the set of all operators that were loaded. */ public Set<String> getAllOperators() { return allOperators; } /** * Gets all the number implementations loaded by the Plugin Manager * * @return the set of all implementations that were loaded */ public Set<String> getAllNumbers() { return allNumbers; } /** * Adds a plugin change listener to this plugin manager. * * @param listener the listener to add. */ public void addListener(PluginListener listener) { listeners.add(listener); } /** * Remove the plugin change listener from this plugin manager. * * @param listener the listener to remove. */ public void removeListener(PluginListener listener) { listeners.remove(listener); } }
Add a number provider cache.
src/main/java/org/nwapw/abacus/plugin/PluginManager.java
Add a number provider cache.
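The addition recorded above, excerpted from the record's new contents: PluginManager gains one more lazily-filled cache, mapping each number implementation class to its constant provider (NaiveNumber is the plugin's own number type). The elided lines are the existing cache initialisations shown in full in the record.

/**
 * List of registered constant providers for every
 * number class.
 */
private Map<Class<?>, java.util.function.Function<String, NaiveNumber>> cachedConstantProviders;

public PluginManager() {
    // ...existing caches initialised as before...
    cachedConstantProviders = new HashMap<>();
}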
Java
mit
d8f223eea77a30bfb144355b4d6d946175afa877
0
VsnGamer/ElevatorMod
package xyz.vsngamer.elevator; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.InputEvent; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import xyz.vsngamer.elevator.network.NetworkHandler; import xyz.vsngamer.elevator.network.TeleportHandler; import xyz.vsngamer.elevator.network.TeleportRequest; @SideOnly(Side.CLIENT) public class ElevatorHandler { private static boolean lastSneaking; private static boolean lastJumping; @SubscribeEvent public void onInput(InputEvent inputEvent) { EntityPlayer player = Minecraft.getMinecraft().player; if (!player.isSpectator()) { boolean sneaking = player.isSneaking(); if (lastSneaking != sneaking) { lastSneaking = sneaking; if (sneaking) tryTeleport(player, EnumFacing.DOWN); } boolean jumping = player.isJumping; if (lastJumping != jumping) { lastJumping = jumping; if (jumping) tryTeleport(player, EnumFacing.UP); } } } private static void tryTeleport(EntityPlayer player, EnumFacing facing) { World world = player.world; IBlockState fromState = null, toState; BlockPos fromPos = new BlockPos(player.posX, player.posY + 0.5f, player.posZ); boolean elevator = false; for (int i = 0; i <= 2; i++) { fromState = world.getBlockState(fromPos); if (elevator = TeleportHandler.isElevator(fromState)) break; fromPos = fromPos.down(); } if (!elevator) return; BlockPos.MutableBlockPos toPos = new BlockPos.MutableBlockPos(fromPos); while (true) { toPos.setY(toPos.getY() + facing.getFrontOffsetY()); if (Math.abs(toPos.getY() - fromPos.getY()) > 256) break; toState = world.getBlockState(toPos); if (toState.getBlock() == fromState.getBlock()) { if (TeleportHandler.validateTarget(world, toPos)) { NetworkHandler.networkWrapper.sendToServer(new TeleportRequest(fromPos, toPos)); } break; } } } }
src/main/java/xyz/vsngamer/elevator/ElevatorHandler.java
package xyz.vsngamer.elevator; import net.minecraft.block.state.IBlockState; import net.minecraft.client.Minecraft; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.util.EnumFacing; import net.minecraft.util.math.BlockPos; import net.minecraft.world.World; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.common.gameevent.InputEvent; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import xyz.vsngamer.elevator.network.NetworkHandler; import xyz.vsngamer.elevator.network.TeleportHandler; import xyz.vsngamer.elevator.network.TeleportRequest; @SideOnly(Side.CLIENT) public class ElevatorHandler { private static boolean lastSneaking; private static boolean lastJumping; @SubscribeEvent public void onInput(InputEvent inputEvent) { EntityPlayer player = Minecraft.getMinecraft().player; boolean sneaking = player.isSneaking(); if (lastSneaking != sneaking) { lastSneaking = sneaking; if (sneaking) tryTeleport(player, EnumFacing.DOWN); } boolean jumping = player.isJumping; if (lastJumping != jumping) { lastJumping = jumping; if (jumping) tryTeleport(player, EnumFacing.UP); } } private static void tryTeleport(EntityPlayer player, EnumFacing facing) { World world = player.world; IBlockState fromState = null, toState; BlockPos fromPos = new BlockPos(player.posX, player.posY + 0.5f, player.posZ); boolean elevator = false; for (int i = 0; i <= 2; i++) { fromState = world.getBlockState(fromPos); if (elevator = TeleportHandler.isElevator(fromState)) break; fromPos = fromPos.down(); } if (!elevator) return; BlockPos.MutableBlockPos toPos = new BlockPos.MutableBlockPos(fromPos); while (true) { toPos.setY(toPos.getY() + facing.getFrontOffsetY()); if (Math.abs(toPos.getY() - fromPos.getY()) > 256) break; toState = world.getBlockState(toPos); if (toState.getBlock() == fromState.getBlock()) { if (TeleportHandler.validateTarget(world, toPos)) { NetworkHandler.networkWrapper.sendToServer(new TeleportRequest(fromPos, toPos)); } break; } } } }
Fixed spectators using elevators #21
src/main/java/xyz/vsngamer/elevator/ElevatorHandler.java
Fixed spectators using elevators #21
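The fix recorded above comes down to a single guard in onInput; excerpted from the record's new contents, with the unchanged sneak/jump handling elided.

@SubscribeEvent
public void onInput(InputEvent inputEvent) {
    EntityPlayer player = Minecraft.getMinecraft().player;
    // New guard: spectators no longer trigger elevator teleports.
    if (!player.isSpectator()) {
        // ...existing sneak/jump handling unchanged...
    }
}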
Java
mit
778c8e6207baaa7486e7ba7a83f693293ded6b2d
0
MooseElkingtons/mutinyirc
package com.mutinycraft.irc; import java.net.*; import java.util.*; import java.util.concurrent.*; import java.util.logging.*; import com.miraclem4n.mchat.types.InfoType; import net.milkbowl.vault.chat.Chat; import org.bukkit.*; import org.bukkit.configuration.file.*; import org.bukkit.entity.*; import com.massivecraft.factions.*; import com.miraclem4n.mchat.api.*; import com.mutinycraft.irc.io.*; import com.mutinycraft.irc.IRCUser.*; import com.mutinycraft.irc.plugin.*; /** * RFC-2812 compliant IRC bridge. * * @author MooseElkingtons */ public class IRC { public ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<String>(); private HashMap<String, String> ircMsgs = new HashMap<String, String>(), gameMsgs = new HashMap<String, String>(); private HashMap<String, Boolean> ircRelays = new HashMap<String, Boolean>(), gameRelays = new HashMap<String, Boolean>(); private String cmdPrefix = ".", ircPrefix = "", gamePrefix = "", nameFormat = ""; private Plugin plugin; private List<IRCListener> listeners = new ArrayList<IRCListener>(); private HashMap<String, IRCUser> bufferNam = new HashMap<String, IRCUser>(); private Socket socket = null; private Thread input = null, output = null; private String nick = "MutinyIRC"; private String pass = null; private String server = ""; private int port = 6667; private int triedNicks = 0; private HashMap<String, List<IRCUser>> channels = new HashMap<String, List<IRCUser>>(); private HashMap<String, String> whos = new HashMap<String, String>(); private List<String> loadship = new ArrayList<String>(); public IRC(Plugin plugin) { this.plugin = plugin; loadStartupCommands(); loadConfig(); this.registerIRCListener(new ControlListener(this, plugin)); IRCCommandListener gl = new IRCCommandListener(this, plugin); this.registerIRCListener(gl); plugin.getServer().getPluginManager().registerEvents(gl, plugin); } public void loadStartupCommands() { loadship.clear(); FileConfiguration cfg = plugin.getConfig(); if(cfg.contains("config.startup_commands")) loadship.addAll(cfg.getStringList("config.startup_commands")); } public void loadConfig() { gameMsgs.clear(); ircMsgs.clear(); gameRelays.clear(); ircRelays.clear(); FileConfiguration cfg = plugin.getConfig(); String imsg = "irc_to_game.messages."; gameMsgs.put("join", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"join")))); gameMsgs.put("part", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"part")))); gameMsgs.put("kick", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"kick")))); gameMsgs.put("msg", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"msg")))); gameMsgs.put("nick", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"nick")))); gameMsgs.put("me", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"me")))); gameMsgs.put("modes", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"modes")))); String gmsg = "game_to_irc.messages."; ircMsgs.put("join", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"join")))); ircMsgs.put("part", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"part")))); ircMsgs.put("kick", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"kick")))); ircMsgs.put("msg", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"msg")))); ircMsgs.put("me", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"me")))); String irel = "irc_to_game.relay."; gameRelays.put("join", cfg.getBoolean(irel+"join")); 
gameRelays.put("part", cfg.getBoolean(irel+"part")); gameRelays.put("kick", cfg.getBoolean(irel+"kick")); gameRelays.put("msg", cfg.getBoolean(irel+"msg")); gameRelays.put("nick", cfg.getBoolean(irel+"nick")); gameRelays.put("me", cfg.getBoolean(irel+"me")); gameRelays.put("modes", cfg.getBoolean(irel+"modes")); gameRelays.put("color", cfg.getBoolean(irel+"color")); String grel = "game_to_irc.relay."; ircRelays.put("join", cfg.getBoolean(grel+"join")); ircRelays.put("part", cfg.getBoolean(grel+"part")); ircRelays.put("kick", cfg.getBoolean(grel+"kick")); ircRelays.put("msg", cfg.getBoolean(grel+"msg")); ircRelays.put("me", cfg.getBoolean(grel+"me")); ircRelays.put("color", cfg.getBoolean(grel+"color")); cmdPrefix = cfg.getString("config.command_prefix"); ircPrefix = ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString("game_to_irc.prefix"))); gamePrefix = ChatUtil.correctCC(cfg.getString("irc_to_game.prefix")); nameFormat = ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString("game_to_irc.name_format"))); } /** * Joins an IRC channel. * * @param channel The channel to join. */ public void joinChannel(String channel) { sendRaw("JOIN "+channel); } /** * Formats channel names properly. * * @param channel The channel name to format. * @return The formatted channel name. */ public String formatChannel(String channel) { String c = channel.replace(":", ""); if(!isChannel(channel)) c = "#" + channel; return c; } public boolean isChannel(String string) { return string.startsWith("#") || string.startsWith("&") || string.startsWith("+") || string.startsWith("!"); } /** * Queues an action to be sent to IRC. * * @param recipient the receiver of the action. * @param action The action to send. */ public void sendAction(String recipient, String action) { sendCTCP(recipient, "ACTION "+action); } /** * Sends a notice to the recipient. Used for CTCP responses. * * @param recipient The receiver of the notice. * @param message The notice to send. */ public void sendNotice(String recipient, String message) { sendRaw("NOTICE "+recipient+" :"+message); } /** * Queues a message to be sent to IRC. * * @param recipient The receiver of the message. * @param message The message to send. */ public void sendMessage(String recipient, String message) { String msg = message.replace("\r\n", " "); recipient = recipient.replace(":", ""); String preCmd = "PRIVMSG "+recipient+" :"; int clen = 420 - preCmd.length() - recipient.length(); List<String> msgs = new ArrayList<String>(); for(int i = 0; i < message.length(); i+=clen) msgs.add(msg .substring(i, Math.min(message.length(), i + clen))); for(String s : msgs) sendRaw(preCmd+s); } /** * Sends a raw line to IRC. Only recommended for advanced usage. * * @param rawLine The raw line to send to IRC. */ public void sendRaw(String rawLine) { if(!isConnected()) return; queue.add(rawLine); } /** * Sends Client-to-Client Protocol (CTCP) query to specified recipient. * * @param recipient The recipient of the CTCP query * @param ctcp The CTCP message */ public void sendCTCP(String recipient, String ctcp) { sendRaw("PRIVMSG "+recipient+" :\u000001"+ctcp+"\u000001"); } /** * Disconnect/quits from IRC. */ public void disconnect() { if(!isConnected()) return; try { socket.close(); socket = null; input = null; output = null; whos.clear(); channels.clear(); for(IRCListener l : listeners) l.onDisconnect(); } catch(Exception e) { plugin.getLogger().log(Level.SEVERE, "Encountered an error while " + "disconnecting from IRC.", e); } } /** * Attempts to reconnect to IRC. 
*/ public void reconnect() { plugin.getLogger().log(Level.INFO, "Attempting to reconnect to IRC."); disconnect(); connect(socket); } /** * * @return whether or not the IRC bridge is connected to any IRC server. */ public boolean isConnected() { return socket != null && socket.isConnected(); } /** * Connects the IRC bridge to specified socket. * * @param socket the socket for the IRC bridge to connect to. */ public void connect(Socket socket) { if(isConnected()) return; this.socket = socket; server = socket.getInetAddress().getHostAddress(); port = socket.getPort(); input = new Thread(new IRCInputThread(plugin, socket, this)); output = new Thread(new IRCOutputThread(plugin, socket, this)); input.start(); output.start(); for(IRCListener l : listeners) l.onConnect(); } public List<String> getChannels() { return new ArrayList<String>(channels.keySet()); } public CMode getUserCMode(String channel, String nick) { for(IRCUser u : channels.get(channel)) { if(u.getNick().equalsIgnoreCase(nick)) return u.getMode(); } return CMode.CMODE_NORMAL; } /** * Obtains a List containing all of the users in the channel specified. * * @return the user list */ public List<IRCUser> getUsers(String channel) { return channels.get(channel); } /** * Gets every IRC user in every channel we are in. * */ public List<IRCUser> getAllUsers() { List<IRCUser> users = new ArrayList<IRCUser>(); for(String c : getChannels()) users.addAll(channels.get(c)); return users; } /** * Gets the user hostname based on the nick. * * @param nick The nick of the user * @return The hostname */ public String getUserHost(String nick) { return whos.get(nick.toLowerCase()).split("@")[1]; } /** * Gets the user login name based on the nick. * * @param nick The nick of the user * @return The login name */ public String getUserLogin(String nick) { return whos.get(nick.toLowerCase()).split("@")[0]; } /** * Checks if the IRC bridge is in a same channel as the specified user. * * @param nick The nick to check. * @return Whether or not the IRC bridge can see the specified user. */ public boolean isUserVisible(String nick) { return whos.containsKey(nick.toLowerCase()); } /** * Sets the IRC bridge's nickname. * * @param nick */ public void setNick(String nick) { if(isConnected()) sendRaw("NICK "+nick); this.nick = nick; } /** * Gets the name of the IRC Bridge. * * @return IRC Bridge Nick used. */ public String getNick() { return nick; } /** * Gets the server to connect to. * * @return the server to connect to. */ public String getServer() { return server; } /** * The port for the server. * * @return the port. */ public int getPort() { return port; } /** * Sets the server to connect to. Will not modify present connections. * * @param server The server to connect to. */ public void setServer(String server) { this.server = server; } /** * Sets the server port. Default 6667. * * @param port The port. */ public void setPort(int port) { this.port = port; } /** * Sets IRC Server Pass to be used on connection. * * @param pass the pass to use. */ public void setPass(String pass) { this.pass = pass; } /** * Registers an IRCListener to the IRC bridge. */ public void registerIRCListener(IRCListener listener) { listeners.add(listener); } public List<IRCListener> getIRCListeners() { return listeners; } /** * Sends Game message to IRC. * * @param message the string to send to IRC. */ public void sendIrcMessage(String message) { String m = ircPrefix + (ircRelays.get("color") ? 
ChatUtil.gameToIrcColors(message) : ChatUtil.stripGameColors(message)); for(String channel : channels.keySet()) sendMessage(channel, m); } /** * Sends IRC message to game. * * @param message the string to send to game. */ public void sendGameMessage(String message) { String m = gamePrefix + ( gameRelays.get("color") ? ChatUtil.ircToGameColors(message) : ChatUtil.stripIrcColors(message)); for(Player p : plugin.getServer().getOnlinePlayers()) p.sendMessage(m); if(plugin.isVerbose()) plugin.getServer().getConsoleSender().sendMessage(m); } public String getIrcMessage(String key) { return ircMsgs.get(key); } public String getGameMessage(String key) { return gameMsgs.get(key); } public boolean getIrcRelay(String key) { return ircRelays.get(key); } public boolean getGameRelay(String key) { return gameRelays.get(key); } public String getCommandPrefix() { return cmdPrefix; } public String getIrcMsgPrefix() { return ircPrefix; } public String getGameMsgPrefix() { return gamePrefix; } public String formatGameMessage(Player player, String type) { String name = player.getName(); World world = player.getWorld(); String fname = getIrcMessage(type) .replace("%nf%", nameFormat) .replace("%name%", name) .replace("%dname%", player.getDisplayName()) .replace("%world%", world.getName()); if(plugin.isVaultEnabled()) { Chat chat = plugin.getVaultChat(); String group = chat.getPrimaryGroup(player); fname = fname .replace("%group%", group) .replace("%gprefix%", chat .getGroupPrefix(world, group)) .replace("%gsuffix%", chat .getGroupSuffix(world, group)) .replace("%prefix%", chat.getPlayerPrefix(player)) .replace("%suffix%", chat.getPlayerSuffix(player)); } if(plugin.isMChatEnabled()) { fname = fname.replace("%mprefix%", getPrefix(player)) .replace("%msuffix%", getSuffix(player)); } if(plugin.isFactionsEnabled()) { String tag = P.p.getPlayerFactionTag(player); FPlayer p = FPlayers.i.get(player); boolean peaceful = p.getFaction().isPeaceful(); if(tag.equals("~")) tag = ""; if(peaceful) tag = "\u00A76"+tag; fname = fname.replace("%ftag%", tag); } return ChatUtil.alltrim(ChatUtil.correctCC(fname)); } public String toGameColor(String message) { return gameRelays.get("color") ? ChatUtil.ircToGameColors(message) : ChatUtil.stripIrcColors(message); } public String toIrcColor(String message) { return ircRelays.get("color") ? 
ChatUtil.gameToIrcColors(message) : ChatUtil.stripGameColors(message); } class ControlListener extends IRCListener { public ControlListener(IRC irc, Plugin plugin) { super(irc, plugin); } @Override public void onConnect() { if(pass != null && !pass.isEmpty()) sendRaw("PASS "+pass); sendRaw("NICK "+nick); sendRaw("USER "+nick+" 0 * :MutinyIRC"); } @Override public void onDisconnect() { plugin.getLogger().log(Level.INFO,"Disconnected from "+server+"."); } @Override public void onPart(String user, String channel) { whos.remove(user.toLowerCase()); channel = channel.replace(":", ""); if(user.equalsIgnoreCase(nick)) channels.remove(channel.toLowerCase()); else sendRaw("NAMES "+channel); } @Override public void onJoin(String user, String login, String host, String channel) { whos.put(user.toLowerCase(), login+"@"+host); channel = channel.replace(":", ""); if(!channels.containsKey(channel.toLowerCase())) channels.put(channel.toLowerCase(), new ArrayList<IRCUser>()); sendRaw("NAMES "+channel); } @Override public void onModeChanged(String channel, String user, String modes) { if(channel.startsWith("#") || channel.startsWith("&") || channel.startsWith("+") || channel.startsWith("!")) sendRaw("NAMES "+channel); } @Override public void onKick(String channel, String user, String kicker) { onPart(channel, user); } @Override public void onPing(String response) { sendRaw("PONG "+response); } @Override public void onNick(String oldNick, String newNick) { String n = oldNick.toLowerCase(); String x = whos.get(n); whos.remove(n); whos.put(newNick.toLowerCase(), x); if(oldNick.equalsIgnoreCase(nick)) nick = newNick; for(String c : channels.keySet()) { for(IRCUser u : channels.get(c)) { if(u.getNick().equalsIgnoreCase(oldNick)) u.setNick(newNick); } } } @Override public void onCTCP(String sender, String ctcp) { String cmd = ctcp.split(" ")[0].toUpperCase(); switch(cmd) { case "VERSION": getIRC().sendNotice(sender, "VERSION MutinyIRC " + "Bridge Plugin by MutinyCraft - http://github." 
+ "com/MooseElkingtons/MutinyIRC (Developed by M" + "ooseElkingtons)"); break; case "PING": String[] crps = ctcp.split(" "); String pres = "PING"; if(crps.length > 1) pres += " "+crps[1]; getIRC().sendNotice(sender, pres); break; } } @Override public void onServerResponse(int code, String response) { String[] res = response.split(" "); switch(code) { case ReplyConstants.RPL_ENDOFMOTD: String chanlist = ""; List<String> clist = plugin.getConfig() .getStringList("config.channels"); for(String chan : clist) { chanlist += chan + ","; } sendRaw("JOIN "+chanlist.substring(0, chanlist.lastIndexOf(','))); for(String chan : clist) { sendRaw("WHO "+chan); } break; case ReplyConstants.RPL_NAMREPLY: String[] msx = response.split(":"); String[] xres = msx[1].split(" "); String chn = msx[0].substring(msx[0].indexOf("#")); if(!isChannel(chn)) return; for(String xrs : xres) { String nick = xrs.substring(1); char xc = xrs.charAt(0); CMode mode = CMode.CMODE_NORMAL; switch(xc) { case '~': mode = CMode.CMODE_OWNER; break; case '&': mode = CMode.CMODE_ADMIN; break; case '@': mode = CMode.CMODE_OP; break; case '%': mode = CMode.CMODE_HOP; break; case '+': mode = CMode.CMODE_VOICE; break; default: mode = CMode.CMODE_NORMAL; nick = xrs; break; } if(!bufferNam.containsKey(nick.toLowerCase())) bufferNam.put(nick.toLowerCase(), new IRCUser(nick, mode, chn.toLowerCase())); } break; case ReplyConstants.RPL_ENDOFNAMES: String channel = res[1].toLowerCase(); channels.remove(channel); if(isChannel(channel)) channels.put(channel, new ArrayList<IRCUser>(bufferNam.values())); bufferNam.clear(); break; case ReplyConstants.RPL_WELCOME: plugin.getLogger().log(Level.INFO, "Successfully connecte"+ "d and registered to "+server+"."); for(String scm : loadship) sendRaw(scm); break; case ReplyConstants.RPL_WHOREPLY: String wnick = res[5].toLowerCase(); String whost = res[3]; String wlogin = res[2]; if(whos.containsKey(wnick)) whos.remove(wnick); whos.put(wnick, wlogin+"@"+whost); break; case ReplyConstants.ERR_ERRONEOUSNICKNAME: case ReplyConstants.ERR_NICKCOLLISION: case ReplyConstants.ERR_NONICKNAMEGIVEN: case ReplyConstants.ERR_NICKNAMEINUSE: plugin.getLogger().log(Level.WARNING, response); if(triedNicks > 4) { plugin.getLogger().log(Level.SEVERE, "Tried changing" + "nick more than 4 times! Quitting IRC."); sendRaw("QUIT"); } setNick(nick+"-"); triedNicks++; break; case ReplyConstants.ERR_ALREADYREGISTERED: plugin.getLogger().log(Level.SEVERE, "ERR_ALREADYREGISTER" + "ED "+response); break; case ReplyConstants.ERR_BADCHANNELKEY: plugin.getLogger().log(Level.SEVERE, "ERR_BADCHANNELKEY "+ response); break; case ReplyConstants.ERR_INVITEONLYCHAN: plugin.getLogger().log(Level.SEVERE, "ERR_INVITEONLYCHAN " + response); break; } } } private static String getSuffix(Player player) { return Reader.getSuffix(player.getName(), InfoType.USER, player.getWorld().getName()); } private static String getPrefix(Player player) { return Reader.getPrefix(player.getName(), InfoType.USER, player.getWorld().getName()); } }
src/com/mutinycraft/irc/IRC.java
package com.mutinycraft.irc; import java.net.*; import java.util.*; import java.util.concurrent.*; import java.util.logging.*; import com.miraclem4n.mchat.types.InfoType; import net.milkbowl.vault.chat.Chat; import org.bukkit.*; import org.bukkit.configuration.file.*; import org.bukkit.entity.*; import com.massivecraft.factions.*; import com.miraclem4n.mchat.api.*; import com.mutinycraft.irc.io.*; import com.mutinycraft.irc.IRCUser.*; import com.mutinycraft.irc.plugin.*; /** * RFC-2812 compliant IRC bridge. * * @author MooseElkingtons */ public class IRC { public ConcurrentLinkedQueue<String> queue = new ConcurrentLinkedQueue<String>(); private HashMap<String, String> ircMsgs = new HashMap<String, String>(), gameMsgs = new HashMap<String, String>(); private HashMap<String, Boolean> ircRelays = new HashMap<String, Boolean>(), gameRelays = new HashMap<String, Boolean>(); private String cmdPrefix = ".", ircPrefix = "", gamePrefix = "", nameFormat = ""; private Plugin plugin; private List<IRCListener> listeners = new ArrayList<IRCListener>(); private HashMap<String, IRCUser> bufferNam = new HashMap<String, IRCUser>(); private Socket socket = null; private Thread input = null, output = null; private String nick = "MutinyIRC"; private String pass = null; private String server = ""; private int port = 6667; private int triedNicks = 0; private HashMap<String, List<IRCUser>> channels = new HashMap<String, List<IRCUser>>(); private HashMap<String, String> whos = new HashMap<String, String>(); private List<String> loadship = new ArrayList<String>(); public IRC(Plugin plugin) { this.plugin = plugin; loadStartupCommands(); loadConfig(); this.registerIRCListener(new ControlListener(this, plugin)); IRCCommandListener gl = new IRCCommandListener(this, plugin); this.registerIRCListener(gl); plugin.getServer().getPluginManager().registerEvents(gl, plugin); } public void loadStartupCommands() { loadship.clear(); FileConfiguration cfg = plugin.getConfig(); if(cfg.contains("config.startup_commands")) loadship.addAll(cfg.getStringList("config.startup_commands")); } public void loadConfig() { gameMsgs.clear(); ircMsgs.clear(); gameRelays.clear(); ircRelays.clear(); FileConfiguration cfg = plugin.getConfig(); String imsg = "irc_to_game.messages."; gameMsgs.put("join", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"join")))); gameMsgs.put("part", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"part")))); gameMsgs.put("kick", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"kick")))); gameMsgs.put("msg", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"msg")))); gameMsgs.put("nick", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"nick")))); gameMsgs.put("me", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"me")))); gameMsgs.put("modes", ChatUtil.ircToGameColors(ChatUtil.correctCC( cfg.getString(imsg+"modes")))); String gmsg = "game_to_irc.messages."; ircMsgs.put("join", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"join")))); ircMsgs.put("part", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"part")))); ircMsgs.put("kick", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"kick")))); ircMsgs.put("msg", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"msg")))); ircMsgs.put("me", ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString(gmsg+"me")))); String irel = "irc_to_game.relay."; gameRelays.put("join", cfg.getBoolean(irel+"join")); 
gameRelays.put("part", cfg.getBoolean(irel+"part")); gameRelays.put("kick", cfg.getBoolean(irel+"kick")); gameRelays.put("msg", cfg.getBoolean(irel+"msg")); gameRelays.put("nick", cfg.getBoolean(irel+"nick")); gameRelays.put("me", cfg.getBoolean(irel+"me")); gameRelays.put("modes", cfg.getBoolean(irel+"modes")); gameRelays.put("color", cfg.getBoolean(irel+"color")); String grel = "game_to_irc.relay."; ircRelays.put("join", cfg.getBoolean(grel+"join")); ircRelays.put("part", cfg.getBoolean(grel+"part")); ircRelays.put("kick", cfg.getBoolean(grel+"kick")); ircRelays.put("msg", cfg.getBoolean(grel+"msg")); ircRelays.put("me", cfg.getBoolean(grel+"me")); ircRelays.put("color", cfg.getBoolean(grel+"color")); cmdPrefix = cfg.getString("config.command_prefix"); ircPrefix = ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString("game_to_irc.prefix"))); gamePrefix = ChatUtil.correctCC(cfg.getString("irc_to_game.prefix")); nameFormat = ChatUtil.gameToIrcColors( ChatUtil.correctCC(cfg.getString("game_to_irc.name_format"))); } /** * Joins an IRC channel. * * @param channel The channel to join. */ public void joinChannel(String channel) { sendRaw("JOIN "+channel); } /** * Formats channel names properly. * * @param channel The channel name to format. * @return The formatted channel name. */ public String formatChannel(String channel) { String c = channel.replace(":", ""); if(!isChannel(channel)) c = "#" + channel; return c; } public boolean isChannel(String string) { return string.startsWith("#") || string.startsWith("&") || string.startsWith("+") || string.startsWith("!"); } /** * Queues an action to be sent to IRC. * * @param recipient the receiver of the action. * @param action The action to send. */ public void sendAction(String recipient, String action) { sendCTCP(recipient, "ACTION "+action); } /** * Sends a notice to the recipient. Used for CTCP responses. * * @param recipient The receiver of the notice. * @param message The notice to send. */ public void sendNotice(String recipient, String message) { sendRaw("NOTICE "+recipient+" :"+message); } /** * Queues a message to be sent to IRC. * * @param recipient The receiver of the message. * @param message The message to send. */ public void sendMessage(String recipient, String message) { recipient = recipient.replace(":", ""); String preCmd = "PRIVMSG "+recipient+" :"; int clen = 420 - preCmd.length() - recipient.length(); List<String> msgs = new ArrayList<String>(); for(int i = 0; i < message.length(); i+=clen) msgs.add(message .substring(i, Math.min(message.length(), i + clen))); for(String s : msgs) sendRaw(preCmd+s); } /** * Sends a raw line to IRC. Only recommended for advanced usage. * * @param rawLine The raw line to send to IRC. */ public void sendRaw(String rawLine) { if(!isConnected()) return; queue.add(rawLine); } /** * Sends Client-to-Client Protocol (CTCP) query to specified recipient. * * @param recipient The recipient of the CTCP query * @param ctcp The CTCP message */ public void sendCTCP(String recipient, String ctcp) { sendRaw("PRIVMSG "+recipient+" :\u000001"+ctcp+"\u000001"); } /** * Disconnect/quits from IRC. */ public void disconnect() { if(!isConnected()) return; try { socket.close(); socket = null; input = null; output = null; whos.clear(); channels.clear(); for(IRCListener l : listeners) l.onDisconnect(); } catch(Exception e) { plugin.getLogger().log(Level.SEVERE, "Encountered an error while " + "disconnecting from IRC.", e); } } /** * Attempts to reconnect to IRC. 
*/ public void reconnect() { plugin.getLogger().log(Level.INFO, "Attempting to reconnect to IRC."); disconnect(); connect(socket); } /** * * @return whether or not the IRC bridge is connected to any IRC server. */ public boolean isConnected() { return socket != null && socket.isConnected(); } /** * Connects the IRC bridge to specified socket. * * @param socket the socket for the IRC bridge to connect to. */ public void connect(Socket socket) { if(isConnected()) return; this.socket = socket; server = socket.getInetAddress().getHostAddress(); port = socket.getPort(); input = new Thread(new IRCInputThread(plugin, socket, this)); output = new Thread(new IRCOutputThread(plugin, socket, this)); input.start(); output.start(); for(IRCListener l : listeners) l.onConnect(); } public List<String> getChannels() { return new ArrayList<String>(channels.keySet()); } public CMode getUserCMode(String channel, String nick) { for(IRCUser u : channels.get(channel)) { if(u.getNick().equalsIgnoreCase(nick)) return u.getMode(); } return CMode.CMODE_NORMAL; } /** * Obtains a List containing all of the users in the channel specified. * * @return the user list */ public List<IRCUser> getUsers(String channel) { return channels.get(channel); } /** * Gets every IRC user in every channel we are in. * */ public List<IRCUser> getAllUsers() { List<IRCUser> users = new ArrayList<IRCUser>(); for(String c : getChannels()) users.addAll(channels.get(c)); return users; } /** * Gets the user hostname based on the nick. * * @param nick The nick of the user * @return The hostname */ public String getUserHost(String nick) { return whos.get(nick.toLowerCase()).split("@")[1]; } /** * Gets the user login name based on the nick. * * @param nick The nick of the user * @return The login name */ public String getUserLogin(String nick) { return whos.get(nick.toLowerCase()).split("@")[0]; } /** * Checks if the IRC bridge is in a same channel as the specified user. * * @param nick The nick to check. * @return Whether or not the IRC bridge can see the specified user. */ public boolean isUserVisible(String nick) { return whos.containsKey(nick.toLowerCase()); } /** * Sets the IRC bridge's nickname. * * @param nick */ public void setNick(String nick) { if(isConnected()) sendRaw("NICK "+nick); this.nick = nick; } /** * Gets the name of the IRC Bridge. * * @return IRC Bridge Nick used. */ public String getNick() { return nick; } /** * Gets the server to connect to. * * @return the server to connect to. */ public String getServer() { return server; } /** * The port for the server. * * @return the port. */ public int getPort() { return port; } /** * Sets the server to connect to. Will not modify present connections. * * @param server The server to connect to. */ public void setServer(String server) { this.server = server; } /** * Sets the server port. Default 6667. * * @param port The port. */ public void setPort(int port) { this.port = port; } /** * Sets IRC Server Pass to be used on connection. * * @param pass the pass to use. */ public void setPass(String pass) { this.pass = pass; } /** * Registers an IRCListener to the IRC bridge. */ public void registerIRCListener(IRCListener listener) { listeners.add(listener); } public List<IRCListener> getIRCListeners() { return listeners; } /** * Sends Game message to IRC. * * @param message the string to send to IRC. */ public void sendIrcMessage(String message) { String m = ircPrefix + (ircRelays.get("color") ? 
ChatUtil.gameToIrcColors(message) : ChatUtil.stripGameColors(message)); for(String channel : channels.keySet()) sendMessage(channel, m); } /** * Sends IRC message to game. * * @param message the string to send to game. */ public void sendGameMessage(String message) { String m = gamePrefix + ( gameRelays.get("color") ? ChatUtil.ircToGameColors(message) : ChatUtil.stripIrcColors(message)); for(Player p : plugin.getServer().getOnlinePlayers()) p.sendMessage(m); if(plugin.isVerbose()) plugin.getServer().getConsoleSender().sendMessage(m); } public String getIrcMessage(String key) { return ircMsgs.get(key); } public String getGameMessage(String key) { return gameMsgs.get(key); } public boolean getIrcRelay(String key) { return ircRelays.get(key); } public boolean getGameRelay(String key) { return gameRelays.get(key); } public String getCommandPrefix() { return cmdPrefix; } public String getIrcMsgPrefix() { return ircPrefix; } public String getGameMsgPrefix() { return gamePrefix; } public String formatGameMessage(Player player, String type) { String name = player.getName(); World world = player.getWorld(); String fname = getIrcMessage(type) .replace("%nf%", nameFormat) .replace("%name%", name) .replace("%dname%", player.getDisplayName()) .replace("%world%", world.getName()); if(plugin.isVaultEnabled()) { Chat chat = plugin.getVaultChat(); String group = chat.getPrimaryGroup(player); fname = fname .replace("%group%", group) .replace("%gprefix%", chat .getGroupPrefix(world, group)) .replace("%gsuffix%", chat .getGroupSuffix(world, group)) .replace("%prefix%", chat.getPlayerPrefix(player)) .replace("%suffix%", chat.getPlayerSuffix(player)); } if(plugin.isMChatEnabled()) { fname = fname.replace("%mprefix%", getPrefix(player)) .replace("%msuffix%", getSuffix(player)); } if(plugin.isFactionsEnabled()) { String tag = P.p.getPlayerFactionTag(player); FPlayer p = FPlayers.i.get(player); boolean peaceful = p.getFaction().isPeaceful(); if(tag.equals("~")) tag = ""; if(peaceful) tag = "\u00A76"+tag; fname = fname.replace("%ftag%", tag); } return ChatUtil.alltrim(ChatUtil.correctCC(fname)); } public String toGameColor(String message) { return gameRelays.get("color") ? ChatUtil.ircToGameColors(message) : ChatUtil.stripIrcColors(message); } public String toIrcColor(String message) { return ircRelays.get("color") ? 
ChatUtil.gameToIrcColors(message) : ChatUtil.stripGameColors(message); } class ControlListener extends IRCListener { public ControlListener(IRC irc, Plugin plugin) { super(irc, plugin); } @Override public void onConnect() { if(pass != null && !pass.isEmpty()) sendRaw("PASS "+pass); sendRaw("NICK "+nick); sendRaw("USER "+nick+" 0 * :MutinyIRC"); } @Override public void onDisconnect() { plugin.getLogger().log(Level.INFO,"Disconnected from "+server+"."); } @Override public void onPart(String user, String channel) { whos.remove(user.toLowerCase()); channel = channel.replace(":", ""); if(user.equalsIgnoreCase(nick)) channels.remove(channel.toLowerCase()); else sendRaw("NAMES "+channel); } @Override public void onJoin(String user, String login, String host, String channel) { whos.put(user.toLowerCase(), login+"@"+host); channel = channel.replace(":", ""); if(!channels.containsKey(channel.toLowerCase())) channels.put(channel.toLowerCase(), new ArrayList<IRCUser>()); sendRaw("NAMES "+channel); } @Override public void onModeChanged(String channel, String user, String modes) { if(channel.startsWith("#") || channel.startsWith("&") || channel.startsWith("+") || channel.startsWith("!")) sendRaw("NAMES "+channel); } @Override public void onKick(String channel, String user, String kicker) { onPart(channel, user); } @Override public void onPing(String response) { sendRaw("PONG "+response); } @Override public void onNick(String oldNick, String newNick) { String n = oldNick.toLowerCase(); String x = whos.get(n); whos.remove(n); whos.put(newNick.toLowerCase(), x); if(oldNick.equalsIgnoreCase(nick)) nick = newNick; for(String c : channels.keySet()) { for(IRCUser u : channels.get(c)) { if(u.getNick().equalsIgnoreCase(oldNick)) u.setNick(newNick); } } } @Override public void onCTCP(String sender, String ctcp) { String cmd = ctcp.split(" ")[0].toUpperCase(); switch(cmd) { case "VERSION": getIRC().sendNotice(sender, "VERSION MutinyIRC " + "Bridge Plugin by MutinyCraft - http://github." 
+ "com/MooseElkingtons/MutinyIRC (Developed by M" + "ooseElkingtons)"); break; case "PING": String[] crps = ctcp.split(" "); String pres = "PING"; if(crps.length > 1) pres += " "+crps[1]; getIRC().sendNotice(sender, pres); break; } } @Override public void onServerResponse(int code, String response) { String[] res = response.split(" "); switch(code) { case ReplyConstants.RPL_ENDOFMOTD: String chanlist = ""; List<String> clist = plugin.getConfig() .getStringList("config.channels"); for(String chan : clist) { chanlist += chan + ","; } sendRaw("JOIN "+chanlist.substring(0, chanlist.lastIndexOf(','))); for(String chan : clist) { sendRaw("WHO "+chan); } break; case ReplyConstants.RPL_NAMREPLY: String[] msx = response.split(":"); String[] xres = msx[1].split(" "); String chn = msx[0].substring(msx[0].indexOf("#")); if(!isChannel(chn)) return; for(String xrs : xres) { String nick = xrs.substring(1); char xc = xrs.charAt(0); CMode mode = CMode.CMODE_NORMAL; switch(xc) { case '~': mode = CMode.CMODE_OWNER; break; case '&': mode = CMode.CMODE_ADMIN; break; case '@': mode = CMode.CMODE_OP; break; case '%': mode = CMode.CMODE_HOP; break; case '+': mode = CMode.CMODE_VOICE; break; default: mode = CMode.CMODE_NORMAL; nick = xrs; break; } if(!bufferNam.containsKey(nick.toLowerCase())) bufferNam.put(nick.toLowerCase(), new IRCUser(nick, mode, chn.toLowerCase())); } break; case ReplyConstants.RPL_ENDOFNAMES: String channel = res[1].toLowerCase(); channels.remove(channel); if(isChannel(channel)) channels.put(channel, new ArrayList<IRCUser>(bufferNam.values())); bufferNam.clear(); break; case ReplyConstants.RPL_WELCOME: plugin.getLogger().log(Level.INFO, "Successfully connecte"+ "d and registered to "+server+"."); for(String scm : loadship) sendRaw(scm); break; case ReplyConstants.RPL_WHOREPLY: String wnick = res[5].toLowerCase(); String whost = res[3]; String wlogin = res[2]; if(whos.containsKey(wnick)) whos.remove(wnick); whos.put(wnick, wlogin+"@"+whost); break; case ReplyConstants.ERR_ERRONEOUSNICKNAME: case ReplyConstants.ERR_NICKCOLLISION: case ReplyConstants.ERR_NONICKNAMEGIVEN: case ReplyConstants.ERR_NICKNAMEINUSE: plugin.getLogger().log(Level.WARNING, response); if(triedNicks > 4) { plugin.getLogger().log(Level.SEVERE, "Tried changing" + "nick more than 4 times! Quitting IRC."); sendRaw("QUIT"); } setNick(nick+"-"); triedNicks++; break; case ReplyConstants.ERR_ALREADYREGISTERED: plugin.getLogger().log(Level.SEVERE, "ERR_ALREADYREGISTER" + "ED "+response); break; case ReplyConstants.ERR_BADCHANNELKEY: plugin.getLogger().log(Level.SEVERE, "ERR_BADCHANNELKEY "+ response); break; case ReplyConstants.ERR_INVITEONLYCHAN: plugin.getLogger().log(Level.SEVERE, "ERR_INVITEONLYCHAN " + response); break; } } } private static String getSuffix(Player player) { return Reader.getSuffix(player.getName(), InfoType.USER, player.getWorld().getName()); } private static String getPrefix(Player player) { return Reader.getPrefix(player.getName(), InfoType.USER, player.getWorld().getName()); } }
Fixed PRIVMSG being cut off by carriage return/line feed.
src/com/mutinycraft/irc/IRC.java
Fixed PRIVMSG being cut off by carriage return/line feed.
Java
mit
fadf937e7f096f17d5ae5d2c86ff8d65d219807e
0
creativetrendsapps/SimplicityBrowser,creativetrendsapps/SimplicityBrowser
package com.creativetrends.simplicity.app.activities; import android.app.Activity; import android.app.Dialog; import android.content.DialogInterface; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.DialogFragment; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatDialogFragment; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.WindowManager; import android.widget.EditText; import com.creativetrends.simplicity.app.R; public class ShortcutActivity extends AppCompatDialogFragment { // The public interface is used to send information back to the activity that called ShortcutActivity. public interface CreateHomeScreenSchortcutListener { void onCreateHomeScreenShortcutCancel(DialogFragment dialog); void onCreateHomeScreenShortcutCreate(DialogFragment dialog); } private CreateHomeScreenSchortcutListener buttonListener; // Check to make sure that the activity that called ShortcutActivity implements both listeners. public void onAttach(Activity activity) { super.onAttach(activity); try { buttonListener = (CreateHomeScreenSchortcutListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement CreateHomeScreenShortcutListener."); } } // onCreateDialog requires @NonNull. @Override @NonNull public Dialog onCreateDialog(Bundle savedInstanceState) { // Create a drawable version of the favorite icon. Drawable favoriteIconDrawable = new BitmapDrawable(getResources(), favoriteIcon); // Use AlertDialog.Builder to create the AlertDialog AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(getActivity()); LayoutInflater customDialogInflater = getActivity().getLayoutInflater(); alertDialogBuilder.setTitle(R.string.shortcut_name); alertDialogBuilder.setIcon(favoriteIconDrawable); alertDialogBuilder.setView(customDialogInflater.inflate(R.layout.activity_shortcut, null)); alertDialogBuilder.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { buttonListener.onCreateHomeScreenShortcutCancel(ShortcutActivity.this); } }); alertDialogBuilder.setPositiveButton(R.string.create, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { buttonListener.onCreateHomeScreenShortcutCreate(ShortcutActivity.this); } }); // Assign the alertDialogBuilder to an AlertDialog. final AlertDialog alertDialog = alertDialogBuilder.create(); // Show the keyboard when the dialog is displayed on the screen. alertDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE); // We need to show alertDialog before we can setOnKeyListener below. alertDialog.show(); EditText shortcutNameEditText = (EditText) alertDialog.findViewById(R.id.shortcutNameEditText); // Allow the "enter" key on the keyboard to create the shortcut. assert shortcutNameEditText != null; shortcutNameEditText.setOnKeyListener(new View.OnKeyListener() { public boolean onKey(View v, int keyCode, KeyEvent event) { // If the event is a key-down event on the "enter" button, select the PositiveButton "Create". if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) { // Trigger the create listener. 
buttonListener.onCreateHomeScreenShortcutCreate(ShortcutActivity.this); // Manually dismiss alertDialog. alertDialog.dismiss(); // Consume the event. return true; } else { // If any other key was pressed, do not consume the event. return false; } } }); // onCreateDialog requires the return of an AlertDialog. return alertDialog; } }
app/src/main/java/com/creativetrends/simplicity/app/activities/ShortcutActivity.java
package com.creativetrends.simplicity.app.activities; import android.app.Activity; import android.app.Dialog; import android.content.DialogInterface; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.v4.app.DialogFragment; import android.support.v7.app.AlertDialog; import android.support.v7.app.AppCompatDialogFragment; import android.view.KeyEvent; import android.view.LayoutInflater; import android.view.View; import android.view.WindowManager; import android.widget.EditText; import com.creativetrends.simplicity.app.R; public class ShortcutActivity extends AppCompatDialogFragment { // The public interface is used to send information back to the activity that called ShortcutActivity. public interface CreateHomeScreenSchortcutListener { void onCreateHomeScreenShortcutCancel(DialogFragment dialog); void onCreateHomeScreenShortcutCreate(DialogFragment dialog); } private CreateHomeScreenSchortcutListener buttonListener; // Check to make sure that the activity that called ShortcutActivity implements both listeners. public void onAttach(Activity activity) { super.onAttach(activity); try { buttonListener = (CreateHomeScreenSchortcutListener) activity; } catch (ClassCastException e) { throw new ClassCastException(activity.toString() + " must implement CreateHomeScreenShortcutListener."); } } // onCreateDialog requires @NonNull. @Override @NonNull public Dialog onCreateDialog(Bundle savedInstanceState) { // Create a drawable version of the favorite icon. Drawable favoriteIconDrawable = new BitmapDrawable(getResources(), MainActivity.favoriteIcon); // Use AlertDialog.Builder to create the AlertDialog AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(getActivity()); LayoutInflater customDialogInflater = getActivity().getLayoutInflater(); alertDialogBuilder.setTitle(R.string.shortcut_name); alertDialogBuilder.setIcon(favoriteIconDrawable); alertDialogBuilder.setView(customDialogInflater.inflate(R.layout.activity_shortcut, null)); alertDialogBuilder.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { buttonListener.onCreateHomeScreenShortcutCancel(ShortcutActivity.this); } }); alertDialogBuilder.setPositiveButton(R.string.create, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { buttonListener.onCreateHomeScreenShortcutCreate(ShortcutActivity.this); } }); // Assign the alertDialogBuilder to an AlertDialog. final AlertDialog alertDialog = alertDialogBuilder.create(); // Show the keyboard when the dialog is displayed on the screen. alertDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE); // We need to show alertDialog before we can setOnKeyListener below. alertDialog.show(); EditText shortcutNameEditText = (EditText) alertDialog.findViewById(R.id.shortcutNameEditText); // Allow the "enter" key on the keyboard to create the shortcut. assert shortcutNameEditText != null; shortcutNameEditText.setOnKeyListener(new View.OnKeyListener() { public boolean onKey(View v, int keyCode, KeyEvent event) { // If the event is a key-down event on the "enter" button, select the PositiveButton "Create". if ((event.getAction() == KeyEvent.ACTION_DOWN) && (keyCode == KeyEvent.KEYCODE_ENTER)) { // Trigger the create listener. 
buttonListener.onCreateHomeScreenShortcutCreate(ShortcutActivity.this); // Manually dismiss alertDialog. alertDialog.dismiss(); // Consume the event. return true; } else { // If any other key was pressed, do not consume the event. return false; } } }); // onCreateDialog requires the return of an AlertDialog. return alertDialog; } }
Update ShortcutActivity.java
app/src/main/java/com/creativetrends/simplicity/app/activities/ShortcutActivity.java
Update ShortcutActivity.java
Java
mit
ba0060c072eca0f1026df2da3900fe496239c840
0
bullhorn/sdk-rest
package com.bullhornsdk.data.model.entity.core.standard; import javax.validation.constraints.Size; import org.joda.time.DateTime; import com.bullhornsdk.data.model.entity.core.type.AbstractEntity; import com.bullhornsdk.data.model.entity.core.type.AssociationEntity; import com.bullhornsdk.data.model.entity.core.type.CreateEntity; import com.bullhornsdk.data.model.entity.core.type.DateLastModifiedEntity; import com.bullhornsdk.data.model.entity.core.type.QueryEntity; import com.bullhornsdk.data.model.entity.core.type.SoftDeleteEntity; import com.bullhornsdk.data.model.entity.core.type.UpdateEntity; import com.bullhornsdk.data.model.entity.embedded.LinkedId; import com.bullhornsdk.data.model.entity.embedded.LinkedPerson; import com.bullhornsdk.data.model.entity.embedded.OneToManyLinkedId; import com.bullhornsdk.data.validation.BullhornUUID; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.fasterxml.jackson.annotation.JsonRootName; @JsonInclude(JsonInclude.Include.NON_NULL) @JsonRootName(value = "data") @JsonPropertyOrder({ "id", "appointmentUUID", "attendees", "candidateReference", "childAppointments", "clientContactReference", "communicationMethod", "dateAdded", "dateBegin", "dateEnd", "dateLastModified", "description", "isAllDay", "isDeleted", "isPrivate", "jobOrder", "lead", "location", "migrateGUID", "notificationMinutes", "opportunity", "owner", "parentAppointment", "placement", "recurrenceDayBits", "recurrenceFrequency", "recurrenceMax", "recurrenceMonthBits", "recurrenceStyle", "recurrenceType", "showTimeAs", "subject", "timeZoneID", "type" }) public class Appointment extends AbstractEntity implements QueryEntity, UpdateEntity, CreateEntity, SoftDeleteEntity, DateLastModifiedEntity { private Integer id; @BullhornUUID private String appointmentUUID; private OneToManyLinkedId attendees; private Candidate candidateReference; private OneToManyLinkedId childAppointments; private ClientContact clientContactReference; @Size(max = 30) private String communicationMethod; private DateTime dateAdded; private DateTime dateBegin; private DateTime dateEnd; private DateTime dateLastModified; private String description; private Boolean isAllDay; private Boolean isDeleted; private Boolean isPrivate; private JobOrder jobOrder; private Lead lead; @Size(max = 100) private String location; private Object migrateGUID; private Integer notificationMinutes; private Opportunity opportunity; private LinkedPerson owner; private LinkedId parentAppointment; private LinkedId placement; private Integer recurrenceDayBits; private Integer recurrenceFrequency; private Integer recurrenceMax; private Integer recurrenceMonthBits; @Size(max = 10) private String recurrenceStyle; @Size(max = 1) private String recurrenceType; private String showTimeAs; @Size(max = 100) private String subject; private String timeZoneID; @Size(max = 30) private String type; @Override @JsonProperty("id") public Integer getId() { return id; } public Appointment() { super(); } /** * Returns the entity with the required fields for an insert set. 
* * @return */ public Appointment instantiateForInsert() { Appointment entity = new Appointment(); entity.setIsDeleted(Boolean.FALSE); entity.setIsPrivate(Boolean.FALSE); return entity; } @Override @JsonProperty("id") public void setId(Integer id) { this.id = id; } @JsonProperty("appointmentUUID") public String getAppointmentUUID() { return appointmentUUID; } @JsonProperty("appointmentUUID") public void setAppointmentUUID(String appointmentUUID) { this.appointmentUUID = appointmentUUID; } @JsonIgnore public OneToManyLinkedId getAttendees() { return attendees; } @JsonProperty("attendees") public void setAttendees(OneToManyLinkedId attendees) { this.attendees = attendees; } @JsonProperty("candidateReference") public Candidate getCandidateReference() { return candidateReference; } @JsonProperty("candidateReference") public void setCandidateReference(Candidate candidateReference) { this.candidateReference = candidateReference; } @JsonIgnore public OneToManyLinkedId getChildAppointments() { return childAppointments; } @JsonProperty("childAppointments") public void setChildAppointments(OneToManyLinkedId childAppointments) { this.childAppointments = childAppointments; } @JsonProperty("clientContactReference") public ClientContact getClientContactReference() { return clientContactReference; } @JsonProperty("clientContactReference") public void setClientContactReference(ClientContact clientContactReference) { this.clientContactReference = clientContactReference; } @JsonProperty("communicationMethod") public String getCommunicationMethod() { return communicationMethod; } @JsonProperty("communicationMethod") public void setCommunicationMethod(String communicationMethod) { this.communicationMethod = communicationMethod; } @JsonProperty("dateAdded") public DateTime getDateAdded() { return dateAdded; } @JsonProperty("dateAdded") public void setDateAdded(DateTime dateAdded) { this.dateAdded = dateAdded; } @JsonProperty("dateBegin") public DateTime getDateBegin() { return dateBegin; } @JsonProperty("dateBegin") public void setDateBegin(DateTime dateBegin) { this.dateBegin = dateBegin; } @JsonProperty("dateEnd") public DateTime getDateEnd() { return dateEnd; } @JsonProperty("dateEnd") public void setDateEnd(DateTime dateEnd) { this.dateEnd = dateEnd; } @JsonProperty("dateLastModified") public DateTime getDateLastModified() { return dateLastModified; } @JsonProperty("dateLastModified") public void setDateLastModified(DateTime dateLastModified) { this.dateLastModified = dateLastModified; } @JsonProperty("description") public String getDescription() { return description; } @JsonProperty("description") public void setDescription(String description) { this.description = description; } @JsonProperty("isAllDay") public Boolean getIsAllDay() { return isAllDay; } @JsonProperty("isAllDay") public void setIsAllDay(Boolean isAllDay) { this.isAllDay = isAllDay; } @JsonProperty("isDeleted") public Boolean getIsDeleted() { return isDeleted; } @JsonProperty("isDeleted") public void setIsDeleted(Boolean isDeleted) { this.isDeleted = isDeleted; } @JsonProperty("isPrivate") public Boolean getIsPrivate() { return isPrivate; } @JsonProperty("isPrivate") public void setIsPrivate(Boolean isPrivate) { this.isPrivate = isPrivate; } @JsonProperty("jobOrder") public JobOrder getJobOrder() { return jobOrder; } @JsonProperty("jobOrder") public void setJobOrder(JobOrder jobOrder) { this.jobOrder = jobOrder; } @JsonProperty("lead") public Lead getLead() { return lead; } @JsonProperty("lead") public void setLead(Lead lead) { this.lead = 
lead; } @JsonProperty("location") public String getLocation() { return location; } @JsonProperty("location") public void setLocation(String location) { this.location = location; } @JsonProperty("migrateGUID") public Object getMigrateGUID() { return migrateGUID; } @JsonProperty("migrateGUID") public void setMigrateGUID(Object migrateGUID) { this.migrateGUID = migrateGUID; } @JsonProperty("notificationMinutes") public Integer getNotificationMinutes() { return notificationMinutes; } @JsonProperty("notificationMinutes") public void setNotificationMinutes(Integer notificationMinutes) { this.notificationMinutes = notificationMinutes; } @JsonProperty("opportunity") public Opportunity getOpportunity() { return opportunity; } @JsonProperty("opportunity") public void setOpportunity(Opportunity opportunity) { this.opportunity = opportunity; } @JsonProperty("owner") public LinkedPerson getOwner() { return owner; } @JsonProperty("owner") public void setOwner(LinkedPerson owner) { this.owner = owner; } @JsonProperty("parentAppointment") public LinkedId getParentAppointment() { return parentAppointment; } @JsonProperty("parentAppointment") public void setParentAppointment(LinkedId parentAppointment) { this.parentAppointment = parentAppointment; } @JsonProperty("placement") public LinkedId getPlacement() { return placement; } @JsonProperty("placement") public void setPlacement(LinkedId placement) { this.placement = placement; } @JsonProperty("recurrenceDayBits") public Integer getRecurrenceDayBits() { return recurrenceDayBits; } @JsonProperty("recurrenceDayBits") public void setRecurrenceDayBits(Integer recurrenceDayBits) { this.recurrenceDayBits = recurrenceDayBits; } @JsonProperty("recurrenceFrequency") public Integer getRecurrenceFrequency() { return recurrenceFrequency; } @JsonProperty("recurrenceFrequency") public void setRecurrenceFrequency(Integer recurrenceFrequency) { this.recurrenceFrequency = recurrenceFrequency; } @JsonProperty("recurrenceMax") public Integer getRecurrenceMax() { return recurrenceMax; } @JsonProperty("recurrenceMax") public void setRecurrenceMax(Integer recurrenceMax) { this.recurrenceMax = recurrenceMax; } @JsonProperty("recurrenceMonthBits") public Integer getRecurrenceMonthBits() { return recurrenceMonthBits; } @JsonProperty("recurrenceMonthBits") public void setRecurrenceMonthBits(Integer recurrenceMonthBits) { this.recurrenceMonthBits = recurrenceMonthBits; } @JsonProperty("recurrenceStyle") public String getRecurrenceStyle() { return recurrenceStyle; } @JsonProperty("recurrenceStyle") public void setRecurrenceStyle(String recurrenceStyle) { this.recurrenceStyle = recurrenceStyle; } @JsonProperty("recurrenceType") public String getRecurrenceType() { return recurrenceType; } @JsonProperty("recurrenceType") public void setRecurrenceType(String recurrenceType) { this.recurrenceType = recurrenceType; } @JsonProperty("showTimeAs") public String getShowTimeAs() { return showTimeAs; } @JsonProperty("showTimeAs") public void setShowTimeAs(String showTimeAs) { this.showTimeAs = showTimeAs; } @JsonProperty("subject") public String getSubject() { return subject; } @JsonProperty("subject") public void setSubject(String subject) { this.subject = subject; } @JsonProperty("timeZoneID") public String getTimeZoneID() { return timeZoneID; } @JsonProperty("timeZoneID") public void setTimeZoneID(String timeZoneID) { this.timeZoneID = timeZoneID; } @JsonProperty("type") public String getType() { return type; } @JsonProperty("type") public void setType(String type) { this.type = type; } 
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Appointment that = (Appointment) o; if (id != null ? !id.equals(that.id) : that.id != null) return false; if (appointmentUUID != null ? !appointmentUUID.equals(that.appointmentUUID) : that.appointmentUUID != null) return false; if (attendees != null ? !attendees.equals(that.attendees) : that.attendees != null) return false; if (candidateReference != null ? !candidateReference.equals(that.candidateReference) : that.candidateReference != null) return false; if (childAppointments != null ? !childAppointments.equals(that.childAppointments) : that.childAppointments != null) return false; if (clientContactReference != null ? !clientContactReference.equals(that.clientContactReference) : that.clientContactReference != null) return false; if (communicationMethod != null ? !communicationMethod.equals(that.communicationMethod) : that.communicationMethod != null) return false; if (dateAdded != null ? !dateAdded.equals(that.dateAdded) : that.dateAdded != null) return false; if (dateBegin != null ? !dateBegin.equals(that.dateBegin) : that.dateBegin != null) return false; if (dateEnd != null ? !dateEnd.equals(that.dateEnd) : that.dateEnd != null) return false; if (dateLastModified != null ? !dateLastModified.equals(that.dateLastModified) : that.dateLastModified != null) return false; if (description != null ? !description.equals(that.description) : that.description != null) return false; if (isAllDay != null ? !isAllDay.equals(that.isAllDay) : that.isAllDay != null) return false; if (isDeleted != null ? !isDeleted.equals(that.isDeleted) : that.isDeleted != null) return false; if (isPrivate != null ? !isPrivate.equals(that.isPrivate) : that.isPrivate != null) return false; if (jobOrder != null ? !jobOrder.equals(that.jobOrder) : that.jobOrder != null) return false; if (lead != null ? !lead.equals(that.lead) : that.lead != null) return false; if (location != null ? !location.equals(that.location) : that.location != null) return false; if (migrateGUID != null ? !migrateGUID.equals(that.migrateGUID) : that.migrateGUID != null) return false; if (notificationMinutes != null ? !notificationMinutes.equals(that.notificationMinutes) : that.notificationMinutes != null) return false; if (opportunity != null ? !opportunity.equals(that.opportunity) : that.opportunity != null) return false; if (owner != null ? !owner.equals(that.owner) : that.owner != null) return false; if (parentAppointment != null ? !parentAppointment.equals(that.parentAppointment) : that.parentAppointment != null) return false; if (placement != null ? !placement.equals(that.placement) : that.placement != null) return false; if (recurrenceDayBits != null ? !recurrenceDayBits.equals(that.recurrenceDayBits) : that.recurrenceDayBits != null) return false; if (recurrenceFrequency != null ? !recurrenceFrequency.equals(that.recurrenceFrequency) : that.recurrenceFrequency != null) return false; if (recurrenceMax != null ? !recurrenceMax.equals(that.recurrenceMax) : that.recurrenceMax != null) return false; if (recurrenceMonthBits != null ? !recurrenceMonthBits.equals(that.recurrenceMonthBits) : that.recurrenceMonthBits != null) return false; if (recurrenceStyle != null ? !recurrenceStyle.equals(that.recurrenceStyle) : that.recurrenceStyle != null) return false; if (recurrenceType != null ? !recurrenceType.equals(that.recurrenceType) : that.recurrenceType != null) return false; if (showTimeAs != null ? 
!showTimeAs.equals(that.showTimeAs) : that.showTimeAs != null) return false; if (subject != null ? !subject.equals(that.subject) : that.subject != null) return false; if (timeZoneID != null ? !timeZoneID.equals(that.timeZoneID) : that.timeZoneID != null) return false; return !(type != null ? !type.equals(that.type) : that.type != null); } @Override public int hashCode() { int result = id != null ? id.hashCode() : 0; result = 31 * result + (appointmentUUID != null ? appointmentUUID.hashCode() : 0); result = 31 * result + (attendees != null ? attendees.hashCode() : 0); result = 31 * result + (candidateReference != null ? candidateReference.hashCode() : 0); result = 31 * result + (childAppointments != null ? childAppointments.hashCode() : 0); result = 31 * result + (clientContactReference != null ? clientContactReference.hashCode() : 0); result = 31 * result + (communicationMethod != null ? communicationMethod.hashCode() : 0); result = 31 * result + (dateAdded != null ? dateAdded.hashCode() : 0); result = 31 * result + (dateBegin != null ? dateBegin.hashCode() : 0); result = 31 * result + (dateEnd != null ? dateEnd.hashCode() : 0); result = 31 * result + (dateLastModified != null ? dateLastModified.hashCode() : 0); result = 31 * result + (description != null ? description.hashCode() : 0); result = 31 * result + (isAllDay != null ? isAllDay.hashCode() : 0); result = 31 * result + (isDeleted != null ? isDeleted.hashCode() : 0); result = 31 * result + (isPrivate != null ? isPrivate.hashCode() : 0); result = 31 * result + (jobOrder != null ? jobOrder.hashCode() : 0); result = 31 * result + (lead != null ? lead.hashCode() : 0); result = 31 * result + (location != null ? location.hashCode() : 0); result = 31 * result + (migrateGUID != null ? migrateGUID.hashCode() : 0); result = 31 * result + (notificationMinutes != null ? notificationMinutes.hashCode() : 0); result = 31 * result + (opportunity != null ? opportunity.hashCode() : 0); result = 31 * result + (owner != null ? owner.hashCode() : 0); result = 31 * result + (parentAppointment != null ? parentAppointment.hashCode() : 0); result = 31 * result + (placement != null ? placement.hashCode() : 0); result = 31 * result + (recurrenceDayBits != null ? recurrenceDayBits.hashCode() : 0); result = 31 * result + (recurrenceFrequency != null ? recurrenceFrequency.hashCode() : 0); result = 31 * result + (recurrenceMax != null ? recurrenceMax.hashCode() : 0); result = 31 * result + (recurrenceMonthBits != null ? recurrenceMonthBits.hashCode() : 0); result = 31 * result + (recurrenceStyle != null ? recurrenceStyle.hashCode() : 0); result = 31 * result + (recurrenceType != null ? recurrenceType.hashCode() : 0); result = 31 * result + (showTimeAs != null ? showTimeAs.hashCode() : 0); result = 31 * result + (subject != null ? subject.hashCode() : 0); result = 31 * result + (timeZoneID != null ? timeZoneID.hashCode() : 0); result = 31 * result + (type != null ? type.hashCode() : 0); return result; } }
src/main/java/com/bullhornsdk/data/model/entity/core/standard/Appointment.java
package com.bullhornsdk.data.model.entity.core.standard; import javax.validation.constraints.Size; import org.joda.time.DateTime; import com.bullhornsdk.data.model.entity.core.type.AbstractEntity; import com.bullhornsdk.data.model.entity.core.type.AssociationEntity; import com.bullhornsdk.data.model.entity.core.type.CreateEntity; import com.bullhornsdk.data.model.entity.core.type.DateLastModifiedEntity; import com.bullhornsdk.data.model.entity.core.type.QueryEntity; import com.bullhornsdk.data.model.entity.core.type.SoftDeleteEntity; import com.bullhornsdk.data.model.entity.core.type.UpdateEntity; import com.bullhornsdk.data.model.entity.embedded.LinkedId; import com.bullhornsdk.data.model.entity.embedded.LinkedPerson; import com.bullhornsdk.data.model.entity.embedded.OneToManyLinkedId; import com.bullhornsdk.data.validation.BullhornUUID; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.fasterxml.jackson.annotation.JsonRootName; @JsonInclude(JsonInclude.Include.NON_NULL) @JsonRootName(value = "data") @JsonPropertyOrder({ "id", "appointmentUUID", "attendees", "candidateReference", "childAppointments", "clientContactReference", "communicationMethod", "dateAdded", "dateBegin", "dateEnd", "dateLastModified", "description", "isAllDay", "isDeleted", "isPrivate", "jobOrder", "lead", "location", "migrateGUID", "notificationMinutes", "opportunity", "owner", "parentAppointment", "placement", "recurrenceDayBits", "recurrenceFrequency", "recurrenceMax", "recurrenceMonthBits", "recurrenceStyle", "recurrenceType", "showTimeAs", "subject", "timeZoneID", "type" }) public class Appointment extends AbstractEntity implements QueryEntity, UpdateEntity, CreateEntity, SoftDeleteEntity, DateLastModifiedEntity, AssociationEntity { private Integer id; @BullhornUUID private String appointmentUUID; private OneToManyLinkedId attendees; private Candidate candidateReference; private OneToManyLinkedId childAppointments; private ClientContact clientContactReference; @Size(max = 30) private String communicationMethod; private DateTime dateAdded; private DateTime dateBegin; private DateTime dateEnd; private DateTime dateLastModified; private String description; private Boolean isAllDay; private Boolean isDeleted; private Boolean isPrivate; private JobOrder jobOrder; private Lead lead; @Size(max = 100) private String location; private Object migrateGUID; private Integer notificationMinutes; private Opportunity opportunity; private LinkedPerson owner; private LinkedId parentAppointment; private LinkedId placement; private Integer recurrenceDayBits; private Integer recurrenceFrequency; private Integer recurrenceMax; private Integer recurrenceMonthBits; @Size(max = 10) private String recurrenceStyle; @Size(max = 1) private String recurrenceType; private String showTimeAs; @Size(max = 100) private String subject; private String timeZoneID; @Size(max = 30) private String type; @Override @JsonProperty("id") public Integer getId() { return id; } public Appointment() { super(); } /** * Returns the entity with the required fields for an insert set. 
* * @return */ public Appointment instantiateForInsert() { Appointment entity = new Appointment(); entity.setIsDeleted(Boolean.FALSE); entity.setIsPrivate(Boolean.FALSE); return entity; } @Override @JsonProperty("id") public void setId(Integer id) { this.id = id; } @JsonProperty("appointmentUUID") public String getAppointmentUUID() { return appointmentUUID; } @JsonProperty("appointmentUUID") public void setAppointmentUUID(String appointmentUUID) { this.appointmentUUID = appointmentUUID; } @JsonIgnore public OneToManyLinkedId getAttendees() { return attendees; } @JsonProperty("attendees") public void setAttendees(OneToManyLinkedId attendees) { this.attendees = attendees; } @JsonProperty("candidateReference") public Candidate getCandidateReference() { return candidateReference; } @JsonProperty("candidateReference") public void setCandidateReference(Candidate candidateReference) { this.candidateReference = candidateReference; } @JsonIgnore public OneToManyLinkedId getChildAppointments() { return childAppointments; } @JsonProperty("childAppointments") public void setChildAppointments(OneToManyLinkedId childAppointments) { this.childAppointments = childAppointments; } @JsonProperty("clientContactReference") public ClientContact getClientContactReference() { return clientContactReference; } @JsonProperty("clientContactReference") public void setClientContactReference(ClientContact clientContactReference) { this.clientContactReference = clientContactReference; } @JsonProperty("communicationMethod") public String getCommunicationMethod() { return communicationMethod; } @JsonProperty("communicationMethod") public void setCommunicationMethod(String communicationMethod) { this.communicationMethod = communicationMethod; } @JsonProperty("dateAdded") public DateTime getDateAdded() { return dateAdded; } @JsonProperty("dateAdded") public void setDateAdded(DateTime dateAdded) { this.dateAdded = dateAdded; } @JsonProperty("dateBegin") public DateTime getDateBegin() { return dateBegin; } @JsonProperty("dateBegin") public void setDateBegin(DateTime dateBegin) { this.dateBegin = dateBegin; } @JsonProperty("dateEnd") public DateTime getDateEnd() { return dateEnd; } @JsonProperty("dateEnd") public void setDateEnd(DateTime dateEnd) { this.dateEnd = dateEnd; } @JsonProperty("dateLastModified") public DateTime getDateLastModified() { return dateLastModified; } @JsonProperty("dateLastModified") public void setDateLastModified(DateTime dateLastModified) { this.dateLastModified = dateLastModified; } @JsonProperty("description") public String getDescription() { return description; } @JsonProperty("description") public void setDescription(String description) { this.description = description; } @JsonProperty("isAllDay") public Boolean getIsAllDay() { return isAllDay; } @JsonProperty("isAllDay") public void setIsAllDay(Boolean isAllDay) { this.isAllDay = isAllDay; } @JsonProperty("isDeleted") public Boolean getIsDeleted() { return isDeleted; } @JsonProperty("isDeleted") public void setIsDeleted(Boolean isDeleted) { this.isDeleted = isDeleted; } @JsonProperty("isPrivate") public Boolean getIsPrivate() { return isPrivate; } @JsonProperty("isPrivate") public void setIsPrivate(Boolean isPrivate) { this.isPrivate = isPrivate; } @JsonProperty("jobOrder") public JobOrder getJobOrder() { return jobOrder; } @JsonProperty("jobOrder") public void setJobOrder(JobOrder jobOrder) { this.jobOrder = jobOrder; } @JsonProperty("lead") public Lead getLead() { return lead; } @JsonProperty("lead") public void setLead(Lead lead) { this.lead = 
lead; } @JsonProperty("location") public String getLocation() { return location; } @JsonProperty("location") public void setLocation(String location) { this.location = location; } @JsonProperty("migrateGUID") public Object getMigrateGUID() { return migrateGUID; } @JsonProperty("migrateGUID") public void setMigrateGUID(Object migrateGUID) { this.migrateGUID = migrateGUID; } @JsonProperty("notificationMinutes") public Integer getNotificationMinutes() { return notificationMinutes; } @JsonProperty("notificationMinutes") public void setNotificationMinutes(Integer notificationMinutes) { this.notificationMinutes = notificationMinutes; } @JsonProperty("opportunity") public Opportunity getOpportunity() { return opportunity; } @JsonProperty("opportunity") public void setOpportunity(Opportunity opportunity) { this.opportunity = opportunity; } @JsonProperty("owner") public LinkedPerson getOwner() { return owner; } @JsonProperty("owner") public void setOwner(LinkedPerson owner) { this.owner = owner; } @JsonProperty("parentAppointment") public LinkedId getParentAppointment() { return parentAppointment; } @JsonProperty("parentAppointment") public void setParentAppointment(LinkedId parentAppointment) { this.parentAppointment = parentAppointment; } @JsonProperty("placement") public LinkedId getPlacement() { return placement; } @JsonProperty("placement") public void setPlacement(LinkedId placement) { this.placement = placement; } @JsonProperty("recurrenceDayBits") public Integer getRecurrenceDayBits() { return recurrenceDayBits; } @JsonProperty("recurrenceDayBits") public void setRecurrenceDayBits(Integer recurrenceDayBits) { this.recurrenceDayBits = recurrenceDayBits; } @JsonProperty("recurrenceFrequency") public Integer getRecurrenceFrequency() { return recurrenceFrequency; } @JsonProperty("recurrenceFrequency") public void setRecurrenceFrequency(Integer recurrenceFrequency) { this.recurrenceFrequency = recurrenceFrequency; } @JsonProperty("recurrenceMax") public Integer getRecurrenceMax() { return recurrenceMax; } @JsonProperty("recurrenceMax") public void setRecurrenceMax(Integer recurrenceMax) { this.recurrenceMax = recurrenceMax; } @JsonProperty("recurrenceMonthBits") public Integer getRecurrenceMonthBits() { return recurrenceMonthBits; } @JsonProperty("recurrenceMonthBits") public void setRecurrenceMonthBits(Integer recurrenceMonthBits) { this.recurrenceMonthBits = recurrenceMonthBits; } @JsonProperty("recurrenceStyle") public String getRecurrenceStyle() { return recurrenceStyle; } @JsonProperty("recurrenceStyle") public void setRecurrenceStyle(String recurrenceStyle) { this.recurrenceStyle = recurrenceStyle; } @JsonProperty("recurrenceType") public String getRecurrenceType() { return recurrenceType; } @JsonProperty("recurrenceType") public void setRecurrenceType(String recurrenceType) { this.recurrenceType = recurrenceType; } @JsonProperty("showTimeAs") public String getShowTimeAs() { return showTimeAs; } @JsonProperty("showTimeAs") public void setShowTimeAs(String showTimeAs) { this.showTimeAs = showTimeAs; } @JsonProperty("subject") public String getSubject() { return subject; } @JsonProperty("subject") public void setSubject(String subject) { this.subject = subject; } @JsonProperty("timeZoneID") public String getTimeZoneID() { return timeZoneID; } @JsonProperty("timeZoneID") public void setTimeZoneID(String timeZoneID) { this.timeZoneID = timeZoneID; } @JsonProperty("type") public String getType() { return type; } @JsonProperty("type") public void setType(String type) { this.type = type; } 
@Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; Appointment that = (Appointment) o; if (id != null ? !id.equals(that.id) : that.id != null) return false; if (appointmentUUID != null ? !appointmentUUID.equals(that.appointmentUUID) : that.appointmentUUID != null) return false; if (attendees != null ? !attendees.equals(that.attendees) : that.attendees != null) return false; if (candidateReference != null ? !candidateReference.equals(that.candidateReference) : that.candidateReference != null) return false; if (childAppointments != null ? !childAppointments.equals(that.childAppointments) : that.childAppointments != null) return false; if (clientContactReference != null ? !clientContactReference.equals(that.clientContactReference) : that.clientContactReference != null) return false; if (communicationMethod != null ? !communicationMethod.equals(that.communicationMethod) : that.communicationMethod != null) return false; if (dateAdded != null ? !dateAdded.equals(that.dateAdded) : that.dateAdded != null) return false; if (dateBegin != null ? !dateBegin.equals(that.dateBegin) : that.dateBegin != null) return false; if (dateEnd != null ? !dateEnd.equals(that.dateEnd) : that.dateEnd != null) return false; if (dateLastModified != null ? !dateLastModified.equals(that.dateLastModified) : that.dateLastModified != null) return false; if (description != null ? !description.equals(that.description) : that.description != null) return false; if (isAllDay != null ? !isAllDay.equals(that.isAllDay) : that.isAllDay != null) return false; if (isDeleted != null ? !isDeleted.equals(that.isDeleted) : that.isDeleted != null) return false; if (isPrivate != null ? !isPrivate.equals(that.isPrivate) : that.isPrivate != null) return false; if (jobOrder != null ? !jobOrder.equals(that.jobOrder) : that.jobOrder != null) return false; if (lead != null ? !lead.equals(that.lead) : that.lead != null) return false; if (location != null ? !location.equals(that.location) : that.location != null) return false; if (migrateGUID != null ? !migrateGUID.equals(that.migrateGUID) : that.migrateGUID != null) return false; if (notificationMinutes != null ? !notificationMinutes.equals(that.notificationMinutes) : that.notificationMinutes != null) return false; if (opportunity != null ? !opportunity.equals(that.opportunity) : that.opportunity != null) return false; if (owner != null ? !owner.equals(that.owner) : that.owner != null) return false; if (parentAppointment != null ? !parentAppointment.equals(that.parentAppointment) : that.parentAppointment != null) return false; if (placement != null ? !placement.equals(that.placement) : that.placement != null) return false; if (recurrenceDayBits != null ? !recurrenceDayBits.equals(that.recurrenceDayBits) : that.recurrenceDayBits != null) return false; if (recurrenceFrequency != null ? !recurrenceFrequency.equals(that.recurrenceFrequency) : that.recurrenceFrequency != null) return false; if (recurrenceMax != null ? !recurrenceMax.equals(that.recurrenceMax) : that.recurrenceMax != null) return false; if (recurrenceMonthBits != null ? !recurrenceMonthBits.equals(that.recurrenceMonthBits) : that.recurrenceMonthBits != null) return false; if (recurrenceStyle != null ? !recurrenceStyle.equals(that.recurrenceStyle) : that.recurrenceStyle != null) return false; if (recurrenceType != null ? !recurrenceType.equals(that.recurrenceType) : that.recurrenceType != null) return false; if (showTimeAs != null ? 
!showTimeAs.equals(that.showTimeAs) : that.showTimeAs != null) return false; if (subject != null ? !subject.equals(that.subject) : that.subject != null) return false; if (timeZoneID != null ? !timeZoneID.equals(that.timeZoneID) : that.timeZoneID != null) return false; return !(type != null ? !type.equals(that.type) : that.type != null); } @Override public int hashCode() { int result = id != null ? id.hashCode() : 0; result = 31 * result + (appointmentUUID != null ? appointmentUUID.hashCode() : 0); result = 31 * result + (attendees != null ? attendees.hashCode() : 0); result = 31 * result + (candidateReference != null ? candidateReference.hashCode() : 0); result = 31 * result + (childAppointments != null ? childAppointments.hashCode() : 0); result = 31 * result + (clientContactReference != null ? clientContactReference.hashCode() : 0); result = 31 * result + (communicationMethod != null ? communicationMethod.hashCode() : 0); result = 31 * result + (dateAdded != null ? dateAdded.hashCode() : 0); result = 31 * result + (dateBegin != null ? dateBegin.hashCode() : 0); result = 31 * result + (dateEnd != null ? dateEnd.hashCode() : 0); result = 31 * result + (dateLastModified != null ? dateLastModified.hashCode() : 0); result = 31 * result + (description != null ? description.hashCode() : 0); result = 31 * result + (isAllDay != null ? isAllDay.hashCode() : 0); result = 31 * result + (isDeleted != null ? isDeleted.hashCode() : 0); result = 31 * result + (isPrivate != null ? isPrivate.hashCode() : 0); result = 31 * result + (jobOrder != null ? jobOrder.hashCode() : 0); result = 31 * result + (lead != null ? lead.hashCode() : 0); result = 31 * result + (location != null ? location.hashCode() : 0); result = 31 * result + (migrateGUID != null ? migrateGUID.hashCode() : 0); result = 31 * result + (notificationMinutes != null ? notificationMinutes.hashCode() : 0); result = 31 * result + (opportunity != null ? opportunity.hashCode() : 0); result = 31 * result + (owner != null ? owner.hashCode() : 0); result = 31 * result + (parentAppointment != null ? parentAppointment.hashCode() : 0); result = 31 * result + (placement != null ? placement.hashCode() : 0); result = 31 * result + (recurrenceDayBits != null ? recurrenceDayBits.hashCode() : 0); result = 31 * result + (recurrenceFrequency != null ? recurrenceFrequency.hashCode() : 0); result = 31 * result + (recurrenceMax != null ? recurrenceMax.hashCode() : 0); result = 31 * result + (recurrenceMonthBits != null ? recurrenceMonthBits.hashCode() : 0); result = 31 * result + (recurrenceStyle != null ? recurrenceStyle.hashCode() : 0); result = 31 * result + (recurrenceType != null ? recurrenceType.hashCode() : 0); result = 31 * result + (showTimeAs != null ? showTimeAs.hashCode() : 0); result = 31 * result + (subject != null ? subject.hashCode() : 0); result = 31 * result + (timeZoneID != null ? timeZoneID.hashCode() : 0); result = 31 * result + (type != null ? type.hashCode() : 0); return result; } }
BH-39745: removing association code that isn't needed
src/main/java/com/bullhornsdk/data/model/entity/core/standard/Appointment.java
BH-39745: removing association code that isn't needed
Java
mit
6ec06f158892e6806cf41d9a40e10e5858aa5a0d
0
margnus1/micro-c,margnus1/micro-c
package mips; /** * Created by Magnus on 2014-03-05. */ public abstract class MipsOutputStream { public abstract void emitProcedure(String label); public abstract void emitInstruction(String instruction, String... arguments); public abstract void emitLabel(String label); /* Convenience wrappers around emitInstruction */ public void emitMemory(String instruction, MipsRegister data, int offset, MipsRegister addrReg) { if (offset == 0) emitInstruction(instruction, data.toString(), "(" + addrReg + ")"); else emitInstruction(instruction, data.toString(), offset + "(" + addrReg + ")"); } public void emitMemory(String instruction, MipsRegister data, String label, MipsRegister addrReg) { if (addrReg.equals(MipsRegister.ZERO)) emitInstruction(instruction, data.toString(), label); else emitInstruction(instruction, data.toString(), label + "(" + addrReg + ")"); } public void emitInstruction(Object instruction, Object... arguments) { String[] argStrings = new String[arguments.length]; for (int i = 0; i < arguments.length; i++) argStrings[i] = arguments[i].toString(); emitInstruction(instruction.toString(), argStrings); } @Deprecated public void emitRType(MipsROp op, MipsRegister rs, MipsRegister rt, MipsRegister rd) { emitInstruction(op.toString().toLowerCase(), rs.toString(), rt.toString(), rd.toString()); } @Deprecated public void emitIType(MipsIOp op, MipsRegister rs, MipsRegister rt, int immediate) { emitInstruction(op.toString().toLowerCase(), rs.toString(), rt.toString(), Integer.toString(immediate)); } @Deprecated public void emitIType(MipsIOp op, MipsRegister rs, MipsRegister rt, String label) { emitInstruction(op.toString().toLowerCase(), rs.toString(), rt.toString(), label); } @Deprecated public void emitJType(MipsJOp op, String label) { emitInstruction(op.toString().toLowerCase(), label); } public abstract void emitGlobal(String label, int size); }
src/mips/MipsOutputStream.java
package mips; /** * Created by Magnus on 2014-03-05. */ public abstract class MipsOutputStream { public abstract void emitProcedure(String label); public abstract void emitInstruction(String instruction, String... arguments); public abstract void emitLabel(String label); /* Convenience wrappers around emitInstruction */ public void emitMemory(String instruction, MipsRegister data, int offset, MipsRegister addrReg) { emitInstruction(instruction, data.toString(), offset + "(" + addrReg + ")"); } public void emitMemory(String instruction, MipsRegister data, String label, MipsRegister addrReg) { emitInstruction(instruction, data.toString(), label + "(" + addrReg + ")"); } public void emitInstruction(Object instruction, Object... arguments) { String[] argStrings = new String[arguments.length]; for (int i = 0; i < arguments.length; i++) argStrings[i] = arguments[i].toString(); emitInstruction(instruction.toString(), argStrings); } @Deprecated public void emitRType(MipsROp op, MipsRegister rs, MipsRegister rt, MipsRegister rd) { emitInstruction(op.toString().toLowerCase(), rs.toString(), rt.toString(), rd.toString()); } @Deprecated public void emitIType(MipsIOp op, MipsRegister rs, MipsRegister rt, int immediate) { emitInstruction(op.toString().toLowerCase(), rs.toString(), rt.toString(), Integer.toString(immediate)); } @Deprecated public void emitIType(MipsIOp op, MipsRegister rs, MipsRegister rt, String label) { emitInstruction(op.toString().toLowerCase(), rs.toString(), rt.toString(), label); } @Deprecated public void emitJType(MipsJOp op, String label) { emitInstruction(op.toString().toLowerCase(), label); } public abstract void emitGlobal(String label, int size); }
Nicer output of memory instructions
src/mips/MipsOutputStream.java
Nicer output of memory instructions
Java
mit
e4d5f2326a8474160d71b77c311dbff55c2f83b7
0
braintree/braintree_android,braintree/braintree_android,braintree/braintree_android,braintree/braintree_android
package com.braintreepayments.api; import android.content.Context; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; /** * Fetches information about which payment methods are preferred on the device. * Used to determine which payment methods are given preference in your UI, * not whether they are presented entirely. * This class is currently in beta and may change in future releases. */ public class PreferredPaymentMethodsClient { private final BraintreeClient braintreeClient; private final DeviceInspector deviceInspector; public PreferredPaymentMethodsClient(@NonNull BraintreeClient braintreeClient) { this(braintreeClient, new DeviceInspector()); } @VisibleForTesting PreferredPaymentMethodsClient(BraintreeClient braintreeClient, DeviceInspector deviceInspector) { this.braintreeClient = braintreeClient; this.deviceInspector = deviceInspector; } /** * Fetches information about which payment methods should be given preference in your UI. * * @param context Android Context * @param callback {@link PreferredPaymentMethodsCallback} */ public void fetchPreferredPaymentMethods(@NonNull final Context context, @NonNull final PreferredPaymentMethodsCallback callback) { final Context applicationContext = context.getApplicationContext(); final boolean isVenmoAppInstalled = deviceInspector.isVenmoInstalled(applicationContext); final boolean isPayPalAppInstalled = deviceInspector.isPayPalInstalled(applicationContext); final String venmoAppInstalledEvent = String.format("preferred-payment-methods.venmo.app-installed.%b", isVenmoAppInstalled); braintreeClient.sendAnalyticsEvent(venmoAppInstalledEvent); if (isPayPalAppInstalled) { braintreeClient.sendAnalyticsEvent("preferred-payment-methods.paypal.app-installed.true"); callback.onResult(new PreferredPaymentMethodsResult() .isPayPalPreferred(true) .isVenmoPreferred(isVenmoAppInstalled)); return; } braintreeClient.getConfiguration(new ConfigurationCallback() { @Override public void onResult(@Nullable Configuration configuration, @Nullable Exception error) { boolean isGraphQLDisabled = (configuration == null || !configuration.isGraphQLEnabled()); if (isGraphQLDisabled) { braintreeClient.sendAnalyticsEvent("preferred-payment-methods.api-disabled"); callback.onResult(new PreferredPaymentMethodsResult() .isPayPalPreferred(isPayPalAppInstalled) .isVenmoPreferred(isVenmoAppInstalled)); return; } final String query = "{ \"query\": \"query PreferredPaymentMethods { preferredPaymentMethods { paypalPreferred } }\" }"; braintreeClient.sendGraphQLPOST(query, new HttpResponseCallback() { @Override public void success(String responseBody) { PreferredPaymentMethodsResult result = PreferredPaymentMethodsResult.fromJSON(responseBody, isVenmoAppInstalled); String payPalPreferredEvent = String.format("preferred-payment-methods.paypal.api-detected.%b", result.isPayPalPreferred()); braintreeClient.sendAnalyticsEvent(payPalPreferredEvent); callback.onResult(result); } @Override public void failure(Exception exception) { braintreeClient.sendAnalyticsEvent("preferred-payment-methods.api-error"); callback.onResult(new PreferredPaymentMethodsResult() .isPayPalPreferred(false) .isVenmoPreferred(isVenmoAppInstalled)); } }); } }); } }
BraintreeCore/src/main/java/com/braintreepayments/api/PreferredPaymentMethodsClient.java
package com.braintreepayments.api; import android.content.Context; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.VisibleForTesting; /** * Fetches information about which payment methods are preferred on the device. * Used to determine which payment methods are given preference in your UI, * not whether they are presented entirely. * This class is currently in beta and may change in future releases. */ public class PreferredPaymentMethodsClient { private final BraintreeClient braintreeClient; private final DeviceInspector deviceInspector; public PreferredPaymentMethodsClient(@NonNull BraintreeClient braintreeClient) { this(braintreeClient, new DeviceInspector()); } @VisibleForTesting PreferredPaymentMethodsClient(@NonNull BraintreeClient braintreeClient, @NonNull DeviceInspector deviceInspector) { this.braintreeClient = braintreeClient; this.deviceInspector = deviceInspector; } /** * Fetches information about which payment methods should be given preference in your UI. * * @param context Android Context * @param callback {@link PreferredPaymentMethodsCallback} */ public void fetchPreferredPaymentMethods(@NonNull final Context context, @NonNull final PreferredPaymentMethodsCallback callback) { final Context applicationContext = context.getApplicationContext(); final boolean isVenmoAppInstalled = deviceInspector.isVenmoInstalled(applicationContext); final boolean isPayPalAppInstalled = deviceInspector.isPayPalInstalled(applicationContext); final String venmoAppInstalledEvent = String.format("preferred-payment-methods.venmo.app-installed.%b", isVenmoAppInstalled); braintreeClient.sendAnalyticsEvent(venmoAppInstalledEvent); if (isPayPalAppInstalled) { braintreeClient.sendAnalyticsEvent("preferred-payment-methods.paypal.app-installed.true"); callback.onResult(new PreferredPaymentMethodsResult() .isPayPalPreferred(true) .isVenmoPreferred(isVenmoAppInstalled)); return; } braintreeClient.getConfiguration(new ConfigurationCallback() { @Override public void onResult(@Nullable Configuration configuration, @Nullable Exception error) { boolean isGraphQLDisabled = (configuration == null || !configuration.isGraphQLEnabled()); if (isGraphQLDisabled) { braintreeClient.sendAnalyticsEvent("preferred-payment-methods.api-disabled"); callback.onResult(new PreferredPaymentMethodsResult() .isPayPalPreferred(isPayPalAppInstalled) .isVenmoPreferred(isVenmoAppInstalled)); return; } final String query = "{ \"query\": \"query PreferredPaymentMethods { preferredPaymentMethods { paypalPreferred } }\" }"; braintreeClient.sendGraphQLPOST(query, new HttpResponseCallback() { @Override public void success(String responseBody) { PreferredPaymentMethodsResult result = PreferredPaymentMethodsResult.fromJSON(responseBody, isVenmoAppInstalled); String payPalPreferredEvent = String.format("preferred-payment-methods.paypal.api-detected.%b", result.isPayPalPreferred()); braintreeClient.sendAnalyticsEvent(payPalPreferredEvent); callback.onResult(result); } @Override public void failure(Exception exception) { braintreeClient.sendAnalyticsEvent("preferred-payment-methods.api-error"); callback.onResult(new PreferredPaymentMethodsResult() .isPayPalPreferred(false) .isVenmoPreferred(isVenmoAppInstalled)); } }); } }); } }
Remove non-public PreferredPaymentMethodsClient constructor annotation. Signed-off-by: Steven Shropshire <e0eaefab33cb0c3d300c8879d154a2846a56c9dd@getbraintree.com>
BraintreeCore/src/main/java/com/braintreepayments/api/PreferredPaymentMethodsClient.java
Remove non-public PreferredPaymentMethodsClient constructor annotation.
Java
epl-1.0
bb041c90962ac19758f67b8dd636a18134ea210c
0
lalitsolanki12/eclipse4book,vogella/eclipse4book,vogellacompany/eclipse4book
package com.example.e4.rcp.todo.parts; import java.util.HashMap; import java.util.List; import javax.annotation.PostConstruct; import javax.inject.Inject; import org.eclipse.core.databinding.beans.BeanProperties; import org.eclipse.core.databinding.observable.list.WritableList; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.jobs.IJobManager; import org.eclipse.core.runtime.jobs.Job; import org.eclipse.core.runtime.jobs.ProgressProvider; import org.eclipse.e4.core.di.annotations.Optional; import org.eclipse.e4.core.services.events.IEventBroker; import org.eclipse.e4.core.services.nls.Translation; import org.eclipse.e4.ui.di.Focus; import org.eclipse.e4.ui.di.UIEventTopic; import org.eclipse.e4.ui.di.UISynchronize; import org.eclipse.e4.ui.model.application.MApplication; import org.eclipse.e4.ui.model.application.ui.basic.MWindow; import org.eclipse.e4.ui.model.application.ui.menu.MToolControl; import org.eclipse.e4.ui.services.EMenuService; import org.eclipse.e4.ui.workbench.modeling.EModelService; import org.eclipse.e4.ui.workbench.modeling.ESelectionService; import org.eclipse.jface.databinding.viewers.ViewerSupport; import org.eclipse.jface.viewers.CellEditor; import org.eclipse.jface.viewers.EditingSupport; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TableViewerColumn; import org.eclipse.jface.viewers.TextCellEditor; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.Text; import org.osgi.service.event.Event; import com.example.e4.rcp.todo.events.MyEventConstants; import com.example.e4.rcp.todo.i18n.Messages; import com.example.e4.rcp.todo.model.ITodoService; import com.example.e4.rcp.todo.model.Todo; public class TodoOverviewPart { private Button btnNewButton; private Label lblNewLabel; private TableViewer viewer; @Inject UISynchronize sync; @Inject ESelectionService service; @Inject EModelService modelService; @Inject MApplication application; @Inject IEventBroker broker; @Inject ITodoService model; private WritableList writableList; protected String searchString = ""; private TableViewerColumn colDescription; private TableViewerColumn colSummary; @PostConstruct public void createControls(Composite parent, EMenuService menuService, @Translation Messages message) { parent.setLayout(new GridLayout(1, false)); btnNewButton = new Button(parent, SWT.PUSH); btnNewButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { final MToolControl toolControl = (MToolControl) modelService.find( "statusbar", application); toolControl.setVisible(true); Job job = new Job("loading") { @Override protected IStatus run(IProgressMonitor monitor) { toolControl.setVisible(false); final List<Todo> list = model.getTodos(); 
System.out.println(list); broker.post( MyEventConstants.TOPIC_TODOS_CHANGED, new Event(MyEventConstants.TOPIC_TODOS_CHANGED, new HashMap<String, String>())); return Status.OK_STATUS; } }; if (toolControl != null) { IJobManager jobManager = job.getJobManager(); Object widget = toolControl.getObject(); final IProgressMonitor p = (IProgressMonitor) widget; ProgressProvider provider = new ProgressProvider() { @Override public IProgressMonitor createMonitor(Job job) { return p; } }; jobManager.setProgressProvider(provider); } job.schedule(); } }); btnNewButton.setText("Load Data"); Text search = new Text(parent, SWT.SEARCH | SWT.CANCEL | SWT.ICON_SEARCH); // Assuming that GridLayout is used search.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1)); search.setMessage("Filter"); // Filter at every keystroke search.addModifyListener(new ModifyListener() { @Override public void modifyText(ModifyEvent e) { Text source = (Text) e.getSource(); searchString = source.getText(); // Trigger update in the viewer viewer.refresh(); } }); // SWT.SEARCH | SWT.CANCEL not supported under Windows7 // This does not work under Windows7 search.addSelectionListener(new SelectionAdapter() { @Override public void widgetDefaultSelected(SelectionEvent e) { if (e.detail == SWT.CANCEL) { Text text = (Text) e.getSource(); text.setText(""); // } } }); viewer = new TableViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION); Table table = viewer.getTable(); table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); table.setHeaderVisible(true); table.setLinesVisible(true); colSummary = new TableViewerColumn(viewer, SWT.NONE); colSummary.getColumn().setWidth(100); colSummary.setEditingSupport(new EditingSupport(viewer) { @Override protected void setValue(Object element, Object value) { Todo todo = (Todo) element; todo.setSummary(String.valueOf(value)); viewer.refresh(); } @Override protected Object getValue(Object element) { Todo todo = (Todo) element; return todo.getSummary(); } @Override protected CellEditor getCellEditor(Object element) { return new TextCellEditor(viewer.getTable(), SWT.NONE); } @Override protected boolean canEdit(Object element) { return true; } }); colDescription = new TableViewerColumn(viewer, SWT.NONE); colDescription.getColumn().setWidth(100); // We search in the summary and description field viewer.addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { Todo todo = (Todo) element; return todo.getSummary().contains(searchString) || todo.getDescription().contains(searchString); } }); viewer.addSelectionChangedListener(new ISelectionChangedListener() { @Override public void selectionChanged(SelectionChangedEvent event) { IStructuredSelection selection = (IStructuredSelection) viewer .getSelection(); service.setSelection(selection.getFirstElement()); } }); menuService.registerContextMenu(viewer.getControl(), "com.example.e4.rcp.todo.popupmenu.table"); writableList = new WritableList(model.getTodos(), Todo.class); ViewerSupport.bind( viewer, writableList, BeanProperties.values(new String[] { Todo.FIELD_SUMMARY, Todo.FIELD_DESCRIPTION })); translateTable(message); } @Inject @Optional private void subscribeTopicTodoAllTopics( @UIEventTopic(MyEventConstants.TOPIC_TODO_ALLTOPICS) Event event) { if (viewer != null) { writableList.clear(); writableList.addAll(model.getTodos()); } } @Focus private void setFocus() { btnNewButton.setFocus(); } @Inject public void translateTable(@Translation Messages 
message){ if (viewer !=null && !viewer.getTable().isDisposed()) { colSummary.getColumn().setText(message.todo_summary); colDescription.getColumn().setText(message.todo_description); } } }
com.example.e4.rcp.todo/src/com/example/e4/rcp/todo/parts/TodoOverviewPart.java
package com.example.e4.rcp.todo.parts; import java.util.HashMap; import java.util.List; import javax.annotation.PostConstruct; import javax.inject.Inject; import org.eclipse.core.databinding.beans.BeanProperties; import org.eclipse.core.databinding.observable.list.WritableList; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.jobs.IJobManager; import org.eclipse.core.runtime.jobs.Job; import org.eclipse.core.runtime.jobs.ProgressProvider; import org.eclipse.e4.core.di.annotations.Optional; import org.eclipse.e4.core.services.events.IEventBroker; import org.eclipse.e4.core.services.nls.Translation; import org.eclipse.e4.ui.di.Focus; import org.eclipse.e4.ui.di.UIEventTopic; import org.eclipse.e4.ui.di.UISynchronize; import org.eclipse.e4.ui.model.application.MApplication; import org.eclipse.e4.ui.model.application.ui.basic.MWindow; import org.eclipse.e4.ui.model.application.ui.menu.MToolControl; import org.eclipse.e4.ui.services.EMenuService; import org.eclipse.e4.ui.workbench.modeling.EModelService; import org.eclipse.e4.ui.workbench.modeling.ESelectionService; import org.eclipse.jface.databinding.viewers.ViewerSupport; import org.eclipse.jface.viewers.CellEditor; import org.eclipse.jface.viewers.EditingSupport; import org.eclipse.jface.viewers.ISelectionChangedListener; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.jface.viewers.TableViewer; import org.eclipse.jface.viewers.TableViewerColumn; import org.eclipse.jface.viewers.TextCellEditor; import org.eclipse.jface.viewers.Viewer; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.SWT; import org.eclipse.swt.events.ModifyEvent; import org.eclipse.swt.events.ModifyListener; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Button; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label; import org.eclipse.swt.widgets.Table; import org.eclipse.swt.widgets.Text; import org.osgi.service.event.Event; import com.example.e4.rcp.todo.events.MyEventConstants; import com.example.e4.rcp.todo.i18n.Messages; import com.example.e4.rcp.todo.model.ITodoService; import com.example.e4.rcp.todo.model.Todo; public class TodoOverviewPart { private Button btnNewButton; private Label lblNewLabel; private TableViewer viewer; @Inject UISynchronize sync; @Inject ESelectionService service; @Inject EModelService modelService; @Inject MApplication application; @Inject IEventBroker broker; @Inject ITodoService model; private WritableList writableList; protected String searchString = ""; private TableViewerColumn colDescription; private TableViewerColumn colSummary; @PostConstruct public void createControls(Composite parent, final MWindow window, EMenuService menuService, @Translation Messages message) { parent.setLayout(new GridLayout(1, false)); btnNewButton = new Button(parent, SWT.PUSH); btnNewButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { final MToolControl toolControl = (MToolControl) modelService.find( "statusbar", application); toolControl.setVisible(true); Job job = new Job("loading") { @Override protected IStatus run(IProgressMonitor monitor) { toolControl.setVisible(false); final List<Todo> list = 
model.getTodos(); System.out.println(list); broker.post( MyEventConstants.TOPIC_TODOS_CHANGED, new Event(MyEventConstants.TOPIC_TODOS_CHANGED, new HashMap<String, String>())); return Status.OK_STATUS; } }; if (toolControl != null) { IJobManager jobManager = job.getJobManager(); Object widget = toolControl.getObject(); final IProgressMonitor p = (IProgressMonitor) widget; ProgressProvider provider = new ProgressProvider() { @Override public IProgressMonitor createMonitor(Job job) { return p; } }; jobManager.setProgressProvider(provider); } job.schedule(); } }); btnNewButton.setText("Load Data"); Text search = new Text(parent, SWT.SEARCH | SWT.CANCEL | SWT.ICON_SEARCH); // Assuming that GridLayout is used search.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, false, false, 1, 1)); search.setMessage("Filter"); // Filter at every keystroke search.addModifyListener(new ModifyListener() { @Override public void modifyText(ModifyEvent e) { Text source = (Text) e.getSource(); searchString = source.getText(); // Trigger update in the viewer viewer.refresh(); } }); // SWT.SEARCH | SWT.CANCEL not supported under Windows7 // This does not work under Windows7 search.addSelectionListener(new SelectionAdapter() { @Override public void widgetDefaultSelected(SelectionEvent e) { if (e.detail == SWT.CANCEL) { Text text = (Text) e.getSource(); text.setText(""); // } } }); viewer = new TableViewer(parent, SWT.MULTI | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION); Table table = viewer.getTable(); table.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1)); table.setHeaderVisible(true); table.setLinesVisible(true); colSummary = new TableViewerColumn(viewer, SWT.NONE); colSummary.getColumn().setWidth(100); colSummary.setEditingSupport(new EditingSupport(viewer) { @Override protected void setValue(Object element, Object value) { Todo todo = (Todo) element; todo.setSummary(String.valueOf(value)); viewer.refresh(); } @Override protected Object getValue(Object element) { Todo todo = (Todo) element; return todo.getSummary(); } @Override protected CellEditor getCellEditor(Object element) { return new TextCellEditor(viewer.getTable(), SWT.NONE); } @Override protected boolean canEdit(Object element) { return true; } }); colDescription = new TableViewerColumn(viewer, SWT.NONE); colDescription.getColumn().setWidth(100); // We search in the summary and description field viewer.addFilter(new ViewerFilter() { @Override public boolean select(Viewer viewer, Object parentElement, Object element) { Todo todo = (Todo) element; return todo.getSummary().contains(searchString) || todo.getDescription().contains(searchString); } }); viewer.addSelectionChangedListener(new ISelectionChangedListener() { @Override public void selectionChanged(SelectionChangedEvent event) { IStructuredSelection selection = (IStructuredSelection) viewer .getSelection(); service.setSelection(selection.getFirstElement()); } }); menuService.registerContextMenu(viewer.getControl(), "com.example.e4.rcp.todo.popupmenu.table"); writableList = new WritableList(model.getTodos(), Todo.class); ViewerSupport.bind( viewer, writableList, BeanProperties.values(new String[] { Todo.FIELD_SUMMARY, Todo.FIELD_DESCRIPTION })); translateTable(message); } @Inject @Optional private void subscribeTopicTodoAllTopics( @UIEventTopic(MyEventConstants.TOPIC_TODO_ALLTOPICS) Event event) { if (viewer != null) { writableList.clear(); writableList.addAll(model.getTodos()); } } @Focus private void setFocus() { btnNewButton.setFocus(); } @Inject public void 
translateTable(@Translation Messages message){ if (viewer !=null && !viewer.getTable().isDisposed()) { colSummary.getColumn().setText(message.todo_summary); colDescription.getColumn().setText(message.todo_description); } } }
Removes unnecessary injected MWindow from TodoOverviewPart
com.example.e4.rcp.todo/src/com/example/e4/rcp/todo/parts/TodoOverviewPart.java
Removes unnecessary injected MWindow from TodoOverviewPart
Java
mpl-2.0
3ad2139642348e31a114b5388a3ac338320dde10
0
joansmith/RoyalCommands,RoyalDev/RoyalCommands
package org.royaldev.royalcommands.rcommands;

import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;

public class Rank implements CommandExecutor {

    RoyalCommands plugin;

    public Rank(RoyalCommands plugin) {
        this.plugin = plugin;
    }

    @Override
    public boolean onCommand(CommandSender cs, Command cmd, String label, String[] args) {
        if (cmd.getName().equalsIgnoreCase("rank")) {
            if (!plugin.isAuthorized(cs, "rcmds.rank")) {
                RUtils.dispNoPerms(cs);
                return true;
            }
            if (args.length < 1) {
                cs.sendMessage(cmd.getDescription());
                return false;
            }
            Player victim = plugin.getServer().getPlayer(args[0]);
            if (victim == null || plugin.isVanished(victim)) {
                cs.sendMessage(ChatColor.RED + "That player does not exist!");
                return true;
            }
            String rank = RoyalCommands.permission.getPrimaryGroup(victim);
            cs.sendMessage(ChatColor.BLUE + "The user " + ChatColor.GRAY + victim.getName() + ChatColor.BLUE + " has the group " + ChatColor.GRAY + rank + ChatColor.BLUE + ".");
            return true;
        }
        return false;
    }
}
src/org/royaldev/royalcommands/rcommands/Rank.java
package org.royaldev.royalcommands.rcommands;

import org.bukkit.ChatColor;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import org.royaldev.royalcommands.RUtils;
import org.royaldev.royalcommands.RoyalCommands;

public class Rank implements CommandExecutor {

    RoyalCommands plugin;

    public Rank(RoyalCommands plugin) {
        this.plugin = plugin;
    }

    @Override
    public boolean onCommand(CommandSender cs, Command cmd, String label, String[] args) {
        if (cmd.getName().equalsIgnoreCase("rank")) {
            if (!plugin.isAuthorized(cs, "rcmds.rank")) {
                RUtils.dispNoPerms(cs);
                return true;
            }
            if (args.length < 1) {
                cs.sendMessage(cmd.getDescription());
                return false;
            }
            Player victim = plugin.getServer().getPlayer(args[0]);
            if (victim == null || plugin.isVanished(victim)) {
                cs.sendMessage(ChatColor.RED + "That player does not exist!");
                return true;
            }
            String rank = RoyalCommands.permission.getPrimaryGroup(victim);
            cs.sendMessage("The user " + victim.getName() + " has the group " + rank + ".");
            return true;
        }
        return false;
    }
}
Added: Color /rank
src/org/royaldev/royalcommands/rcommands/Rank.java
Added: Color /rank
Java
agpl-3.0
6fee85db7fb4fdc8b9b8a0d5145aeae538220a7b
0
Skelril/Aurora
package com.skelril.aurora.city.engine.arena.factory; import com.sk89q.commandbook.CommandBook; import com.sk89q.worldedit.blocks.ItemID; import com.sk89q.worldguard.protection.regions.ProtectedRegion; import com.skelril.aurora.city.engine.arena.AbstractRegionedArena; import com.skelril.aurora.util.ChatUtil; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.entity.Entity; import org.bukkit.entity.Item; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.potion.PotionType; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; public class FactoryMech extends AbstractRegionedArena { private final CommandBook inst = CommandBook.inst(); private final Logger log = inst.getLogger(); private final Server server = CommandBook.server(); public FactoryMech(World world, ProtectedRegion region) { super(world, region); } ConcurrentHashMap<Integer, Integer> typeAmtHash = new ConcurrentHashMap<>(); private static final List<Integer> wanted = new ArrayList<>(); static { wanted.add(ItemID.GLASS_BOTTLE); wanted.add(ItemID.NETHER_WART_SEED); wanted.add(ItemID.LIGHTSTONE_DUST); wanted.add(ItemID.REDSTONE_DUST); wanted.add(ItemID.SULPHUR); wanted.add(ItemID.MAGMA_CREAM); wanted.add(ItemID.SUGAR); wanted.add(ItemID.GLISTERING_MELON); wanted.add(ItemID.SPIDER_EYE); wanted.add(ItemID.GHAST_TEAR); wanted.add(ItemID.BLAZE_POWDER); wanted.add(ItemID.FERMENTED_SPIDER_EYE); wanted.add(ItemID.GOLDEN_CARROT); } public List<ItemStack> process() { Player[] playerList = getContainedPlayers(1); ItemStack workingStack; int total; Entity[] contained = getContainedEntities(); if (contained.length > 0) ChatUtil.sendNotice(playerList, "Processing..."); for (Entity e : contained) { // Kill contained living entities if (e instanceof LivingEntity) { ((LivingEntity) e).setHealth(0); continue; } // Find items and destroy those unwanted if (e instanceof Item) { workingStack = ((Item) e).getItemStack(); // Add the item to the list if (wanted.contains(workingStack.getTypeId())) { total = workingStack.getAmount(); ChatUtil.sendNotice(playerList, "Found: " + total + " " + workingStack.getType().toString() + "."); if (typeAmtHash.containsKey(workingStack.getTypeId())) { total += typeAmtHash.get(workingStack.getTypeId()); } typeAmtHash.put(workingStack.getTypeId(), total); } } e.remove(); } // Check these to avoid doing more calculations than need be int bottles = typeAmtHash.containsKey(ItemID.GLASS_BOTTLE) ? typeAmtHash.get(ItemID.GLASS_BOTTLE) : 0; int max = typeAmtHash.containsKey(ItemID.NETHER_WART_SEED) ? 
typeAmtHash.get(ItemID.NETHER_WART_SEED) : 0; if (bottles <= 0 || max <= 0) return new ArrayList<>(); // Figure out the potion the player is trying to make List<Integer> using = new ArrayList<>(); PotionType target; if (typeAmtHash.containsKey(ItemID.MAGMA_CREAM)) { target = PotionType.FIRE_RESISTANCE; using.add(ItemID.MAGMA_CREAM); } else if (typeAmtHash.containsKey(ItemID.SUGAR)) { target = PotionType.SPEED; using.add(ItemID.SUGAR); } else if (typeAmtHash.containsKey(ItemID.GLISTERING_MELON)) { target = PotionType.INSTANT_HEAL; using.add(ItemID.GLISTERING_MELON); } else if (typeAmtHash.containsKey(ItemID.SPIDER_EYE)) { target = PotionType.POISON; using.add(ItemID.SPIDER_EYE); } else if (typeAmtHash.containsKey(ItemID.GHAST_TEAR)) { target = PotionType.REGEN; using.add(ItemID.GHAST_TEAR); } else if (typeAmtHash.containsKey(ItemID.BLAZE_POWDER)) { target = PotionType.STRENGTH; using.add(ItemID.BLAZE_POWDER); } else if (typeAmtHash.containsKey(ItemID.FERMENTED_SPIDER_EYE)) { target = PotionType.WEAKNESS; using.add(ItemID.FERMENTED_SPIDER_EYE); } else if (typeAmtHash.containsKey(ItemID.GOLDEN_CARROT)) { target = PotionType.NIGHT_VISION; using.add(ItemID.GOLDEN_CARROT); } else return new ArrayList<>(); // Always used using.add(ItemID.GLASS_BOTTLE); using.add(ItemID.NETHER_WART_SEED); // Setup some important information boolean duration, potency, splash; duration = typeAmtHash.keySet().contains(ItemID.REDSTONE_DUST); potency = typeAmtHash.keySet().contains(ItemID.LIGHTSTONE_DUST); splash = typeAmtHash.keySet().contains(ItemID.SULPHUR); // Adapt as needed based on the information above if (duration && !target.isInstant()) { using.add(ItemID.REDSTONE_DUST); } else if (potency) { using.add(ItemID.LIGHTSTONE_DUST); } if (splash) { using.add(ItemID.SULPHUR); } // Find the max amount skipping glass bottles (too be checked later) for (Integer used : using) { if (used == ItemID.GLASS_BOTTLE) continue; max = Math.min(max, typeAmtHash.get(used)); } // This is confusing, essentially we are dividing the bottle count into three pieces // That allows us to figure out how many potion sets can be made // We will later expand the potion sets again max = (int) Math.min(max, Math.floor(bottles / 3)); if (max <= 0) return new ArrayList<>(); // Remove the used ingredients from the system int newAmt; for (Map.Entry<Integer, Integer> entry : typeAmtHash.entrySet()) { if (using.contains(entry.getKey())) { newAmt = entry.getValue() - (entry.getKey() == ItemID.GLASS_BOTTLE ? max * 3 : max); if (newAmt > 0) typeAmtHash.put(entry.getKey(), newAmt); else typeAmtHash.remove(entry.getKey()); } } // Inflate potion quantity max *= 3; // Calculate damage int level = !duration && potency ? 2 : 1; short dmg = toDamageValue(target, level, splash, duration && !target.isInstant()); // Tell the player what we are making ChatUtil.sendNotice(playerList, "Brewing: " + max + " " + target.toString() + " " + (level == 1 ? "I" : "II") + " " + (splash ? "splash" : "") + " potions."); // Return the product for the que List<ItemStack> product = new ArrayList<>(); for (int i = 0; i < max; i++) product.add(new ItemStack(ItemID.POTION, 1, dmg)); return product; } /** * Copied from the Bukkit potion class * * Converts this potion to a valid potion damage short, usable for potion * item stacks. 
* * @return The damage value of this potion */ public short toDamageValue(PotionType type, int level, boolean splash, boolean extended) { short damage; if (type == PotionType.WATER) { return 0; } else if (type == null) { damage = 0; } else { damage = (short) (level - 1); damage <<= 5; damage |= (short) type.getDamageValue(); } if (splash) { damage |= 0x4000; } if (extended) { damage |= 0x40; } return damage; } }
src/main/java/com/skelril/aurora/city/engine/arena/factory/FactoryMech.java
package com.skelril.aurora.city.engine.arena.factory; import com.sk89q.commandbook.CommandBook; import com.sk89q.worldedit.blocks.ItemID; import com.sk89q.worldguard.protection.regions.ProtectedRegion; import com.skelril.aurora.city.engine.arena.AbstractRegionedArena; import com.skelril.aurora.util.ChatUtil; import org.bukkit.Server; import org.bukkit.World; import org.bukkit.entity.Entity; import org.bukkit.entity.Item; import org.bukkit.entity.LivingEntity; import org.bukkit.entity.Player; import org.bukkit.inventory.ItemStack; import org.bukkit.potion.PotionType; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.logging.Logger; public class FactoryMech extends AbstractRegionedArena { private final CommandBook inst = CommandBook.inst(); private final Logger log = inst.getLogger(); private final Server server = CommandBook.server(); public FactoryMech(World world, ProtectedRegion region) { super(world, region); } ConcurrentHashMap<Integer, Integer> typeAmtHash = new ConcurrentHashMap<>(); private static final List<Integer> wanted = new ArrayList<>(); static { wanted.add(ItemID.GLASS_BOTTLE); wanted.add(ItemID.NETHER_WART_SEED); wanted.add(ItemID.LIGHTSTONE_DUST); wanted.add(ItemID.REDSTONE_DUST); wanted.add(ItemID.SULPHUR); wanted.add(ItemID.MAGMA_CREAM); wanted.add(ItemID.SUGAR); wanted.add(ItemID.GLISTERING_MELON); wanted.add(ItemID.SPIDER_EYE); wanted.add(ItemID.GHAST_TEAR); wanted.add(ItemID.BLAZE_POWDER); wanted.add(ItemID.FERMENTED_SPIDER_EYE); wanted.add(ItemID.GOLDEN_CARROT); } public List<ItemStack> process() { Player[] playerList = getContainedPlayers(1); ItemStack workingStack; int total; Entity[] contained = getContainedEntities(); if (contained.length > 0) ChatUtil.sendNotice(playerList, "Processing..."); for (Entity e : contained) { if (e instanceof LivingEntity) ((LivingEntity) e).setHealth(0); if (e instanceof Item) { workingStack = ((Item) e).getItemStack(); if (wanted.contains(workingStack.getTypeId())) { total = workingStack.getAmount(); ChatUtil.sendNotice(playerList, "Found: " + total + " " + workingStack.getType().toString() + "."); if (typeAmtHash.containsKey(workingStack.getTypeId())) { total += typeAmtHash.get(workingStack.getTypeId()); } typeAmtHash.put(workingStack.getTypeId(), total); } e.remove(); } } boolean duration, potency, splash; duration = typeAmtHash.keySet().contains(ItemID.REDSTONE_DUST); potency = typeAmtHash.keySet().contains(ItemID.LIGHTSTONE_DUST); splash = typeAmtHash.keySet().contains(ItemID.SULPHUR); int max = typeAmtHash.containsKey(ItemID.GLASS_BOTTLE) ? 
typeAmtHash.get(ItemID.GLASS_BOTTLE) : 0; if (typeAmtHash.containsKey(ItemID.NETHER_WART_SEED)) { max = Math.min(max, typeAmtHash.get(ItemID.NETHER_WART_SEED)); } else max = 0; if (max <= 0) return new ArrayList<>(); List<Integer> using = new ArrayList<>(); PotionType target; if (typeAmtHash.containsKey(ItemID.MAGMA_CREAM)) { target = PotionType.FIRE_RESISTANCE; using.add(ItemID.MAGMA_CREAM); } else if (typeAmtHash.containsKey(ItemID.SUGAR)) { target = PotionType.SPEED; using.add(ItemID.SUGAR); } else if (typeAmtHash.containsKey(ItemID.GLISTERING_MELON)) { target = PotionType.INSTANT_HEAL; using.add(ItemID.GLISTERING_MELON); } else if (typeAmtHash.containsKey(ItemID.SPIDER_EYE)) { target = PotionType.POISON; using.add(ItemID.SPIDER_EYE); } else if (typeAmtHash.containsKey(ItemID.GHAST_TEAR)) { target = PotionType.REGEN; using.add(ItemID.GHAST_TEAR); } else if (typeAmtHash.containsKey(ItemID.BLAZE_POWDER)) { target = PotionType.STRENGTH; using.add(ItemID.BLAZE_POWDER); } else if (typeAmtHash.containsKey(ItemID.FERMENTED_SPIDER_EYE)) { target = PotionType.WEAKNESS; using.add(ItemID.FERMENTED_SPIDER_EYE); } else if (typeAmtHash.containsKey(ItemID.GOLDEN_CARROT)) { target = PotionType.NIGHT_VISION; using.add(ItemID.GOLDEN_CARROT); } else return new ArrayList<>(); using.add(ItemID.GLASS_BOTTLE); using.add(ItemID.NETHER_WART_SEED); if (duration && !target.isInstant()) { using.add(ItemID.REDSTONE_DUST); } else if (potency) { using.add(ItemID.LIGHTSTONE_DUST); } if (splash) { using.add(ItemID.SULPHUR); } for (Integer used : using) { max = Math.min(max, typeAmtHash.get(used)); } if (max <= 0) return new ArrayList<>(); int newAmt; for (Map.Entry<Integer, Integer> entry : typeAmtHash.entrySet()) { if (using.contains(entry.getKey())) { newAmt = entry.getValue() - max; if (newAmt > 0) typeAmtHash.put(entry.getKey(), newAmt); else typeAmtHash.remove(entry.getKey()); } } int level = !duration && potency ? 2 : 1; short dmg = toDamageValue(target, level, splash, duration && !target.isInstant()); ChatUtil.sendNotice(playerList, "Brewing: " + max + " " + target.toString() + " " + (level == 1 ? "I" : "II") + " " + (splash ? "splash" : "") + " potions."); List<ItemStack> product = new ArrayList<>(); for (int i = 0; i < max; i++) product.add(new ItemStack(ItemID.POTION, 1, dmg)); return product; } /** * Copied from the Bukkit potion class * * Converts this potion to a valid potion damage short, usable for potion * item stacks. * * @return The damage value of this potion */ public short toDamageValue(PotionType type, int level, boolean splash, boolean extended) { short damage; if (type == PotionType.WATER) { return 0; } else if (type == null) { damage = 0; } else { damage = (short) (level - 1); damage <<= 5; damage |= (short) type.getDamageValue(); } if (splash) { damage |= 0x4000; } if (extended) { damage |= 0x40; } return damage; } }
Fixed scaling system to be 1:3
src/main/java/com/skelril/aurora/city/engine/arena/factory/FactoryMech.java
Fixed scaling system to be 1:3
Java
apache-2.0
c9dd842edc357fe0da336a5dddeed0f4a4da20dd
0
AndroidX/androidx,AndroidX/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx,androidx/androidx,AndroidX/androidx,androidx/androidx,androidx/androidx
/* * Copyright 2019 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.camera.view; import static androidx.camera.view.transform.OutputTransform.getNormalizedToBuffer; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.Rect; import android.hardware.camera2.CameraCharacteristics; import android.os.Build; import android.util.AttributeSet; import android.util.Rational; import android.util.Size; import android.view.Display; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import android.view.Surface; import android.view.SurfaceView; import android.view.TextureView; import android.view.View; import android.view.ViewConfiguration; import android.widget.FrameLayout; import androidx.annotation.AnyThread; import androidx.annotation.ColorRes; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import androidx.annotation.UiThread; import androidx.annotation.VisibleForTesting; import androidx.annotation.experimental.UseExperimental; import androidx.camera.core.CameraControl; import androidx.camera.core.CameraInfo; import androidx.camera.core.CameraSelector; import androidx.camera.core.ExperimentalUseCaseGroup; import androidx.camera.core.FocusMeteringAction; import androidx.camera.core.Logger; import androidx.camera.core.MeteringPoint; import androidx.camera.core.MeteringPointFactory; import androidx.camera.core.Preview; import androidx.camera.core.SurfaceRequest; import androidx.camera.core.UseCase; import androidx.camera.core.UseCaseGroup; import androidx.camera.core.ViewPort; import androidx.camera.core.impl.CameraInternal; import androidx.camera.core.impl.ImageOutputConfig; import androidx.camera.core.impl.utils.Threads; import androidx.camera.view.transform.CoordinateTransform; import androidx.camera.view.transform.OutputTransform; import androidx.core.content.ContextCompat; import androidx.lifecycle.LiveData; import androidx.lifecycle.MutableLiveData; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; /** * Custom View that displays the camera feed for CameraX's {@link Preview} use case. * * <p> This class manages the preview {@link Surface}'s lifecycle. It internally uses either a * {@link TextureView} or {@link SurfaceView} to display the camera feed, and applies required * transformations on them to correctly display the preview, this involves correcting their * aspect ratio, scale and rotation. * * <p> If {@link PreviewView} uses a {@link SurfaceView} to display the preview * stream, be careful when overlapping a {@link View} that's initially not visible (either * {@link View#INVISIBLE} or {@link View#GONE}) on top of it. 
When the {@link SurfaceView} is * attached to the display window, it calls * {@link android.view.ViewParent#requestTransparentRegion(View)} which requests a computation of * the transparent regions on the display. At this point, the {@link View} isn't visible, causing * the overlapped region between the {@link SurfaceView} and the {@link View} to be * considered transparent. Later if the {@link View} becomes {@linkplain View#VISIBLE visible}, it * will not be displayed on top of {@link SurfaceView}. A way around this is to call * {@link android.view.ViewParent#requestTransparentRegion(View)} right after making the * {@link View} visible, or initially hiding the {@link View} by setting its * {@linkplain View#setAlpha(float) opacity} to 0, then setting it to 1.0F to show it. */ public final class PreviewView extends FrameLayout { private static final String TAG = "PreviewView"; @ColorRes static final int DEFAULT_BACKGROUND_COLOR = android.R.color.black; private static final ImplementationMode DEFAULT_IMPL_MODE = ImplementationMode.PERFORMANCE; // Synthetic access @SuppressWarnings("WeakerAccess") @NonNull ImplementationMode mImplementationMode = DEFAULT_IMPL_MODE; @VisibleForTesting @Nullable PreviewViewImplementation mImplementation; @NonNull final PreviewTransformation mPreviewTransform = new PreviewTransformation(); // Synthetic access @SuppressWarnings("WeakerAccess") @NonNull final MutableLiveData<StreamState> mPreviewStreamStateLiveData = new MutableLiveData<>(StreamState.IDLE); // Synthetic access @SuppressWarnings("WeakerAccess") @Nullable final AtomicReference<PreviewStreamStateObserver> mActiveStreamStateObserver = new AtomicReference<>(); // Synthetic access @SuppressWarnings("WeakerAccess") CameraController mCameraController; @NonNull PreviewViewMeteringPointFactory mPreviewViewMeteringPointFactory = new PreviewViewMeteringPointFactory(mPreviewTransform); // Detector for zoom-to-scale. @NonNull private final ScaleGestureDetector mScaleGestureDetector; @Nullable private MotionEvent mTouchUpEvent; private final OnLayoutChangeListener mOnLayoutChangeListener = (v, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) -> { boolean isSizeChanged = right - left != oldRight - oldLeft || bottom - top != oldBottom - oldTop; if (isSizeChanged) { redrawPreview(); attachToControllerIfReady(true); } }; // Synthetic access @SuppressWarnings("WeakerAccess") final Preview.SurfaceProvider mSurfaceProvider = new Preview.SurfaceProvider() { @UseExperimental(markerClass = ExperimentalUseCaseGroup.class) @Override @AnyThread public void onSurfaceRequested(@NonNull SurfaceRequest surfaceRequest) { if (!Threads.isMainThread()) { // Post on main thread to ensure thread safety. ContextCompat.getMainExecutor(getContext()).execute( () -> mSurfaceProvider.onSurfaceRequested(surfaceRequest)); return; } Logger.d(TAG, "Surface requested by Preview."); CameraInternal camera = surfaceRequest.getCamera(); surfaceRequest.setTransformationInfoListener( ContextCompat.getMainExecutor(getContext()), transformationInfo -> { Logger.d(TAG, "Preview transformation info updated. " + transformationInfo); // TODO(b/159127402): maybe switch to COMPATIBLE mode if target // rotation is not display rotation. boolean isFrontCamera = camera.getCameraInfoInternal().getLensFacing() == CameraSelector.LENS_FACING_FRONT; mPreviewTransform.setTransformationInfo(transformationInfo, surfaceRequest.getResolution(), isFrontCamera); redrawPreview(); }); mImplementation = shouldUseTextureView(surfaceRequest, mImplementationMode) ? 
new TextureViewImplementation(PreviewView.this, mPreviewTransform) : new SurfaceViewImplementation(PreviewView.this, mPreviewTransform); PreviewStreamStateObserver streamStateObserver = new PreviewStreamStateObserver(camera.getCameraInfoInternal(), mPreviewStreamStateLiveData, mImplementation); mActiveStreamStateObserver.set(streamStateObserver); camera.getCameraState().addObserver( ContextCompat.getMainExecutor(getContext()), streamStateObserver); mImplementation.onSurfaceRequested(surfaceRequest, () -> { // We've no longer needed this observer, if there is no new StreamStateObserver // (another SurfaceRequest), reset the streamState to IDLE. // This is needed for the case when unbinding preview while other use cases are // still bound. if (mActiveStreamStateObserver.compareAndSet(streamStateObserver, null)) { streamStateObserver.updatePreviewStreamState(StreamState.IDLE); } streamStateObserver.clear(); camera.getCameraState().removeObserver(streamStateObserver); }); } }; @UiThread public PreviewView(@NonNull Context context) { this(context, null); } @UiThread public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs) { this(context, attrs, 0); } @UiThread public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) { this(context, attrs, defStyleAttr, 0); } @UiThread public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); Threads.checkMainThread(); final TypedArray attributes = context.getTheme().obtainStyledAttributes(attrs, R.styleable.PreviewView, defStyleAttr, defStyleRes); if (Build.VERSION.SDK_INT >= 29) { saveAttributeDataForStyleable(context, R.styleable.PreviewView, attrs, attributes, defStyleAttr, defStyleRes); } try { final int scaleTypeId = attributes.getInteger( R.styleable.PreviewView_scaleType, mPreviewTransform.getScaleType().getId()); setScaleType(ScaleType.fromId(scaleTypeId)); int implementationModeId = attributes.getInteger(R.styleable.PreviewView_implementationMode, DEFAULT_IMPL_MODE.getId()); setImplementationMode(ImplementationMode.fromId(implementationModeId)); } finally { attributes.recycle(); } mScaleGestureDetector = new ScaleGestureDetector( context, new PinchToZoomOnScaleGestureListener()); // Set background only if it wasn't already set. A default background prevents the content // behind the PreviewView from being visible before the preview starts streaming. if (getBackground() == null) { setBackgroundColor(ContextCompat.getColor(getContext(), DEFAULT_BACKGROUND_COLOR)); } } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); addOnLayoutChangeListener(mOnLayoutChangeListener); if (mImplementation != null) { mImplementation.onAttachedToWindow(); } attachToControllerIfReady(true); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); removeOnLayoutChangeListener(mOnLayoutChangeListener); if (mImplementation != null) { mImplementation.onDetachedFromWindow(); } if (mCameraController != null) { mCameraController.clearPreviewSurface(); } } @Override public boolean onTouchEvent(@NonNull MotionEvent event) { if (mCameraController == null) { // Do not consume events if controller is not set. 
return super.onTouchEvent(event); } boolean isSingleTouch = event.getPointerCount() == 1; boolean isUpEvent = event.getAction() == MotionEvent.ACTION_UP; boolean notALongPress = event.getEventTime() - event.getDownTime() < ViewConfiguration.getLongPressTimeout(); if (isSingleTouch && isUpEvent && notALongPress) { // If the event is a click, invoke tap-to-focus and forward it to user's // OnClickListener#onClick. mTouchUpEvent = event; performClick(); // A click has been detected and forwarded. Consume the event so onClick won't be // invoked twice. return true; } return mScaleGestureDetector.onTouchEvent(event) || super.onTouchEvent(event); } @Override public boolean performClick() { if (mCameraController != null) { // mTouchUpEvent == null means it's an accessibility click. Focus at the center instead. float x = mTouchUpEvent != null ? mTouchUpEvent.getX() : getWidth() / 2f; float y = mTouchUpEvent != null ? mTouchUpEvent.getY() : getHeight() / 2f; mCameraController.onTapToFocus(mPreviewViewMeteringPointFactory, x, y); } mTouchUpEvent = null; return super.performClick(); } /** * Sets the {@link ImplementationMode} for the {@link PreviewView}. * * <p> {@link PreviewView} displays the preview with a {@link TextureView} when the * mode is {@link ImplementationMode#COMPATIBLE}, and tries to use a {@link SurfaceView} if * it is {@link ImplementationMode#PERFORMANCE} when possible, which depends on the device's * attributes (e.g. API level, camera hardware support level). If not set, the default mode * is {@link ImplementationMode#PERFORMANCE}. * * <p> This method needs to be called before the {@link Preview.SurfaceProvider} is set on * {@link Preview}. Once changed, {@link Preview.SurfaceProvider} needs to be set again. e.g. * {@code preview.setSurfaceProvider(previewView.getSurfaceProvider())}. */ @UiThread public void setImplementationMode(@NonNull final ImplementationMode implementationMode) { Threads.checkMainThread(); mImplementationMode = implementationMode; } /** * Returns the {@link ImplementationMode}. * * <p> If nothing is set via {@link #setImplementationMode}, the default * value is {@link ImplementationMode#PERFORMANCE}. * * @return The {@link ImplementationMode} for {@link PreviewView}. */ @UiThread @NonNull public ImplementationMode getImplementationMode() { Threads.checkMainThread(); return mImplementationMode; } /** * Gets a {@link Preview.SurfaceProvider} to be used with * {@link Preview#setSurfaceProvider(Executor, Preview.SurfaceProvider)}. This allows the * camera feed to start when the {@link Preview} use case is bound to a lifecycle. * * <p> The returned {@link Preview.SurfaceProvider} will provide a preview {@link Surface} to * the camera that's either managed by a {@link TextureView} or {@link SurfaceView} depending * on the {@link ImplementationMode} and the device's attributes (e.g. API level, camera * hardware support level). * * @return A {@link Preview.SurfaceProvider} to attach to a {@link Preview} use case. * @see ImplementationMode */ @UiThread @NonNull @UseExperimental(markerClass = ExperimentalUseCaseGroup.class) public Preview.SurfaceProvider getSurfaceProvider() { Threads.checkMainThread(); return mSurfaceProvider; } /** * Applies a {@link ScaleType} to the preview. * * <p> If a {@link CameraController} is attached to {@link PreviewView}, the change will take * immediate effect. It also takes immediate effect if {@link #getViewPort()} is not set in * the bound {@link UseCaseGroup}. 
Otherwise, the {@link UseCase}s need to be bound again * with the latest value of {@link #getViewPort()}. * * <p> This value can also be set in the layout XML file via the {@code app:scaleType} * attribute. * * <p> The default value is {@link ScaleType#FILL_CENTER}. * * @param scaleType A {@link ScaleType} to apply to the preview. * @attr name app:scaleType */ @UiThread public void setScaleType(@NonNull final ScaleType scaleType) { Threads.checkMainThread(); mPreviewTransform.setScaleType(scaleType); redrawPreview(); // Notify controller to re-calculate the crop rect. attachToControllerIfReady(false); } /** * Returns the {@link ScaleType} currently applied to the preview. * * <p> The default value is {@link ScaleType#FILL_CENTER}. * * @return The {@link ScaleType} currently applied to the preview. */ @UiThread @NonNull public ScaleType getScaleType() { Threads.checkMainThread(); return mPreviewTransform.getScaleType(); } /** * Gets the {@link MeteringPointFactory} for the camera currently connected to the * {@link PreviewView}, if any. * * <p> The returned {@link MeteringPointFactory} is capable of creating {@link MeteringPoint}s * from (x, y) coordinates in the {@link PreviewView}. This conversion takes into account its * {@link ScaleType}. The {@link MeteringPointFactory} is automatically adjusted if the * {@link PreviewView} layout or the {@link ScaleType} changes. * * <p> The {@link MeteringPointFactory} returns invalid {@link MeteringPoint} if the * preview is not ready, or the {@link PreviewView} dimension is zero. The invalid * {@link MeteringPoint} will cause * {@link CameraControl#startFocusAndMetering(FocusMeteringAction)} to fail but it won't * crash the application. Wait for the {@link StreamState#STREAMING} state to make sure the * preview is ready. * * @return a {@link MeteringPointFactory} * @see #getPreviewStreamState() */ @UiThread @NonNull public MeteringPointFactory getMeteringPointFactory() { Threads.checkMainThread(); return mPreviewViewMeteringPointFactory; } /** * Gets a {@link LiveData} for the preview {@link StreamState}. * * <p>There are two preview stream states, {@link StreamState#IDLE} and * {@link StreamState#STREAMING}. {@link StreamState#IDLE} indicates the preview is currently * not visible and streaming is stopped. {@link StreamState#STREAMING} means the preview is * streaming or is about to start streaming. This state guarantees the preview is visible * only when the {@link ImplementationMode} is {@link ImplementationMode#COMPATIBLE}. When in * {@link ImplementationMode#PERFORMANCE} mode, it is possible the preview becomes * visible slightly after the state changes to {@link StreamState#STREAMING}. * * <p>Apps that require a precise signal for when the preview starts should * {@linkplain #setImplementationMode(ImplementationMode) set} the implementation mode to * {@link ImplementationMode#COMPATIBLE}. * * @return A {@link LiveData} of the preview's {@link StreamState}. Apps can get the current * state with {@link LiveData#getValue()}, or register an observer with * {@link LiveData#observe} . */ @NonNull public LiveData<StreamState> getPreviewStreamState() { return mPreviewStreamStateLiveData; } /** * Returns a {@link Bitmap} representation of the content displayed on the * {@link PreviewView}, or {@code null} if the camera preview hasn't started yet. * <p> * The returned {@link Bitmap} uses the {@link Bitmap.Config#ARGB_8888} pixel format and its * dimensions are the same as this view's. 
* <p> * <strong>Do not</strong> invoke this method from a drawing method * ({@link View#onDraw(Canvas)} for instance). * <p> * If an error occurs during the copy, an empty {@link Bitmap} will be returned. * <p> * If the preview hasn't started yet, the method may return null or an empty {@link Bitmap}. Use * {@link #getPreviewStreamState()} to get the {@link StreamState} and wait for * {@link StreamState#STREAMING} to make sure the preview is started. * * @return A {@link Bitmap.Config#ARGB_8888} {@link Bitmap} representing the content * displayed on the {@link PreviewView}, or null if the camera preview hasn't started yet. */ @UiThread @Nullable public Bitmap getBitmap() { Threads.checkMainThread(); return mImplementation == null ? null : mImplementation.getBitmap(); } /** * Gets a {@link ViewPort} based on the current status of {@link PreviewView}. * * <p> Returns a {@link ViewPort} instance based on the {@link PreviewView}'s current width, * height, layout direction, scale type and display rotation. By using the {@link ViewPort}, all * the {@link UseCase}s in the {@link UseCaseGroup} will have the same output image that also * matches the aspect ratio of the {@link PreviewView}. * * @return null if the view is not currently attached or the view's width/height is zero. * @see ViewPort * @see UseCaseGroup */ @UiThread @Nullable @ExperimentalUseCaseGroup public ViewPort getViewPort() { Threads.checkMainThread(); if (getDisplay() == null) { // Returns null if the layout is not ready. return null; } return getViewPort(getDisplay().getRotation()); } /** * Gets a {@link ViewPort} with custom target rotation. * * <p>Returns a {@link ViewPort} instance based on the {@link PreviewView}'s current width, * height, layout direction, scale type and the given target rotation. * * <p>Use this method if {@link Preview}'s desired rotation is not the default display * rotation. For example, when remote display is in use and the desired rotation for the * remote display is based on the accelerometer reading. In that case, use * {@link android.view.OrientationEventListener} to obtain the target rotation and create * {@link ViewPort} as following: * <p>{@link android.view.OrientationEventListener#ORIENTATION_UNKNOWN}: orientation == -1 * <p>{@link Surface#ROTATION_0}: orientation >= 315 || orientation < 45 * <p>{@link Surface#ROTATION_90}: orientation >= 225 && orientation < 315 * <p>{@link Surface#ROTATION_180}: orientation >= 135 && orientation < 225 * <p>{@link Surface#ROTATION_270}: orientation >= 45 && orientation < 135 * * <p> Once the target rotation is obtained, use it with {@link Preview#setTargetRotation} to * update the rotation. Example: * * <pre><code> * Preview preview = new Preview.Builder().setTargetRotation(targetRotation).build(); * ViewPort viewPort = previewView.getViewPort(targetRotation); * UseCaseGroup useCaseGroup = * new UseCaseGroup.Builder().setViewPort(viewPort).addUseCase(preview).build(); * cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, useCaseGroup); * </code></pre> * * <p> Note that for non-display rotation to work, the mode must be set to * {@link ImplementationMode#COMPATIBLE}. * * @param targetRotation A rotation value, expressed as one of * {@link Surface#ROTATION_0}, {@link Surface#ROTATION_90}, * {@link Surface#ROTATION_180}, or * {@link Surface#ROTATION_270}. * @return null if the view's width/height is zero. 
* @see ImplementationMode */ @UiThread @SuppressLint("WrongConstant") @Nullable @ExperimentalUseCaseGroup public ViewPort getViewPort(@ImageOutputConfig.RotationValue int targetRotation) { Threads.checkMainThread(); if (getWidth() == 0 || getHeight() == 0) { return null; } return new ViewPort.Builder(new Rational(getWidth(), getHeight()), targetRotation) .setScaleType(getViewPortScaleType()) .setLayoutDirection(getLayoutDirection()) .build(); } /** * Converts {@link PreviewView.ScaleType} to {@link ViewPort.ScaleType}. */ private int getViewPortScaleType() { switch (getScaleType()) { case FILL_END: return ViewPort.FILL_END; case FILL_CENTER: return ViewPort.FILL_CENTER; case FILL_START: return ViewPort.FILL_START; case FIT_END: // Fallthrough case FIT_CENTER: // Fallthrough case FIT_START: return ViewPort.FIT; default: throw new IllegalStateException("Unexpected scale type: " + getScaleType()); } } // Synthetic access @SuppressWarnings("WeakerAccess") void redrawPreview() { if (mImplementation != null) { mImplementation.redrawPreview(); } mPreviewViewMeteringPointFactory.recalculate(new Size(getWidth(), getHeight()), getLayoutDirection()); } // Synthetic access @SuppressWarnings("WeakerAccess") boolean shouldUseTextureView(@NonNull SurfaceRequest surfaceRequest, @NonNull final ImplementationMode implementationMode) { // TODO(b/159127402): use TextureView if target rotation is not display rotation. boolean isLegacyDevice = surfaceRequest.getCamera().getCameraInfoInternal() .getImplementationType().equals(CameraInfo.IMPLEMENTATION_TYPE_CAMERA2_LEGACY); if (surfaceRequest.isRGBA8888Required() || Build.VERSION.SDK_INT <= 24 || isLegacyDevice) { // Force to use TextureView when the device is running android 7.0 and below, legacy // level or RGBA8888 is required. return true; } switch (implementationMode) { case COMPATIBLE: return true; case PERFORMANCE: return false; default: throw new IllegalArgumentException( "Invalid implementation mode: " + implementationMode); } } /** * The implementation mode of a {@link PreviewView}. * * <p> User preference on how the {@link PreviewView} should render the preview. * {@link PreviewView} displays the preview with either a {@link SurfaceView} or a * {@link TextureView}. A {@link SurfaceView} is generally better than a {@link TextureView} * when it comes to certain key metrics, including power and latency. On the other hand, * {@link TextureView} is better supported by a wider range of devices. The option is used by * {@link PreviewView} to decide what is the best internal implementation given the device * capabilities and user configurations. */ public enum ImplementationMode { /** * Use a {@link SurfaceView} for the preview when possible. If the device * doesn't support {@link SurfaceView}, {@link PreviewView} will fall back to use a * {@link TextureView} instead. * * <p>{@link PreviewView} falls back to {@link TextureView} when the API level is 24 or * lower, the camera hardware support level is * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY}, or * {@link Preview#getTargetRotation()} is different from {@link PreviewView}'s display * rotation. * * <p>Do not use this mode if {@link Preview.Builder#setTargetRotation(int)} is set * to a value different than the display's rotation, because {@link SurfaceView} does not * support arbitrary rotations. Do not use this mode if the {@link PreviewView} * needs to be animated. {@link SurfaceView} animation is not supported on API level 24 * or lower. 
Also, for the preview's streaming state provided in * {@link #getPreviewStreamState}, the {@link StreamState#STREAMING} state might happen * prematurely if this mode is used. * * @see Preview.Builder#setTargetRotation(int) * @see Preview.Builder#getTargetRotation() * @see Display#getRotation() * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY * @see StreamState#STREAMING */ PERFORMANCE(0), /** * Use a {@link TextureView} for the preview. */ COMPATIBLE(1); private final int mId; ImplementationMode(int id) { mId = id; } int getId() { return mId; } static ImplementationMode fromId(int id) { for (ImplementationMode implementationMode : values()) { if (implementationMode.mId == id) { return implementationMode; } } throw new IllegalArgumentException("Unknown implementation mode id " + id); } } /** Options for scaling the preview vis-à-vis its container {@link PreviewView}. */ public enum ScaleType { /** * Scale the preview, maintaining the source aspect ratio, so it fills the entire * {@link PreviewView}, and align it to the start of the view, which is the top left * corner in a left-to-right (LTR) layout, or the top right corner in a right-to-left * (RTL) layout. * <p> * This may cause the preview to be cropped if the camera preview aspect ratio does not * match that of its container {@link PreviewView}. */ FILL_START(0), /** * Scale the preview, maintaining the source aspect ratio, so it fills the entire * {@link PreviewView}, and center it in the view. * <p> * This may cause the preview to be cropped if the camera preview aspect ratio does not * match that of its container {@link PreviewView}. */ FILL_CENTER(1), /** * Scale the preview, maintaining the source aspect ratio, so it fills the entire * {@link PreviewView}, and align it to the end of the view, which is the bottom right * corner in a left-to-right (LTR) layout, or the bottom left corner in a right-to-left * (RTL) layout. * <p> * This may cause the preview to be cropped if the camera preview aspect ratio does not * match that of its container {@link PreviewView}. */ FILL_END(2), /** * Scale the preview, maintaining the source aspect ratio, so it is entirely contained * within the {@link PreviewView}, and align it to the start of the view, which is the * top left corner in a left-to-right (LTR) layout, or the top right corner in a * right-to-left (RTL) layout. The background area not covered by the preview stream * will be black or the background of the {@link PreviewView} * <p> * Both dimensions of the preview will be equal or less than the corresponding dimensions * of its container {@link PreviewView}. */ FIT_START(3), /** * Scale the preview, maintaining the source aspect ratio, so it is entirely contained * within the {@link PreviewView}, and center it inside the view. The background area not * covered by the preview stream will be black or the background of the {@link PreviewView}. * <p> * Both dimensions of the preview will be equal or less than the corresponding dimensions * of its container {@link PreviewView}. */ FIT_CENTER(4), /** * Scale the preview, maintaining the source aspect ratio, so it is entirely contained * within the {@link PreviewView}, and align it to the end of the view, which is the * bottom right corner in a left-to-right (LTR) layout, or the bottom left corner in a * right-to-left (RTL) layout. The background area not covered by the preview stream * will be black or the background of the {@link PreviewView}. 
* <p> * Both dimensions of the preview will be equal or less than the corresponding dimensions * of its container {@link PreviewView}. */ FIT_END(5); private final int mId; ScaleType(int id) { mId = id; } int getId() { return mId; } static ScaleType fromId(int id) { for (ScaleType scaleType : values()) { if (scaleType.mId == id) { return scaleType; } } throw new IllegalArgumentException("Unknown scale type id " + id); } } /** * Definitions for the preview stream state. */ public enum StreamState { /** Preview is not visible yet. */ IDLE, /** * Preview is streaming. * * <p>This state only guarantees the preview is streaming when the implementation mode is * {@link ImplementationMode#COMPATIBLE}. When in {@link ImplementationMode#PERFORMANCE} * mode, it is possible that the preview becomes visible slightly after the state has * changed. For apps requiring a precise signal for when the preview starts, please set * {@link ImplementationMode#COMPATIBLE} mode via {@link #setImplementationMode}. */ STREAMING } /** * GestureListener that speeds up scale factor and sends it to controller. */ class PinchToZoomOnScaleGestureListener extends ScaleGestureDetector.SimpleOnScaleGestureListener { @Override public boolean onScale(ScaleGestureDetector detector) { if (mCameraController != null) { mCameraController.onPinchToZoom(detector.getScaleFactor()); } return true; } } /** * Sets the {@link CameraController}. * * <p> Once set, the controller will use {@link PreviewView} to display camera preview feed. * It also uses the {@link PreviewView}'s layout dimension to set the crop rect for all the use * cases so that the output from other use cases match what the end user sees in * {@link PreviewView}. It also enables features like tap-to-focus and pinch-to-zoom. * * <p> Setting it to {@code null} or to a different {@link CameraController} stops the previous * {@link CameraController} from working. The previous {@link CameraController} will remain * detached until it's set on the {@link PreviewView} again. * * @throws IllegalArgumentException If the {@link CameraController}'s camera selector * is unable to resolve a camera to be used for the enabled * use cases. * @see CameraController */ @UiThread public void setController(@Nullable CameraController cameraController) { Threads.checkMainThread(); if (mCameraController != null && mCameraController != cameraController) { // If already bound to a different controller, ask the old controller to stop // using this PreviewView. mCameraController.clearPreviewSurface(); } mCameraController = cameraController; attachToControllerIfReady(/*shouldFailSilently=*/false); } /** * Get the {@link CameraController}. */ @Nullable @UiThread public CameraController getController() { Threads.checkMainThread(); return mCameraController; } /** * Gets the {@link OutputTransform} associated with the {@link PreviewView}. * * <p> Returns a {@link OutputTransform} object that represents the transform being applied to * the associated {@link Preview} use case. Returns null if the transform info is not ready. * For example, when the associated {@link Preview} has not been bound or the * {@link PreviewView}'s layout is not ready. * * <p> {@link PreviewView} needs to be in {@link ImplementationMode#COMPATIBLE} mode for the * transform to work correctly. For example, the returned {@link OutputTransform} may * not respect the value of {@link #getScaleX()} when {@link ImplementationMode#PERFORMANCE} * mode is used. * * @return the transform applied on the preview by this {@link PreviewView}. 
* @hide * @see CoordinateTransform */ // TODO(b/179827713): unhide this once all transform utils are done. @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) @TransformExperimental @Nullable public OutputTransform getOutputTransform() { Threads.checkMainThread(); Matrix matrix = null; try { matrix = mPreviewTransform.getSurfaceToPreviewViewMatrix( new Size(getWidth(), getHeight()), getLayoutDirection()); } catch (IllegalStateException ex) { // Fall-through. It will be handled below. } Rect surfaceCropRect = mPreviewTransform.getSurfaceCropRect(); if (matrix == null || surfaceCropRect == null) { Logger.d(TAG, "Transform info is not ready"); return null; } // Map it to the normalized space (0, 0) - (1, 1). matrix.preConcat(getNormalizedToBuffer(surfaceCropRect)); // Add the custom transform applied by the app. e.g. View#setScaleX. if (mImplementation instanceof TextureViewImplementation) { matrix.postConcat(getMatrix()); } else { Logger.w(TAG, "PreviewView needs to be in COMPATIBLE mode for the transform" + " to work correctly."); } return new OutputTransform(matrix, new Size(surfaceCropRect.width(), surfaceCropRect.height())); } @UseExperimental(markerClass = ExperimentalUseCaseGroup.class) private void attachToControllerIfReady(boolean shouldFailSilently) { Display display = getDisplay(); ViewPort viewPort = getViewPort(); if (mCameraController != null && viewPort != null && isAttachedToWindow() && display != null) { try { mCameraController.attachPreviewSurface(getSurfaceProvider(), viewPort, display); } catch (IllegalStateException ex) { if (shouldFailSilently) { // Swallow the exception and fail silently if the method is invoked by View // events. Logger.e(TAG, ex.getMessage(), ex); } else { throw ex; } } } } }
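For reference, a minimal usage sketch assembled from the examples quoted in the javadoc above (the setSurfaceProvider() call documented on getSurfaceProvider(), and the ViewPort/UseCaseGroup example documented on getViewPort(int)). This is a sketch under assumptions, not part of the file: previewView, cameraProvider, cameraSelector and lifecycleOwner are supplied by the hosting Activity or Fragment.

// Sketch only, combining the snippets documented in the javadoc above; every identifier
// other than the CameraX types is an assumption provided by the caller.
Preview preview = new Preview.Builder().build();
preview.setSurfaceProvider(previewView.getSurfaceProvider());

ViewPort viewPort = previewView.getViewPort();  // null until PreviewView is attached and laid out
UseCaseGroup useCaseGroup = new UseCaseGroup.Builder()
        .setViewPort(viewPort)
        .addUseCase(preview)
        .build();
cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, useCaseGroup);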
camera/camera-view/src/main/java/androidx/camera/view/PreviewView.java
/* * Copyright 2019 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.camera.view; import static androidx.camera.view.transform.OutputTransform.getNormalizedToBuffer; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.Rect; import android.hardware.camera2.CameraCharacteristics; import android.os.Build; import android.util.AttributeSet; import android.util.Rational; import android.util.Size; import android.view.Display; import android.view.MotionEvent; import android.view.ScaleGestureDetector; import android.view.Surface; import android.view.SurfaceView; import android.view.TextureView; import android.view.View; import android.view.ViewConfiguration; import android.widget.FrameLayout; import androidx.annotation.AnyThread; import androidx.annotation.ColorRes; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.annotation.RestrictTo; import androidx.annotation.UiThread; import androidx.annotation.VisibleForTesting; import androidx.annotation.experimental.UseExperimental; import androidx.camera.core.CameraControl; import androidx.camera.core.CameraInfo; import androidx.camera.core.CameraSelector; import androidx.camera.core.ExperimentalUseCaseGroup; import androidx.camera.core.FocusMeteringAction; import androidx.camera.core.Logger; import androidx.camera.core.MeteringPoint; import androidx.camera.core.MeteringPointFactory; import androidx.camera.core.Preview; import androidx.camera.core.SurfaceRequest; import androidx.camera.core.UseCase; import androidx.camera.core.UseCaseGroup; import androidx.camera.core.ViewPort; import androidx.camera.core.impl.CameraInfoInternal; import androidx.camera.core.impl.CameraInternal; import androidx.camera.core.impl.ImageOutputConfig; import androidx.camera.core.impl.utils.Threads; import androidx.camera.view.transform.CoordinateTransform; import androidx.camera.view.transform.OutputTransform; import androidx.core.content.ContextCompat; import androidx.lifecycle.LiveData; import androidx.lifecycle.MutableLiveData; import java.util.concurrent.Executor; import java.util.concurrent.atomic.AtomicReference; /** * Custom View that displays the camera feed for CameraX's {@link Preview} use case. * * <p> This class manages the preview {@link Surface}'s lifecycle. It internally uses either a * {@link TextureView} or {@link SurfaceView} to display the camera feed, and applies required * transformations on them to correctly display the preview, this involves correcting their * aspect ratio, scale and rotation. * * <p> If {@link PreviewView} uses a {@link SurfaceView} to display the preview * stream, be careful when overlapping a {@link View} that's initially not visible (either * {@link View#INVISIBLE} or {@link View#GONE}) on top of it. 
When the {@link SurfaceView} is * attached to the display window, it calls * {@link android.view.ViewParent#requestTransparentRegion(View)} which requests a computation of * the transparent regions on the display. At this point, the {@link View} isn't visible, causing * the overlapped region between the {@link SurfaceView} and the {@link View} to be * considered transparent. Later if the {@link View} becomes {@linkplain View#VISIBLE visible}, it * will not be displayed on top of {@link SurfaceView}. A way around this is to call * {@link android.view.ViewParent#requestTransparentRegion(View)} right after making the * {@link View} visible, or initially hiding the {@link View} by setting its * {@linkplain View#setAlpha(float) opacity} to 0, then setting it to 1.0F to show it. */ public final class PreviewView extends FrameLayout { private static final String TAG = "PreviewView"; @ColorRes static final int DEFAULT_BACKGROUND_COLOR = android.R.color.black; private static final ImplementationMode DEFAULT_IMPL_MODE = ImplementationMode.PERFORMANCE; // Synthetic access @SuppressWarnings("WeakerAccess") @NonNull ImplementationMode mImplementationMode = DEFAULT_IMPL_MODE; @VisibleForTesting @Nullable PreviewViewImplementation mImplementation; @NonNull final PreviewTransformation mPreviewTransform = new PreviewTransformation(); // Synthetic access @SuppressWarnings("WeakerAccess") @NonNull final MutableLiveData<StreamState> mPreviewStreamStateLiveData = new MutableLiveData<>(StreamState.IDLE); // Synthetic access @SuppressWarnings("WeakerAccess") @Nullable final AtomicReference<PreviewStreamStateObserver> mActiveStreamStateObserver = new AtomicReference<>(); // Synthetic access @SuppressWarnings("WeakerAccess") CameraController mCameraController; @NonNull PreviewViewMeteringPointFactory mPreviewViewMeteringPointFactory = new PreviewViewMeteringPointFactory(mPreviewTransform); // Detector for zoom-to-scale. @NonNull private final ScaleGestureDetector mScaleGestureDetector; @Nullable private MotionEvent mTouchUpEvent; private final OnLayoutChangeListener mOnLayoutChangeListener = (v, left, top, right, bottom, oldLeft, oldTop, oldRight, oldBottom) -> { boolean isSizeChanged = right - left != oldRight - oldLeft || bottom - top != oldBottom - oldTop; if (isSizeChanged) { redrawPreview(); attachToControllerIfReady(true); } }; // Synthetic access @SuppressWarnings("WeakerAccess") final Preview.SurfaceProvider mSurfaceProvider = new Preview.SurfaceProvider() { @UseExperimental(markerClass = ExperimentalUseCaseGroup.class) @Override @AnyThread public void onSurfaceRequested(@NonNull SurfaceRequest surfaceRequest) { if (!Threads.isMainThread()) { // Post on main thread to ensure thread safety. ContextCompat.getMainExecutor(getContext()).execute( () -> mSurfaceProvider.onSurfaceRequested(surfaceRequest)); return; } Logger.d(TAG, "Surface requested by Preview."); CameraInternal camera = surfaceRequest.getCamera(); surfaceRequest.setTransformationInfoListener( ContextCompat.getMainExecutor(getContext()), transformationInfo -> { Logger.d(TAG, "Preview transformation info updated. " + transformationInfo); // TODO(b/159127402): maybe switch to COMPATIBLE mode if target // rotation is not display rotation. boolean isFrontCamera = camera.getCameraInfoInternal().getLensFacing() == CameraSelector.LENS_FACING_FRONT; mPreviewTransform.setTransformationInfo(transformationInfo, surfaceRequest.getResolution(), isFrontCamera); redrawPreview(); }); mImplementation = shouldUseTextureView(surfaceRequest, mImplementationMode) ? 
new TextureViewImplementation(PreviewView.this, mPreviewTransform) : new SurfaceViewImplementation(PreviewView.this, mPreviewTransform); PreviewStreamStateObserver streamStateObserver = new PreviewStreamStateObserver((CameraInfoInternal) camera.getCameraInfo(), mPreviewStreamStateLiveData, mImplementation); mActiveStreamStateObserver.set(streamStateObserver); camera.getCameraState().addObserver( ContextCompat.getMainExecutor(getContext()), streamStateObserver); mImplementation.onSurfaceRequested(surfaceRequest, () -> { // We've no longer needed this observer, if there is no new StreamStateObserver // (another SurfaceRequest), reset the streamState to IDLE. // This is needed for the case when unbinding preview while other use cases are // still bound. if (mActiveStreamStateObserver.compareAndSet(streamStateObserver, null)) { streamStateObserver.updatePreviewStreamState(StreamState.IDLE); } streamStateObserver.clear(); camera.getCameraState().removeObserver(streamStateObserver); }); } }; @UiThread public PreviewView(@NonNull Context context) { this(context, null); } @UiThread public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs) { this(context, attrs, 0); } @UiThread public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) { this(context, attrs, defStyleAttr, 0); } @UiThread public PreviewView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); Threads.checkMainThread(); final TypedArray attributes = context.getTheme().obtainStyledAttributes(attrs, R.styleable.PreviewView, defStyleAttr, defStyleRes); if (Build.VERSION.SDK_INT >= 29) { saveAttributeDataForStyleable(context, R.styleable.PreviewView, attrs, attributes, defStyleAttr, defStyleRes); } try { final int scaleTypeId = attributes.getInteger( R.styleable.PreviewView_scaleType, mPreviewTransform.getScaleType().getId()); setScaleType(ScaleType.fromId(scaleTypeId)); int implementationModeId = attributes.getInteger(R.styleable.PreviewView_implementationMode, DEFAULT_IMPL_MODE.getId()); setImplementationMode(ImplementationMode.fromId(implementationModeId)); } finally { attributes.recycle(); } mScaleGestureDetector = new ScaleGestureDetector( context, new PinchToZoomOnScaleGestureListener()); // Set background only if it wasn't already set. A default background prevents the content // behind the PreviewView from being visible before the preview starts streaming. if (getBackground() == null) { setBackgroundColor(ContextCompat.getColor(getContext(), DEFAULT_BACKGROUND_COLOR)); } } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); addOnLayoutChangeListener(mOnLayoutChangeListener); if (mImplementation != null) { mImplementation.onAttachedToWindow(); } attachToControllerIfReady(true); } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); removeOnLayoutChangeListener(mOnLayoutChangeListener); if (mImplementation != null) { mImplementation.onDetachedFromWindow(); } if (mCameraController != null) { mCameraController.clearPreviewSurface(); } } @Override public boolean onTouchEvent(@NonNull MotionEvent event) { if (mCameraController == null) { // Do not consume events if controller is not set. 
return super.onTouchEvent(event); } boolean isSingleTouch = event.getPointerCount() == 1; boolean isUpEvent = event.getAction() == MotionEvent.ACTION_UP; boolean notALongPress = event.getEventTime() - event.getDownTime() < ViewConfiguration.getLongPressTimeout(); if (isSingleTouch && isUpEvent && notALongPress) { // If the event is a click, invoke tap-to-focus and forward it to user's // OnClickListener#onClick. mTouchUpEvent = event; performClick(); // A click has been detected and forwarded. Consume the event so onClick won't be // invoked twice. return true; } return mScaleGestureDetector.onTouchEvent(event) || super.onTouchEvent(event); } @Override public boolean performClick() { if (mCameraController != null) { // mTouchUpEvent == null means it's an accessibility click. Focus at the center instead. float x = mTouchUpEvent != null ? mTouchUpEvent.getX() : getWidth() / 2f; float y = mTouchUpEvent != null ? mTouchUpEvent.getY() : getHeight() / 2f; mCameraController.onTapToFocus(mPreviewViewMeteringPointFactory, x, y); } mTouchUpEvent = null; return super.performClick(); } /** * Sets the {@link ImplementationMode} for the {@link PreviewView}. * * <p> {@link PreviewView} displays the preview with a {@link TextureView} when the * mode is {@link ImplementationMode#COMPATIBLE}, and tries to use a {@link SurfaceView} if * it is {@link ImplementationMode#PERFORMANCE} when possible, which depends on the device's * attributes (e.g. API level, camera hardware support level). If not set, the default mode * is {@link ImplementationMode#PERFORMANCE}. * * <p> This method needs to be called before the {@link Preview.SurfaceProvider} is set on * {@link Preview}. Once changed, {@link Preview.SurfaceProvider} needs to be set again. e.g. * {@code preview.setSurfaceProvider(previewView.getSurfaceProvider())}. */ @UiThread public void setImplementationMode(@NonNull final ImplementationMode implementationMode) { Threads.checkMainThread(); mImplementationMode = implementationMode; } /** * Returns the {@link ImplementationMode}. * * <p> If nothing is set via {@link #setImplementationMode}, the default * value is {@link ImplementationMode#PERFORMANCE}. * * @return The {@link ImplementationMode} for {@link PreviewView}. */ @UiThread @NonNull public ImplementationMode getImplementationMode() { Threads.checkMainThread(); return mImplementationMode; } /** * Gets a {@link Preview.SurfaceProvider} to be used with * {@link Preview#setSurfaceProvider(Executor, Preview.SurfaceProvider)}. This allows the * camera feed to start when the {@link Preview} use case is bound to a lifecycle. * * <p> The returned {@link Preview.SurfaceProvider} will provide a preview {@link Surface} to * the camera that's either managed by a {@link TextureView} or {@link SurfaceView} depending * on the {@link ImplementationMode} and the device's attributes (e.g. API level, camera * hardware support level). * * @return A {@link Preview.SurfaceProvider} to attach to a {@link Preview} use case. * @see ImplementationMode */ @UiThread @NonNull @UseExperimental(markerClass = ExperimentalUseCaseGroup.class) public Preview.SurfaceProvider getSurfaceProvider() { Threads.checkMainThread(); return mSurfaceProvider; } /** * Applies a {@link ScaleType} to the preview. * * <p> If a {@link CameraController} is attached to {@link PreviewView}, the change will take * immediate effect. It also takes immediate effect if {@link #getViewPort()} is not set in * the bound {@link UseCaseGroup}. 
Otherwise, the {@link UseCase}s need to be bound again * with the latest value of {@link #getViewPort()}. * * <p> This value can also be set in the layout XML file via the {@code app:scaleType} * attribute. * * <p> The default value is {@link ScaleType#FILL_CENTER}. * * @param scaleType A {@link ScaleType} to apply to the preview. * @attr name app:scaleType */ @UiThread public void setScaleType(@NonNull final ScaleType scaleType) { Threads.checkMainThread(); mPreviewTransform.setScaleType(scaleType); redrawPreview(); // Notify controller to re-calculate the crop rect. attachToControllerIfReady(false); } /** * Returns the {@link ScaleType} currently applied to the preview. * * <p> The default value is {@link ScaleType#FILL_CENTER}. * * @return The {@link ScaleType} currently applied to the preview. */ @UiThread @NonNull public ScaleType getScaleType() { Threads.checkMainThread(); return mPreviewTransform.getScaleType(); } /** * Gets the {@link MeteringPointFactory} for the camera currently connected to the * {@link PreviewView}, if any. * * <p> The returned {@link MeteringPointFactory} is capable of creating {@link MeteringPoint}s * from (x, y) coordinates in the {@link PreviewView}. This conversion takes into account its * {@link ScaleType}. The {@link MeteringPointFactory} is automatically adjusted if the * {@link PreviewView} layout or the {@link ScaleType} changes. * * <p> The {@link MeteringPointFactory} returns invalid {@link MeteringPoint} if the * preview is not ready, or the {@link PreviewView} dimension is zero. The invalid * {@link MeteringPoint} will cause * {@link CameraControl#startFocusAndMetering(FocusMeteringAction)} to fail but it won't * crash the application. Wait for the {@link StreamState#STREAMING} state to make sure the * preview is ready. * * @return a {@link MeteringPointFactory} * @see #getPreviewStreamState() */ @UiThread @NonNull public MeteringPointFactory getMeteringPointFactory() { Threads.checkMainThread(); return mPreviewViewMeteringPointFactory; } /** * Gets a {@link LiveData} for the preview {@link StreamState}. * * <p>There are two preview stream states, {@link StreamState#IDLE} and * {@link StreamState#STREAMING}. {@link StreamState#IDLE} indicates the preview is currently * not visible and streaming is stopped. {@link StreamState#STREAMING} means the preview is * streaming or is about to start streaming. This state guarantees the preview is visible * only when the {@link ImplementationMode} is {@link ImplementationMode#COMPATIBLE}. When in * {@link ImplementationMode#PERFORMANCE} mode, it is possible the preview becomes * visible slightly after the state changes to {@link StreamState#STREAMING}. * * <p>Apps that require a precise signal for when the preview starts should * {@linkplain #setImplementationMode(ImplementationMode) set} the implementation mode to * {@link ImplementationMode#COMPATIBLE}. * * @return A {@link LiveData} of the preview's {@link StreamState}. Apps can get the current * state with {@link LiveData#getValue()}, or register an observer with * {@link LiveData#observe} . */ @NonNull public LiveData<StreamState> getPreviewStreamState() { return mPreviewStreamStateLiveData; } /** * Returns a {@link Bitmap} representation of the content displayed on the * {@link PreviewView}, or {@code null} if the camera preview hasn't started yet. * <p> * The returned {@link Bitmap} uses the {@link Bitmap.Config#ARGB_8888} pixel format and its * dimensions are the same as this view's. 
* <p> * <strong>Do not</strong> invoke this method from a drawing method * ({@link View#onDraw(Canvas)} for instance). * <p> * If an error occurs during the copy, an empty {@link Bitmap} will be returned. * <p> * If the preview hasn't started yet, the method may return null or an empty {@link Bitmap}. Use * {@link #getPreviewStreamState()} to get the {@link StreamState} and wait for * {@link StreamState#STREAMING} to make sure the preview is started. * * @return A {@link Bitmap.Config#ARGB_8888} {@link Bitmap} representing the content * displayed on the {@link PreviewView}, or null if the camera preview hasn't started yet. */ @UiThread @Nullable public Bitmap getBitmap() { Threads.checkMainThread(); return mImplementation == null ? null : mImplementation.getBitmap(); } /** * Gets a {@link ViewPort} based on the current status of {@link PreviewView}. * * <p> Returns a {@link ViewPort} instance based on the {@link PreviewView}'s current width, * height, layout direction, scale type and display rotation. By using the {@link ViewPort}, all * the {@link UseCase}s in the {@link UseCaseGroup} will have the same output image that also * matches the aspect ratio of the {@link PreviewView}. * * @return null if the view is not currently attached or the view's width/height is zero. * @see ViewPort * @see UseCaseGroup */ @UiThread @Nullable @ExperimentalUseCaseGroup public ViewPort getViewPort() { Threads.checkMainThread(); if (getDisplay() == null) { // Returns null if the layout is not ready. return null; } return getViewPort(getDisplay().getRotation()); } /** * Gets a {@link ViewPort} with custom target rotation. * * <p>Returns a {@link ViewPort} instance based on the {@link PreviewView}'s current width, * height, layout direction, scale type and the given target rotation. * * <p>Use this method if {@link Preview}'s desired rotation is not the default display * rotation. For example, when remote display is in use and the desired rotation for the * remote display is based on the accelerometer reading. In that case, use * {@link android.view.OrientationEventListener} to obtain the target rotation and create * {@link ViewPort} as following: * <p>{@link android.view.OrientationEventListener#ORIENTATION_UNKNOWN}: orientation == -1 * <p>{@link Surface#ROTATION_0}: orientation >= 315 || orientation < 45 * <p>{@link Surface#ROTATION_90}: orientation >= 225 && orientation < 315 * <p>{@link Surface#ROTATION_180}: orientation >= 135 && orientation < 225 * <p>{@link Surface#ROTATION_270}: orientation >= 45 && orientation < 135 * * <p> Once the target rotation is obtained, use it with {@link Preview#setTargetRotation} to * update the rotation. Example: * * <pre><code> * Preview preview = new Preview.Builder().setTargetRotation(targetRotation).build(); * ViewPort viewPort = previewView.getViewPort(targetRotation); * UseCaseGroup useCaseGroup = * new UseCaseGroup.Builder().setViewPort(viewPort).addUseCase(preview).build(); * cameraProvider.bindToLifecycle(lifecycleOwner, cameraSelector, useCaseGroup); * </code></pre> * * <p> Note that for non-display rotation to work, the mode must be set to * {@link ImplementationMode#COMPATIBLE}. * * @param targetRotation A rotation value, expressed as one of * {@link Surface#ROTATION_0}, {@link Surface#ROTATION_90}, * {@link Surface#ROTATION_180}, or * {@link Surface#ROTATION_270}. * @return null if the view's width/height is zero. 
* @see ImplementationMode */ @UiThread @SuppressLint("WrongConstant") @Nullable @ExperimentalUseCaseGroup public ViewPort getViewPort(@ImageOutputConfig.RotationValue int targetRotation) { Threads.checkMainThread(); if (getWidth() == 0 || getHeight() == 0) { return null; } return new ViewPort.Builder(new Rational(getWidth(), getHeight()), targetRotation) .setScaleType(getViewPortScaleType()) .setLayoutDirection(getLayoutDirection()) .build(); } /** * Converts {@link PreviewView.ScaleType} to {@link ViewPort.ScaleType}. */ private int getViewPortScaleType() { switch (getScaleType()) { case FILL_END: return ViewPort.FILL_END; case FILL_CENTER: return ViewPort.FILL_CENTER; case FILL_START: return ViewPort.FILL_START; case FIT_END: // Fallthrough case FIT_CENTER: // Fallthrough case FIT_START: return ViewPort.FIT; default: throw new IllegalStateException("Unexpected scale type: " + getScaleType()); } } // Synthetic access @SuppressWarnings("WeakerAccess") void redrawPreview() { if (mImplementation != null) { mImplementation.redrawPreview(); } mPreviewViewMeteringPointFactory.recalculate(new Size(getWidth(), getHeight()), getLayoutDirection()); } // Synthetic access @SuppressWarnings("WeakerAccess") boolean shouldUseTextureView(@NonNull SurfaceRequest surfaceRequest, @NonNull final ImplementationMode implementationMode) { // TODO(b/159127402): use TextureView if target rotation is not display rotation. boolean isLegacyDevice = surfaceRequest.getCamera().getCameraInfo() .getImplementationType().equals(CameraInfo.IMPLEMENTATION_TYPE_CAMERA2_LEGACY); if (surfaceRequest.isRGBA8888Required() || Build.VERSION.SDK_INT <= 24 || isLegacyDevice) { // Force to use TextureView when the device is running android 7.0 and below, legacy // level or RGBA8888 is required. return true; } switch (implementationMode) { case COMPATIBLE: return true; case PERFORMANCE: return false; default: throw new IllegalArgumentException( "Invalid implementation mode: " + implementationMode); } } /** * The implementation mode of a {@link PreviewView}. * * <p> User preference on how the {@link PreviewView} should render the preview. * {@link PreviewView} displays the preview with either a {@link SurfaceView} or a * {@link TextureView}. A {@link SurfaceView} is generally better than a {@link TextureView} * when it comes to certain key metrics, including power and latency. On the other hand, * {@link TextureView} is better supported by a wider range of devices. The option is used by * {@link PreviewView} to decide what is the best internal implementation given the device * capabilities and user configurations. */ public enum ImplementationMode { /** * Use a {@link SurfaceView} for the preview when possible. If the device * doesn't support {@link SurfaceView}, {@link PreviewView} will fall back to use a * {@link TextureView} instead. * * <p>{@link PreviewView} falls back to {@link TextureView} when the API level is 24 or * lower, the camera hardware support level is * {@link CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY}, or * {@link Preview#getTargetRotation()} is different from {@link PreviewView}'s display * rotation. * * <p>Do not use this mode if {@link Preview.Builder#setTargetRotation(int)} is set * to a value different than the display's rotation, because {@link SurfaceView} does not * support arbitrary rotations. Do not use this mode if the {@link PreviewView} * needs to be animated. {@link SurfaceView} animation is not supported on API level 24 * or lower. 
Also, for the preview's streaming state provided in * {@link #getPreviewStreamState}, the {@link StreamState#STREAMING} state might happen * prematurely if this mode is used. * * @see Preview.Builder#setTargetRotation(int) * @see Preview.Builder#getTargetRotation() * @see Display#getRotation() * @see CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY * @see StreamState#STREAMING */ PERFORMANCE(0), /** * Use a {@link TextureView} for the preview. */ COMPATIBLE(1); private final int mId; ImplementationMode(int id) { mId = id; } int getId() { return mId; } static ImplementationMode fromId(int id) { for (ImplementationMode implementationMode : values()) { if (implementationMode.mId == id) { return implementationMode; } } throw new IllegalArgumentException("Unknown implementation mode id " + id); } } /** Options for scaling the preview vis-à-vis its container {@link PreviewView}. */ public enum ScaleType { /** * Scale the preview, maintaining the source aspect ratio, so it fills the entire * {@link PreviewView}, and align it to the start of the view, which is the top left * corner in a left-to-right (LTR) layout, or the top right corner in a right-to-left * (RTL) layout. * <p> * This may cause the preview to be cropped if the camera preview aspect ratio does not * match that of its container {@link PreviewView}. */ FILL_START(0), /** * Scale the preview, maintaining the source aspect ratio, so it fills the entire * {@link PreviewView}, and center it in the view. * <p> * This may cause the preview to be cropped if the camera preview aspect ratio does not * match that of its container {@link PreviewView}. */ FILL_CENTER(1), /** * Scale the preview, maintaining the source aspect ratio, so it fills the entire * {@link PreviewView}, and align it to the end of the view, which is the bottom right * corner in a left-to-right (LTR) layout, or the bottom left corner in a right-to-left * (RTL) layout. * <p> * This may cause the preview to be cropped if the camera preview aspect ratio does not * match that of its container {@link PreviewView}. */ FILL_END(2), /** * Scale the preview, maintaining the source aspect ratio, so it is entirely contained * within the {@link PreviewView}, and align it to the start of the view, which is the * top left corner in a left-to-right (LTR) layout, or the top right corner in a * right-to-left (RTL) layout. The background area not covered by the preview stream * will be black or the background of the {@link PreviewView} * <p> * Both dimensions of the preview will be equal or less than the corresponding dimensions * of its container {@link PreviewView}. */ FIT_START(3), /** * Scale the preview, maintaining the source aspect ratio, so it is entirely contained * within the {@link PreviewView}, and center it inside the view. The background area not * covered by the preview stream will be black or the background of the {@link PreviewView}. * <p> * Both dimensions of the preview will be equal or less than the corresponding dimensions * of its container {@link PreviewView}. */ FIT_CENTER(4), /** * Scale the preview, maintaining the source aspect ratio, so it is entirely contained * within the {@link PreviewView}, and align it to the end of the view, which is the * bottom right corner in a left-to-right (LTR) layout, or the bottom left corner in a * right-to-left (RTL) layout. The background area not covered by the preview stream * will be black or the background of the {@link PreviewView}. 
* <p> * Both dimensions of the preview will be equal or less than the corresponding dimensions * of its container {@link PreviewView}. */ FIT_END(5); private final int mId; ScaleType(int id) { mId = id; } int getId() { return mId; } static ScaleType fromId(int id) { for (ScaleType scaleType : values()) { if (scaleType.mId == id) { return scaleType; } } throw new IllegalArgumentException("Unknown scale type id " + id); } } /** * Definitions for the preview stream state. */ public enum StreamState { /** Preview is not visible yet. */ IDLE, /** * Preview is streaming. * * <p>This state only guarantees the preview is streaming when the implementation mode is * {@link ImplementationMode#COMPATIBLE}. When in {@link ImplementationMode#PERFORMANCE} * mode, it is possible that the preview becomes visible slightly after the state has * changed. For apps requiring a precise signal for when the preview starts, please set * {@link ImplementationMode#COMPATIBLE} mode via {@link #setImplementationMode}. */ STREAMING } /** * GestureListener that speeds up scale factor and sends it to controller. */ class PinchToZoomOnScaleGestureListener extends ScaleGestureDetector.SimpleOnScaleGestureListener { @Override public boolean onScale(ScaleGestureDetector detector) { if (mCameraController != null) { mCameraController.onPinchToZoom(detector.getScaleFactor()); } return true; } } /** * Sets the {@link CameraController}. * * <p> Once set, the controller will use {@link PreviewView} to display camera preview feed. * It also uses the {@link PreviewView}'s layout dimension to set the crop rect for all the use * cases so that the output from other use cases match what the end user sees in * {@link PreviewView}. It also enables features like tap-to-focus and pinch-to-zoom. * * <p> Setting it to {@code null} or to a different {@link CameraController} stops the previous * {@link CameraController} from working. The previous {@link CameraController} will remain * detached until it's set on the {@link PreviewView} again. * * @throws IllegalArgumentException If the {@link CameraController}'s camera selector * is unable to resolve a camera to be used for the enabled * use cases. * @see CameraController */ @UiThread public void setController(@Nullable CameraController cameraController) { Threads.checkMainThread(); if (mCameraController != null && mCameraController != cameraController) { // If already bound to a different controller, ask the old controller to stop // using this PreviewView. mCameraController.clearPreviewSurface(); } mCameraController = cameraController; attachToControllerIfReady(/*shouldFailSilently=*/false); } /** * Get the {@link CameraController}. */ @Nullable @UiThread public CameraController getController() { Threads.checkMainThread(); return mCameraController; } /** * Gets the {@link OutputTransform} associated with the {@link PreviewView}. * * <p> Returns a {@link OutputTransform} object that represents the transform being applied to * the associated {@link Preview} use case. Returns null if the transform info is not ready. * For example, when the associated {@link Preview} has not been bound or the * {@link PreviewView}'s layout is not ready. * * <p> {@link PreviewView} needs to be in {@link ImplementationMode#COMPATIBLE} mode for the * transform to work correctly. For example, the returned {@link OutputTransform} may * not respect the value of {@link #getScaleX()} when {@link ImplementationMode#PERFORMANCE} * mode is used. * * @return the transform applied on the preview by this {@link PreviewView}. 
* @hide * @see CoordinateTransform */ // TODO(b/179827713): unhide this once all transform utils are done. @RestrictTo(RestrictTo.Scope.LIBRARY_GROUP) @TransformExperimental @Nullable public OutputTransform getOutputTransform() { Threads.checkMainThread(); Matrix matrix = null; try { matrix = mPreviewTransform.getSurfaceToPreviewViewMatrix( new Size(getWidth(), getHeight()), getLayoutDirection()); } catch (IllegalStateException ex) { // Fall-through. It will be handled below. } Rect surfaceCropRect = mPreviewTransform.getSurfaceCropRect(); if (matrix == null || surfaceCropRect == null) { Logger.d(TAG, "Transform info is not ready"); return null; } // Map it to the normalized space (0, 0) - (1, 1). matrix.preConcat(getNormalizedToBuffer(surfaceCropRect)); // Add the custom transform applied by the app. e.g. View#setScaleX. if (mImplementation instanceof TextureViewImplementation) { matrix.postConcat(getMatrix()); } else { Logger.w(TAG, "PreviewView needs to be in COMPATIBLE mode for the transform" + " to work correctly."); } return new OutputTransform(matrix, new Size(surfaceCropRect.width(), surfaceCropRect.height())); } @UseExperimental(markerClass = ExperimentalUseCaseGroup.class) private void attachToControllerIfReady(boolean shouldFailSilently) { Display display = getDisplay(); ViewPort viewPort = getViewPort(); if (mCameraController != null && viewPort != null && isAttachedToWindow() && display != null) { try { mCameraController.attachPreviewSurface(getSurfaceProvider(), viewPort, display); } catch (IllegalStateException ex) { if (shouldFailSilently) { // Swallow the exception and fail silently if the method is invoked by View // events. Logger.e(TAG, ex.getMessage(), ex); } else { throw ex; } } } } }
Avoid calling default CameraInternal#getCameraInfo

Avoids calling CameraInternal#getCameraInfo() which contains a default
implementation that calls CameraInternal#getCameraInfoInternal(). Instead just
call CameraInternal#getCameraInfoInternal() directly.

This was causing issues with camera-view's PreviewViewTest where getCameraInfo()
was being treated as an abstract method rather than using the default interface
implementation. This was causing an AbstractMethodError.

Bug: 182561174
Test: PreviewViewTest on affected devices in test lab
Change-Id: I906ddf0ed0f474834819ddfce523152eddf3e1a9
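The functional change this message describes is the single line that differs between old_contents and new_contents inside shouldUseTextureView(). Both lines below are quoted from the file contents above; the old call is kept as a comment, and the fragment belongs inside that method rather than standing alone.

// Inside shouldUseTextureView(SurfaceRequest, ImplementationMode); the rest of the method
// is identical in both versions of the file.
//
// Before (old_contents): goes through the default interface method
// CameraInternal#getCameraInfo(), which on the affected devices was treated as an
// abstract method and threw an AbstractMethodError:
//
//     boolean isLegacyDevice = surfaceRequest.getCamera().getCameraInfo()
//             .getImplementationType().equals(CameraInfo.IMPLEMENTATION_TYPE_CAMERA2_LEGACY);
//
// After (new_contents): calls CameraInternal#getCameraInfoInternal() directly, bypassing
// the default implementation:
boolean isLegacyDevice = surfaceRequest.getCamera().getCameraInfoInternal()
        .getImplementationType().equals(CameraInfo.IMPLEMENTATION_TYPE_CAMERA2_LEGACY);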
camera/camera-view/src/main/java/androidx/camera/view/PreviewView.java
Avoid calling default CameraInternal#getCameraInfo
Java
apache-2.0
4a788502d3db2bffe1c7f5d52c311d39b3cbe2b9
0
June92/Cardshifter,June92/Cardshifter,Cardshifter/Cardshifter,SirPython/Cardshifter,Cardshifter/Cardshifter,SirPython/Cardshifter,Cardshifter/Cardshifter,June92/Cardshifter,SirPython/Cardshifter
package com.cardshifter.client; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.function.Consumer; import javafx.application.Platform; import javafx.fxml.FXML; import javafx.scene.Node; import javafx.scene.control.Label; import javafx.scene.control.ListView; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.VBox; import javafx.scene.paint.Color; import javafx.scene.shape.Rectangle; import javafx.stage.Stage; import org.apache.log4j.LogManager; import org.apache.log4j.Logger; import com.cardshifter.api.both.ChatMessage; import com.cardshifter.api.incoming.RequestTargetsMessage; import com.cardshifter.api.incoming.UseAbilityMessage; import com.cardshifter.api.messages.Message; import com.cardshifter.api.outgoing.AvailableTargetsMessage; import com.cardshifter.api.outgoing.CardInfoMessage; import com.cardshifter.api.outgoing.ClientDisconnectedMessage; import com.cardshifter.api.outgoing.EntityRemoveMessage; import com.cardshifter.api.outgoing.GameOverMessage; import com.cardshifter.api.outgoing.NewGameMessage; import com.cardshifter.api.outgoing.PlayerMessage; import com.cardshifter.api.outgoing.ResetAvailableActionsMessage; import com.cardshifter.api.outgoing.UpdateMessage; import com.cardshifter.api.outgoing.UseableActionMessage; import com.cardshifter.api.outgoing.WaitMessage; import com.cardshifter.api.outgoing.WelcomeMessage; import com.cardshifter.api.outgoing.ZoneChangeMessage; import com.cardshifter.api.outgoing.ZoneMessage; import com.cardshifter.client.views.ActionButton; import com.cardshifter.client.views.BattlefieldZoneView; import com.cardshifter.client.views.CardBattlefieldDocumentController; import com.cardshifter.client.views.CardHandDocumentController; import com.cardshifter.client.views.CardView; import com.cardshifter.client.views.PlayerHandZoneView; import com.cardshifter.client.views.ZoneView; public class GameClientController { private static final Logger logger = LogManager.getLogger(GameClientController.class); @FXML private AnchorPane rootPane; @FXML private Label loginMessage; @FXML private ListView<String> serverMessages; @FXML private VBox opponentStatBox; @FXML private VBox playerStatBox; @FXML private HBox actionBox; @FXML private HBox opponentHandPane; @FXML private HBox opponentBattlefieldPane; @FXML private Pane opponentDeckPane; @FXML private Label opponentDeckLabel; @FXML private HBox playerHandPane; @FXML private HBox playerBattlefieldPane; @FXML private Pane playerDeckPane; @FXML private Label playerDeckLabel; @FXML private Label playerName; @FXML private Label opponentName; private int gameId; private int playerIndex; private int opponentId; private int opponentHandId; private int opponentBattlefieldId; private int opponentDeckId; private final Map<Integer, Set<Integer>> deckEntityIds = new HashMap<>(); private int playerId; private int playerHandId; private int playerBattlefieldId; private int playerDeckId; private final Map<String, Integer> playerStatBoxMap = new HashMap<>(); private final Map<String, Integer> opponentStatBoxMap = new HashMap<>(); private final Map<Integer, ZoneView<?>> zoneViewMap = new HashMap<>(); private List<UseableActionMessage> savedMessages = new ArrayList<>(); private final Set<Integer> chosenTargets = new HashSet<>(); private AvailableTargetsMessage targetInfo; private 
Consumer<Message> sender; public void acceptConnectionSettings(NewGameMessage message, Consumer<Message> sender) { // this is passed into this object after it is automatically created by the FXML document this.playerIndex = message.getPlayerIndex(); this.gameId = message.getGameId(); logger.info(String.format("You are player: %d", this.playerIndex)); this.sender = sender; } public void createAndSendMessage(UseableActionMessage action) { if (action.isTargetRequired()) { this.send(new RequestTargetsMessage(gameId, action.getId(), action.getAction())); } else { this.send(new UseAbilityMessage(gameId, action.getId(), action.getAction(), action.getTargetId())); } //A new list of actions will be sent back from the server, so it is okay to clear them this.actionBox.getChildren().clear(); this.clearActiveFromAllCards(); } private void send(Message message) { this.sender.accept(message); } public void processMessageFromServer(Message message) { serverMessages.getItems().add(message.toString()); //this is for diagnostics so I can copy paste the messages to know their format logger.info(message); if (message instanceof WelcomeMessage) { Platform.runLater(() -> loginMessage.setText(message.toString())); } else if (message instanceof WaitMessage) { Platform.runLater(() -> loginMessage.setText(message.toString())); } else if (message instanceof PlayerMessage) { Platform.runLater(() -> this.processPlayerMessage((PlayerMessage)message)); } else if (message instanceof ZoneMessage) { this.assignZoneIdForZoneMessage((ZoneMessage)message); } else if (message instanceof CardInfoMessage) { this.processCardInfoMessage((CardInfoMessage)message); } else if (message instanceof UseableActionMessage) { this.savedMessages.add((UseableActionMessage)message); this.processUseableActionMessage((UseableActionMessage)message); } else if (message instanceof UpdateMessage) { this.processUpdateMessage((UpdateMessage)message); } else if (message instanceof ZoneChangeMessage) { this.processZoneChangeMessage((ZoneChangeMessage)message); } else if (message instanceof EntityRemoveMessage) { this.processEntityRemoveMessage((EntityRemoveMessage)message); } else if (message instanceof AvailableTargetsMessage) { this.processAvailableTargetsMessage((AvailableTargetsMessage)message); } else if (message instanceof ResetAvailableActionsMessage) { //this.processResetAvailableActionsMessage((ResetAvailableActionsMessage)message); this.clearSavedActions(); } else if (message instanceof ClientDisconnectedMessage) { this.processClientDisconnectedMessage((ClientDisconnectedMessage)message); } else if (message instanceof GameOverMessage) { this.processGameOverMessage((GameOverMessage)message); } } private void processPlayerMessage(PlayerMessage message) { if (message.getIndex() == this.playerIndex) { this.playerId = message.getId(); this.playerName.setText(message.getName()); this.processPlayerMessageForPlayer(message, playerStatBox, playerStatBoxMap); } else { this.opponentId = message.getId(); this.opponentName.setText(message.getName()); this.processPlayerMessageForPlayer(message, opponentStatBox, opponentStatBoxMap); this.loginMessage.setText("Opponent Connected"); } } private void processPlayerMessageForPlayer(PlayerMessage message, Pane statBox, Map<String, Integer> playerMap) { statBox.getChildren().clear(); Map<String, Integer> sortedMap = new TreeMap<>(message.getProperties()); playerMap.putAll(sortedMap); for (Map.Entry<String, Integer> entry : sortedMap.entrySet()) { String key = entry.getKey(); statBox.getChildren().add(new 
Label(key)); int value = entry.getValue(); statBox.getChildren().add(new Label(String.format("%d",value))); } } private void assignZoneIdForZoneMessage(ZoneMessage message) { if (!this.zoneViewMap.containsKey(message.getId())) { if (message.getName().equals("Battlefield")) { if(message.getOwner() == this.playerId) { this.playerBattlefieldId = message.getId(); this.zoneViewMap.put(message.getId(), new BattlefieldZoneView(message.getId(), playerBattlefieldPane)); } else { this.opponentBattlefieldId = message.getId(); this.zoneViewMap.put(message.getId(), new BattlefieldZoneView(message.getId(), opponentBattlefieldPane)); } } else if (message.getName().equals("Hand")) { if (message.getOwner() == this.playerId) { this.playerHandId = message.getId(); this.zoneViewMap.put(message.getId(), new PlayerHandZoneView(message.getId(), playerHandPane)); } else { this.opponentHandId = message.getId(); this.zoneViewMap.put(this.opponentHandId, new ZoneView<CardView>(message.getId(), opponentHandPane)); this.createOpponentHand(message); } } else if (message.getName().equals("Deck")) { if (message.getOwner() == this.playerId) { this.playerDeckId = message.getId(); this.deckEntityIds.put(message.getId(), new HashSet<>()); for (int entity : message.getEntities()) { this.addCardToDeck(playerDeckId, entity); } this.repaintDeckLabels(); this.zoneViewMap.put(message.getId(), new ZoneView<CardView>(message.getId(), playerDeckPane)); } else { this.opponentDeckId = message.getId(); this.deckEntityIds.put(message.getId(), new HashSet<>()); for (int entity : message.getEntities()) { this.addCardToDeck(opponentDeckId, entity); } this.repaintDeckLabels(); this.zoneViewMap.put(message.getId(), new ZoneView<CardView>(message.getId(), opponentDeckPane)); } } } } private void processCardInfoMessage(CardInfoMessage message) { int targetZone = message.getZone(); if (targetZone == opponentBattlefieldId) { this.addCardToOpponentBattlefieldPane(message); } else if (targetZone == opponentHandId) { this.addCardToOpponentHandPane(message); } else if (targetZone == playerBattlefieldId) { this.addCardToPlayerBattlefieldPane(message); } else if (targetZone == playerHandId) { this.addCardToPlayerHandPane(message); } } private void addCardToOpponentBattlefieldPane(CardInfoMessage message) { BattlefieldZoneView opponentBattlefield = getZoneView(opponentBattlefieldId); CardBattlefieldDocumentController card = new CardBattlefieldDocumentController(message, this); opponentBattlefield.addPane(message.getId(), card); } private void addCardToOpponentHandPane(CardInfoMessage message) { // this is unused because *KNOWN* cards don't pop up in opponent hand without reason (at least not now) } private void addCardToPlayerBattlefieldPane(CardInfoMessage message) { // this is unused because cards don't pop up in the battlefield magically, they are *moved* there (at least for now) } private void addCardToPlayerHandPane(CardInfoMessage message) { PlayerHandZoneView playerHand = getZoneView(playerHandId); CardHandDocumentController card = new CardHandDocumentController(message, this); playerHand.addPane(message.getId(), card); } private void processUseableActionMessage(UseableActionMessage message) { ZoneView<?> zoneView = getZoneViewForCard(message.getId()); logger.info("Usable message: " + message + " inform zone " + zoneView); if (zoneView == null) { this.createActionButton(message); return; } if (message.getAction().equals("Attack")) { ((BattlefieldZoneView)zoneView).setCardCanAttack(message.getId(),message); } else if 
(message.getAction().equals("Scrap")) { zoneView.setCardScrappable(message.getId(), message); } else { zoneView.setCardActive(message.getId(), message); } } private void processUpdateMessage(UpdateMessage message) { if (message.getId() == this.playerId) { this.processUpdateMessageForPlayer(playerStatBox, message, playerStatBoxMap); } else if (message.getId() == this.opponentId) { this.processUpdateMessageForPlayer(opponentStatBox, message, opponentStatBoxMap); } else { this.processUpdateMessageForCard(message); } } private void processUpdateMessageForPlayer(Pane statBox, UpdateMessage message, Map<String, Integer> playerMap) { String key = (String)message.getKey(); Integer value = (Integer)message.getValue(); playerMap.put(key, value); this.repaintStatBox(statBox, playerMap); } private void processUpdateMessageForCard(UpdateMessage message) { ZoneView<?> zoneView = getZoneViewForCard(message.getId()); if (zoneView != null) { zoneView.updateCard(message.getId(), message); } } private void processZoneChangeMessage(ZoneChangeMessage message) { int sourceZoneId = message.getSourceZone(); int destinationZoneId = message.getDestinationZone(); int cardId = message.getEntity(); if (sourceZoneId == opponentDeckId) { this.removeCardFromDeck(sourceZoneId, cardId); } else if (sourceZoneId == playerDeckId) { this.removeCardFromDeck(sourceZoneId, cardId); } if (destinationZoneId == opponentHandId) { this.addCardToOpponentHand(cardId); } if (destinationZoneId == opponentDeckId || destinationZoneId == playerDeckId) { this.addCardToDeck(destinationZoneId, cardId); } if (this.zoneViewMap.containsKey(sourceZoneId) && this.zoneViewMap.containsKey(destinationZoneId)) { if (sourceZoneId == playerHandId) { PlayerHandZoneView sourceZone = getZoneView(sourceZoneId); CardHandDocumentController card = sourceZone.getCard(cardId); CardBattlefieldDocumentController newCard = new CardBattlefieldDocumentController(card.getCard(), this); ZoneView<?> zoneView = getZoneView(destinationZoneId); if (zoneView instanceof BattlefieldZoneView) { BattlefieldZoneView destinationZone = getZoneView(destinationZoneId); destinationZone.addPane(cardId, newCard); } else if (zoneView instanceof PlayerHandZoneView) { // TODO: Card moving from battlefield to hand for example, doesn't happen yet } else { // Card moving to deck is handled above } } } if (zoneViewMap.containsKey(sourceZoneId)) { ZoneView<?> view = zoneViewMap.get(sourceZoneId); view.removePane(cardId); } } private void addCardToDeck(int zoneId, int cardId) { logger.info("Add card to deck " + zoneId + " card " + cardId); Set<Integer> set = this.deckEntityIds.get(zoneId); set.add(cardId); this.repaintDeckLabels(); } private void processEntityRemoveMessage(EntityRemoveMessage message) { int entityId = message.getEntity(); for (Entry<Integer, Set<Integer>> deckIdsEntry : this.deckEntityIds.entrySet()) { int deckId = deckIdsEntry.getKey(); Set<Integer> deckIds = deckIdsEntry.getValue(); if (deckIds.contains(entityId)) { this.removeCardFromDeck(deckId, message.getEntity()); } } ZoneView<?> zoneView = getZoneViewForCard(message.getEntity()); if (zoneView != null) { zoneView.removePane(message.getEntity()); } } private void processAvailableTargetsMessage(AvailableTargetsMessage message) { this.chosenTargets.clear(); this.targetInfo = message; if (message.getAction().equals("Attack")) { ZoneView<?> attackerZoneView = getZoneViewForCard(message.getEntity()); if (attackerZoneView != null) { ((BattlefieldZoneView)attackerZoneView).setCardIsAttacking(message.getEntity()); } } for (int i = 
0; i < message.getTargets().length; i++) { int target = message.getTargets()[i]; if (target != this.opponentId) { ZoneView<?> zoneView = getZoneViewForCard(target); if (zoneView != null) { zoneView.setCardTargetable(target); } } else { // automatically target opponent UseableActionMessage newMessage = new UseableActionMessage(message.getEntity(), message.getAction(), false, target); this.createAndSendMessage(newMessage); } } this.createCancelActionsButton(); if (message.getMax() != message.getMin()) { // This is an action that can have a variant amount of targets. We need a "Done" button to use it for fewer targets createActionButton("Done", () -> sendActionWithCurrentTargets(message)); } } public boolean addTarget(int id) { if (chosenTargets.isEmpty() && targetInfo.getMax() == 1) { // Only one target, perform that action with target now this.createAndSendMessage(new UseableActionMessage(targetInfo.getEntity(), targetInfo.getAction(), false, id)); return false; // Card should not be selected, because we are sending the action directly } if (chosenTargets.size() >= targetInfo.getMax()) { logger.info("Cannot add more targets"); return false; } if (chosenTargets.add(id)) { return true; } else { chosenTargets.remove(id); return false; } } private void sendActionWithCurrentTargets(AvailableTargetsMessage message) { this.send(new UseAbilityMessage(gameId, message.getEntity(), message.getAction(), chosenTargets.stream().mapToInt(i -> i).toArray())); this.actionBox.getChildren().clear(); this.clearActiveFromAllCards(); } private void processClientDisconnectedMessage(ClientDisconnectedMessage message) { Platform.runLater(() -> this.loginMessage.setText("Opponent Left")); } private void processGameOverMessage(GameOverMessage message) { Platform.runLater(() -> this.loginMessage.setText("Game Over!")); } private void removeCardFromDeck(int zoneId, int cardId) { Set<Integer> set = this.deckEntityIds.get(zoneId); set.remove(cardId); this.repaintDeckLabels(); } private void createActionButton(UseableActionMessage message) { createActionButton(message.getAction(), () -> createAndSendMessage(message)); } private void createCancelActionsButton() { createActionButton("Cancel", () -> cancelAction()); } private void createActionButton(String label, Runnable action) { double paneHeight = actionBox.getHeight(); double paneWidth = actionBox.getWidth(); int maxActions = 8; double actionWidth = paneWidth / maxActions; ActionButton actionButton = new ActionButton(label, actionWidth, paneHeight, action); actionBox.getChildren().add(actionButton); } private void clearSavedActions() { this.savedMessages.clear(); this.actionBox.getChildren().clear(); } public void cancelAction() { this.clearActiveFromAllCards(); this.actionBox.getChildren().clear(); for (UseableActionMessage message : this.savedMessages) { this.processUseableActionMessage(message); } } private void clearActiveFromAllCards() { for (ZoneView<?> zoneView : this.zoneViewMap.values()) { zoneView.removeActiveAllCards(); zoneView.removeScrappableAllCards(); } } private void repaintStatBox(Pane statBox, Map<String, Integer> playerMap) { statBox.getChildren().clear(); for (Map.Entry<String, Integer> entry : playerMap.entrySet()) { String key = entry.getKey(); statBox.getChildren().add(new Label(key)); int value = entry.getValue(); statBox.getChildren().add(new Label(String.format("%d",value))); } } private void repaintDeckLabels() { if (this.deckEntityIds.containsKey(opponentDeckId)) { this.opponentDeckLabel.setText(String.format("%d", 
this.deckEntityIds.get(opponentDeckId).size())); } if (this.deckEntityIds.containsKey(playerDeckId)) { this.playerDeckLabel.setText(String.format("%d", this.deckEntityIds.get(playerDeckId).size())); } } private void createOpponentHand(ZoneMessage message) { for (int i : message.getEntities()) { this.addCardToOpponentHand(i); } } private void addCardToOpponentHand(int i) { ZoneView<?> opponentHand = this.zoneViewMap.get(this.opponentHandId); opponentHand.addSimplePane(i, this.cardForOpponentHand()); } private Pane cardForOpponentHand() { double paneHeight = opponentHandPane.getHeight(); double paneWidth = opponentHandPane.getWidth(); int maxCards = 10; double cardWidth = paneWidth / maxCards; Pane card = new Pane(); Rectangle cardBack = new Rectangle(0,0,cardWidth,paneHeight); cardBack.setFill(Color.AQUAMARINE); card.getChildren().add(cardBack); return card; } public void closeWindow() { Node source = this.rootPane; Stage stage = (Stage)source.getScene().getWindow(); stage.close(); } @SuppressWarnings("unchecked") private <T extends ZoneView<?>> T getZoneView(int id) { return (T) this.zoneViewMap.get(id); } private ZoneView<?> getZoneViewForCard(int id) { for (ZoneView<?> zoneView : this.zoneViewMap.values()) { if (zoneView.contains(id)) { return zoneView; } } return null; } public void closeGame() { this.send(new ChatMessage(1, "unused", "(Ends game " + gameId + ")")); // run on window close } }
cardshifter-fx/src/main/java/com/cardshifter/client/GameClientController.java
package com.cardshifter.client; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.TreeMap; import java.util.function.Consumer; import javafx.application.Platform; import javafx.fxml.FXML; import javafx.scene.control.Label; import javafx.scene.control.ListView; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.HBox; import javafx.scene.layout.Pane; import javafx.scene.layout.VBox; import javafx.scene.paint.Color; import javafx.scene.shape.Rectangle; import com.cardshifter.api.both.ChatMessage; import com.cardshifter.api.incoming.RequestTargetsMessage; import com.cardshifter.api.incoming.UseAbilityMessage; import com.cardshifter.api.messages.Message; import com.cardshifter.api.outgoing.AvailableTargetsMessage; import com.cardshifter.api.outgoing.CardInfoMessage; import com.cardshifter.api.outgoing.ClientDisconnectedMessage; import com.cardshifter.api.outgoing.EntityRemoveMessage; import com.cardshifter.api.outgoing.GameOverMessage; import com.cardshifter.api.outgoing.NewGameMessage; import com.cardshifter.api.outgoing.PlayerMessage; import com.cardshifter.api.outgoing.ResetAvailableActionsMessage; import com.cardshifter.api.outgoing.UpdateMessage; import com.cardshifter.api.outgoing.UseableActionMessage; import com.cardshifter.api.outgoing.WaitMessage; import com.cardshifter.api.outgoing.WelcomeMessage; import com.cardshifter.api.outgoing.ZoneChangeMessage; import com.cardshifter.api.outgoing.ZoneMessage; import com.cardshifter.client.views.ActionButton; import com.cardshifter.client.views.BattlefieldZoneView; import com.cardshifter.client.views.CardBattlefieldDocumentController; import com.cardshifter.client.views.CardHandDocumentController; import com.cardshifter.client.views.CardView; import com.cardshifter.client.views.PlayerHandZoneView; import com.cardshifter.client.views.ZoneView; import javafx.scene.Node; import javafx.stage.Stage; public class GameClientController { @FXML private AnchorPane rootPane; @FXML private Label loginMessage; @FXML private ListView<String> serverMessages; @FXML private VBox opponentStatBox; @FXML private VBox playerStatBox; @FXML private HBox actionBox; @FXML private HBox opponentHandPane; @FXML private HBox opponentBattlefieldPane; @FXML private Pane opponentDeckPane; @FXML private Label opponentDeckLabel; @FXML private HBox playerHandPane; @FXML private HBox playerBattlefieldPane; @FXML private Pane playerDeckPane; @FXML private Label playerDeckLabel; @FXML private Label playerName; @FXML private Label opponentName; private int gameId; private int playerIndex; private int opponentId; private int opponentHandId; private int opponentBattlefieldId; private int opponentDeckId; private final Map<Integer, Set<Integer>> deckEntityIds = new HashMap<>(); private int playerId; private int playerHandId; private int playerBattlefieldId; private int playerDeckId; private final Map<String, Integer> playerStatBoxMap = new HashMap<>(); private final Map<String, Integer> opponentStatBoxMap = new HashMap<>(); private final Map<Integer, ZoneView<?>> zoneViewMap = new HashMap<>(); private List<UseableActionMessage> savedMessages = new ArrayList<>(); private final Set<Integer> chosenTargets = new HashSet<>(); private AvailableTargetsMessage targetInfo; private Consumer<Message> sender; public void acceptConnectionSettings(NewGameMessage message, Consumer<Message> sender) { // this is passed into this object after it is 
automatically created by the FXML document this.playerIndex = message.getPlayerIndex(); this.gameId = message.getGameId(); System.out.println(String.format("You are player: %d", this.playerIndex)); this.sender = sender; } public void createAndSendMessage(UseableActionMessage action) { if (action.isTargetRequired()) { this.send(new RequestTargetsMessage(gameId, action.getId(), action.getAction())); } else { this.send(new UseAbilityMessage(gameId, action.getId(), action.getAction(), action.getTargetId())); } //A new list of actions will be sent back from the server, so it is okay to clear them this.actionBox.getChildren().clear(); this.clearActiveFromAllCards(); } private void send(Message message) { this.sender.accept(message); } public void processMessageFromServer(Message message) { serverMessages.getItems().add(message.toString()); //this is for diagnostics so I can copy paste the messages to know their format System.out.println(message.toString()); if (message instanceof WelcomeMessage) { Platform.runLater(() -> loginMessage.setText(message.toString())); } else if (message instanceof WaitMessage) { Platform.runLater(() -> loginMessage.setText(message.toString())); } else if (message instanceof PlayerMessage) { Platform.runLater(() -> this.processPlayerMessage((PlayerMessage)message)); } else if (message instanceof ZoneMessage) { this.assignZoneIdForZoneMessage((ZoneMessage)message); } else if (message instanceof CardInfoMessage) { this.processCardInfoMessage((CardInfoMessage)message); } else if (message instanceof UseableActionMessage) { this.savedMessages.add((UseableActionMessage)message); this.processUseableActionMessage((UseableActionMessage)message); } else if (message instanceof UpdateMessage) { this.processUpdateMessage((UpdateMessage)message); } else if (message instanceof ZoneChangeMessage) { this.processZoneChangeMessage((ZoneChangeMessage)message); } else if (message instanceof EntityRemoveMessage) { this.processEntityRemoveMessage((EntityRemoveMessage)message); } else if (message instanceof AvailableTargetsMessage) { this.processAvailableTargetsMessage((AvailableTargetsMessage)message); } else if (message instanceof ResetAvailableActionsMessage) { //this.processResetAvailableActionsMessage((ResetAvailableActionsMessage)message); this.clearSavedActions(); } else if (message instanceof ClientDisconnectedMessage) { this.processClientDisconnectedMessage((ClientDisconnectedMessage)message); } else if (message instanceof GameOverMessage) { this.processGameOverMessage((GameOverMessage)message); } } private void processPlayerMessage(PlayerMessage message) { if (message.getIndex() == this.playerIndex) { this.playerId = message.getId(); this.playerName.setText(message.getName()); this.processPlayerMessageForPlayer(message, playerStatBox, playerStatBoxMap); } else { this.opponentId = message.getId(); this.opponentName.setText(message.getName()); this.processPlayerMessageForPlayer(message, opponentStatBox, opponentStatBoxMap); this.loginMessage.setText("Opponent Connected"); } } private void processPlayerMessageForPlayer(PlayerMessage message, Pane statBox, Map<String, Integer> playerMap) { statBox.getChildren().clear(); Map<String, Integer> sortedMap = new TreeMap<>(message.getProperties()); playerMap.putAll(sortedMap); for (Map.Entry<String, Integer> entry : sortedMap.entrySet()) { String key = entry.getKey(); statBox.getChildren().add(new Label(key)); int value = entry.getValue(); statBox.getChildren().add(new Label(String.format("%d",value))); } } private void 
assignZoneIdForZoneMessage(ZoneMessage message) { if (!this.zoneViewMap.containsKey(message.getId())) { if (message.getName().equals("Battlefield")) { if(message.getOwner() == this.playerId) { this.playerBattlefieldId = message.getId(); this.zoneViewMap.put(message.getId(), new BattlefieldZoneView(message.getId(), playerBattlefieldPane)); } else { this.opponentBattlefieldId = message.getId(); this.zoneViewMap.put(message.getId(), new BattlefieldZoneView(message.getId(), opponentBattlefieldPane)); } } else if (message.getName().equals("Hand")) { if (message.getOwner() == this.playerId) { this.playerHandId = message.getId(); this.zoneViewMap.put(message.getId(), new PlayerHandZoneView(message.getId(), playerHandPane)); } else { this.opponentHandId = message.getId(); this.zoneViewMap.put(this.opponentHandId, new ZoneView<CardView>(message.getId(), opponentHandPane)); this.createOpponentHand(message); } } else if (message.getName().equals("Deck")) { if (message.getOwner() == this.playerId) { this.playerDeckId = message.getId(); this.deckEntityIds.put(message.getId(), new HashSet<>()); for (int entity : message.getEntities()) { this.addCardToDeck(playerDeckId, entity); } this.repaintDeckLabels(); this.zoneViewMap.put(message.getId(), new ZoneView<CardView>(message.getId(), playerDeckPane)); } else { this.opponentDeckId = message.getId(); this.deckEntityIds.put(message.getId(), new HashSet<>()); for (int entity : message.getEntities()) { this.addCardToDeck(opponentDeckId, entity); } this.repaintDeckLabels(); this.zoneViewMap.put(message.getId(), new ZoneView<CardView>(message.getId(), opponentDeckPane)); } } } } private void processCardInfoMessage(CardInfoMessage message) { int targetZone = message.getZone(); if (targetZone == opponentBattlefieldId) { this.addCardToOpponentBattlefieldPane(message); } else if (targetZone == opponentHandId) { this.addCardToOpponentHandPane(message); } else if (targetZone == playerBattlefieldId) { this.addCardToPlayerBattlefieldPane(message); } else if (targetZone == playerHandId) { this.addCardToPlayerHandPane(message); } } private void addCardToOpponentBattlefieldPane(CardInfoMessage message) { BattlefieldZoneView opponentBattlefield = getZoneView(opponentBattlefieldId); CardBattlefieldDocumentController card = new CardBattlefieldDocumentController(message, this); opponentBattlefield.addPane(message.getId(), card); } private void addCardToOpponentHandPane(CardInfoMessage message) { // this is unused because *KNOWN* cards don't pop up in opponent hand without reason (at least not now) } private void addCardToPlayerBattlefieldPane(CardInfoMessage message) { // this is unused because cards don't pop up in the battlefield magically, they are *moved* there (at least for now) } private void addCardToPlayerHandPane(CardInfoMessage message) { PlayerHandZoneView playerHand = getZoneView(playerHandId); CardHandDocumentController card = new CardHandDocumentController(message, this); playerHand.addPane(message.getId(), card); } private void processUseableActionMessage(UseableActionMessage message) { ZoneView<?> zoneView = getZoneViewForCard(message.getId()); System.out.println("Usable message: " + message + " inform zone " + zoneView); if (zoneView == null) { this.createActionButton(message); return; } if (message.getAction().equals("Attack")) { ((BattlefieldZoneView)zoneView).setCardCanAttack(message.getId(),message); } else if (message.getAction().equals("Scrap")) { zoneView.setCardScrappable(message.getId(), message); } else { zoneView.setCardActive(message.getId(), 
message); } } private void processUpdateMessage(UpdateMessage message) { if (message.getId() == this.playerId) { this.processUpdateMessageForPlayer(playerStatBox, message, playerStatBoxMap); } else if (message.getId() == this.opponentId) { this.processUpdateMessageForPlayer(opponentStatBox, message, opponentStatBoxMap); } else { this.processUpdateMessageForCard(message); } } private void processUpdateMessageForPlayer(Pane statBox, UpdateMessage message, Map<String, Integer> playerMap) { String key = (String)message.getKey(); Integer value = (Integer)message.getValue(); playerMap.put(key, value); this.repaintStatBox(statBox, playerMap); } private void processUpdateMessageForCard(UpdateMessage message) { ZoneView<?> zoneView = getZoneViewForCard(message.getId()); if (zoneView != null) { zoneView.updateCard(message.getId(), message); } } private void processZoneChangeMessage(ZoneChangeMessage message) { int sourceZoneId = message.getSourceZone(); int destinationZoneId = message.getDestinationZone(); int cardId = message.getEntity(); if (sourceZoneId == opponentDeckId) { this.removeCardFromDeck(sourceZoneId, cardId); } else if (sourceZoneId == playerDeckId) { this.removeCardFromDeck(sourceZoneId, cardId); } if (destinationZoneId == opponentHandId) { this.addCardToOpponentHand(cardId); } if (destinationZoneId == opponentDeckId || destinationZoneId == playerDeckId) { this.addCardToDeck(destinationZoneId, cardId); } if (this.zoneViewMap.containsKey(sourceZoneId) && this.zoneViewMap.containsKey(destinationZoneId)) { if (sourceZoneId == playerHandId) { PlayerHandZoneView sourceZone = getZoneView(sourceZoneId); CardHandDocumentController card = sourceZone.getCard(cardId); CardBattlefieldDocumentController newCard = new CardBattlefieldDocumentController(card.getCard(), this); ZoneView<?> zoneView = getZoneView(destinationZoneId); if (zoneView instanceof BattlefieldZoneView) { BattlefieldZoneView destinationZone = getZoneView(destinationZoneId); destinationZone.addPane(cardId, newCard); } else if (zoneView instanceof PlayerHandZoneView) { // TODO: Card moving from battlefield to hand for example, doesn't happen yet } else { // Card moving to deck is handled above } } } if (zoneViewMap.containsKey(sourceZoneId)) { ZoneView<?> view = zoneViewMap.get(sourceZoneId); view.removePane(cardId); } } private void addCardToDeck(int zoneId, int cardId) { System.out.println("Add card to deck " + zoneId + " card " + cardId); Set<Integer> set = this.deckEntityIds.get(zoneId); set.add(cardId); this.repaintDeckLabels(); } private void processEntityRemoveMessage(EntityRemoveMessage message) { int entityId = message.getEntity(); for (Entry<Integer, Set<Integer>> deckIdsEntry : this.deckEntityIds.entrySet()) { int deckId = deckIdsEntry.getKey(); Set<Integer> deckIds = deckIdsEntry.getValue(); if (deckIds.contains(entityId)) { this.removeCardFromDeck(deckId, message.getEntity()); } } ZoneView<?> zoneView = getZoneViewForCard(message.getEntity()); if (zoneView != null) { zoneView.removePane(message.getEntity()); } } private void processAvailableTargetsMessage(AvailableTargetsMessage message) { this.chosenTargets.clear(); this.targetInfo = message; if (message.getAction().equals("Attack")) { ZoneView<?> attackerZoneView = getZoneViewForCard(message.getEntity()); if (attackerZoneView != null) { ((BattlefieldZoneView)attackerZoneView).setCardIsAttacking(message.getEntity()); } } for (int i = 0; i < message.getTargets().length; i++) { int target = message.getTargets()[i]; if (target != this.opponentId) { ZoneView<?> zoneView = 
getZoneViewForCard(target); if (zoneView != null) { zoneView.setCardTargetable(target); } } else { // automatically target opponent UseableActionMessage newMessage = new UseableActionMessage(message.getEntity(), message.getAction(), false, target); this.createAndSendMessage(newMessage); } } this.createCancelActionsButton(); if (message.getMax() != message.getMin()) { // This is an action that can have a variant amount of targets. We need a "Done" button to use it for fewer targets createActionButton("Done", () -> sendActionWithCurrentTargets(message)); } } public boolean addTarget(int id) { if (chosenTargets.isEmpty() && targetInfo.getMax() == 1) { // Only one target, perform that action with target now this.createAndSendMessage(new UseableActionMessage(targetInfo.getEntity(), targetInfo.getAction(), false, id)); return false; // Card should not be selected, because we are sending the action directly } if (chosenTargets.size() >= targetInfo.getMax()) { System.out.println("Cannot add more targets"); return false; } if (chosenTargets.add(id)) { return true; } else { chosenTargets.remove(id); return false; } } private void sendActionWithCurrentTargets(AvailableTargetsMessage message) { this.send(new UseAbilityMessage(gameId, message.getEntity(), message.getAction(), chosenTargets.stream().mapToInt(i -> i).toArray())); this.actionBox.getChildren().clear(); this.clearActiveFromAllCards(); } private void processClientDisconnectedMessage(ClientDisconnectedMessage message) { Platform.runLater(() -> this.loginMessage.setText("Opponent Left")); } private void processGameOverMessage(GameOverMessage message) { Platform.runLater(() -> this.loginMessage.setText("Game Over!")); } private void removeCardFromDeck(int zoneId, int cardId) { Set<Integer> set = this.deckEntityIds.get(zoneId); set.remove(cardId); this.repaintDeckLabels(); } private void createActionButton(UseableActionMessage message) { createActionButton(message.getAction(), () -> createAndSendMessage(message)); } private void createCancelActionsButton() { createActionButton("Cancel", () -> cancelAction()); } private void createActionButton(String label, Runnable action) { double paneHeight = actionBox.getHeight(); double paneWidth = actionBox.getWidth(); int maxActions = 8; double actionWidth = paneWidth / maxActions; ActionButton actionButton = new ActionButton(label, actionWidth, paneHeight, action); actionBox.getChildren().add(actionButton); } private void clearSavedActions() { this.savedMessages.clear(); this.actionBox.getChildren().clear(); } public void cancelAction() { this.clearActiveFromAllCards(); this.actionBox.getChildren().clear(); for (UseableActionMessage message : this.savedMessages) { this.processUseableActionMessage(message); } } private void clearActiveFromAllCards() { for (ZoneView<?> zoneView : this.zoneViewMap.values()) { zoneView.removeActiveAllCards(); zoneView.removeScrappableAllCards(); } } private void repaintStatBox(Pane statBox, Map<String, Integer> playerMap) { statBox.getChildren().clear(); for (Map.Entry<String, Integer> entry : playerMap.entrySet()) { String key = entry.getKey(); statBox.getChildren().add(new Label(key)); int value = entry.getValue(); statBox.getChildren().add(new Label(String.format("%d",value))); } } private void repaintDeckLabels() { if (this.deckEntityIds.containsKey(opponentDeckId)) { this.opponentDeckLabel.setText(String.format("%d", this.deckEntityIds.get(opponentDeckId).size())); } if (this.deckEntityIds.containsKey(playerDeckId)) { this.playerDeckLabel.setText(String.format("%d", 
this.deckEntityIds.get(playerDeckId).size())); } } private void createOpponentHand(ZoneMessage message) { for (int i : message.getEntities()) { this.addCardToOpponentHand(i); } } private void addCardToOpponentHand(int i) { ZoneView<?> opponentHand = this.zoneViewMap.get(this.opponentHandId); opponentHand.addSimplePane(i, this.cardForOpponentHand()); } private Pane cardForOpponentHand() { double paneHeight = opponentHandPane.getHeight(); double paneWidth = opponentHandPane.getWidth(); int maxCards = 10; double cardWidth = paneWidth / maxCards; Pane card = new Pane(); Rectangle cardBack = new Rectangle(0,0,cardWidth,paneHeight); cardBack.setFill(Color.AQUAMARINE); card.getChildren().add(cardBack); return card; } public void closeWindow() { Node source = this.rootPane; Stage stage = (Stage)source.getScene().getWindow(); stage.close(); } @SuppressWarnings("unchecked") private <T extends ZoneView<?>> T getZoneView(int id) { return (T) this.zoneViewMap.get(id); } private ZoneView<?> getZoneViewForCard(int id) { for (ZoneView<?> zoneView : this.zoneViewMap.values()) { if (zoneView.contains(id)) { return zoneView; } } return null; } public void closeGame() { this.send(new ChatMessage(1, "unused", "(Ends game " + gameId + ")")); // run on window close } }
now using log4j in GameClientController
cardshifter-fx/src/main/java/com/cardshifter/client/GameClientController.java
now using log4j in GameClientController
Java
apache-2.0
bc12f263b559873a7dc5cf010473cec10c5b9cf4
0
facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho,facebook/litho
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho.widget; import android.content.Context; import android.content.res.ColorStateList; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.Typeface; import android.text.Editable; import android.text.InputFilter; import android.text.Layout; import android.text.TextUtils; import android.text.TextWatcher; import android.util.TypedValue; import android.view.Gravity; import android.widget.EditText; import com.facebook.R; import com.facebook.litho.ComponentContext; import com.facebook.litho.ComponentLayout; import com.facebook.litho.EventHandler; import com.facebook.litho.Output; import com.facebook.litho.Size; import com.facebook.litho.annotations.MountSpec; import com.facebook.litho.annotations.OnBind; import com.facebook.litho.annotations.OnCreateMountContent; import com.facebook.litho.annotations.OnLoadStyle; import com.facebook.litho.annotations.OnMeasure; import com.facebook.litho.annotations.OnMount; import com.facebook.litho.annotations.OnUnbind; import com.facebook.litho.annotations.OnUnmount; import com.facebook.litho.annotations.Prop; import com.facebook.litho.annotations.PropDefault; import com.facebook.litho.annotations.ResType; import com.facebook.litho.utils.MeasureUtils; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; import static android.text.Layout.Alignment.ALIGN_NORMAL; import static android.view.View.TEXT_ALIGNMENT_CENTER; import static android.view.View.TEXT_ALIGNMENT_TEXT_END; import static android.view.View.TEXT_ALIGNMENT_TEXT_START; @MountSpec(isPureRender = true, events = {TextChangedEvent.class}) class EditTextSpec { private static final Layout.Alignment[] ALIGNMENT = Layout.Alignment.values(); private static final TextUtils.TruncateAt[] TRUNCATE_AT = TextUtils.TruncateAt.values(); private static final Typeface DEFAULT_TYPEFACE = Typeface.DEFAULT; private static final int DEFAULT_COLOR = 0; private static final int[][] DEFAULT_TEXT_COLOR_STATE_LIST_STATES = {{0}}; private static final int[] DEFAULT_TEXT_COLOR_STATE_LIST_COLORS = {Color.BLACK}; private static final int DEFAULT_HINT_COLOR = 0; private static final int[][] DEFAULT_HINT_COLOR_STATE_LIST_STATES = {{0}}; private static final int[] DEFAULT_HINT_COLOR_STATE_LIST_COLORS = {Color.LTGRAY}; private static final int DEFAULT_GRAVITY = Gravity.CENTER_VERTICAL | Gravity.START; @PropDefault protected static final int minLines = Integer.MIN_VALUE; @PropDefault protected static final int maxLines = Integer.MAX_VALUE; @PropDefault protected static final int maxLength = Integer.MAX_VALUE; @PropDefault protected static final int shadowColor = Color.GRAY; @PropDefault protected static final int textColor = DEFAULT_COLOR; @PropDefault protected static final ColorStateList textColorStateList = new ColorStateList(DEFAULT_TEXT_COLOR_STATE_LIST_STATES,DEFAULT_TEXT_COLOR_STATE_LIST_COLORS); @PropDefault protected static final int hintColor = DEFAULT_HINT_COLOR; @PropDefault protected static final ColorStateList hintColorStateList = new ColorStateList(DEFAULT_HINT_COLOR_STATE_LIST_STATES,DEFAULT_HINT_COLOR_STATE_LIST_COLORS); @PropDefault protected static final int linkColor = DEFAULT_COLOR; @PropDefault 
protected static final int textSize = 13; @PropDefault protected static final int textStyle = DEFAULT_TYPEFACE.getStyle(); @PropDefault protected static final Typeface typeface = DEFAULT_TYPEFACE; @PropDefault protected static final float spacingMultiplier = 1.0f; @PropDefault protected static final Layout.Alignment textAlignment = ALIGN_NORMAL; @PropDefault protected static final int gravity = DEFAULT_GRAVITY; @PropDefault protected static final boolean editable = true; @PropDefault protected static final int selection = -1; @OnLoadStyle static void onLoadStyle( ComponentContext c, Output<TextUtils.TruncateAt> ellipsize, Output<Float> spacingMultiplier, Output<Integer> minLines, Output<Integer> maxLines, Output<Boolean> isSingleLine, Output<CharSequence> text, Output<ColorStateList> textColorStateList, Output<Integer> linkColor, Output<Integer> highlightColor, Output<Integer> textSize, Output<Layout.Alignment> textAlignment, Output<Integer> textStyle, Output<Float> shadowRadius, Output<Float> shadowDx, Output<Float> shadowDy, Output<Integer> shadowColor, Output<Integer> gravity) { final TypedArray a = c.obtainStyledAttributes(R.styleable.Text, 0); for (int i = 0, size = a.getIndexCount(); i < size; i++) { final int attr = a.getIndex(i); if (attr == R.styleable.Text_android_text) { text.set(a.getString(attr)); } else if (attr == R.styleable.Text_android_textColor) { textColorStateList.set(a.getColorStateList(attr)); } else if (attr == R.styleable.Text_android_textSize) { textSize.set(a.getDimensionPixelSize(attr, 0)); } else if (attr == R.styleable.Text_android_ellipsize) { final int index = a.getInteger(attr, 0); if (index > 0) { ellipsize.set(TRUNCATE_AT[index - 1]); } } else if (SDK_INT >= JELLY_BEAN_MR1 && attr == R.styleable.Text_android_textAlignment) { textAlignment.set(ALIGNMENT[a.getInteger(attr, 0)]); } else if (attr == R.styleable.Text_android_minLines) { minLines.set(a.getInteger(attr, -1)); } else if (attr == R.styleable.Text_android_maxLines) { maxLines.set(a.getInteger(attr, -1)); } else if (attr == R.styleable.Text_android_singleLine) { isSingleLine.set(a.getBoolean(attr, false)); } else if (attr == R.styleable.Text_android_textColorLink) { linkColor.set(a.getColor(attr, 0)); } else if (attr == R.styleable.Text_android_textColorHighlight) { highlightColor.set(a.getColor(attr, 0)); } else if (attr == R.styleable.Text_android_textStyle) { textStyle.set(a.getInteger(attr, 0)); } else if (attr == R.styleable.Text_android_lineSpacingMultiplier) { spacingMultiplier.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowDx) { shadowDx.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowDy) { shadowDy.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowRadius) { shadowRadius.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowColor) { shadowColor.set(a.getColor(attr, 0)); } else if (attr == R.styleable.Text_android_gravity) { gravity.set(a.getInteger(attr, 0)); } } a.recycle(); } @OnMeasure static void onMeasure( ComponentContext c, ComponentLayout layout, int widthSpec, int heightSpec, Size size, @Prop(optional = true, resType = ResType.STRING) CharSequence text, @Prop(optional = true, resType = ResType.STRING) CharSequence hint, @Prop(optional = true) TextUtils.TruncateAt ellipsize, @Prop(optional = true, resType = ResType.INT) int minLines, @Prop(optional = true, resType = ResType.INT) int maxLines, @Prop(optional = true, resType = ResType.INT) int maxLength, @Prop(optional = true, 
resType = ResType.DIMEN_OFFSET) float shadowRadius, @Prop(optional = true, resType = ResType.DIMEN_OFFSET) float shadowDx, @Prop(optional = true, resType = ResType.DIMEN_OFFSET) float shadowDy, @Prop(optional = true, resType = ResType.COLOR) int shadowColor, @Prop(optional = true, resType = ResType.BOOL) boolean isSingleLine, @Prop(optional = true, resType = ResType.COLOR) int textColor, @Prop(optional = true) ColorStateList textColorStateList, @Prop(optional = true, resType = ResType.COLOR) int hintColor, @Prop(optional = true) ColorStateList hintColorStateList, @Prop(optional = true, resType = ResType.COLOR) int linkColor, @Prop(optional = true, resType = ResType.COLOR) int highlightColor, @Prop(optional = true, resType = ResType.DIMEN_TEXT) int textSize, @Prop(optional = true, resType = ResType.DIMEN_OFFSET) float extraSpacing, @Prop(optional = true, resType = ResType.FLOAT) float spacingMultiplier, @Prop(optional = true) int textStyle, @Prop(optional = true) Typeface typeface, @Prop(optional = true) Layout.Alignment textAlignment, @Prop(optional = true) int gravity, @Prop(optional = true) boolean editable, @Prop(optional = true) int selection) { // TODO(11759579) - don't allocate a new EditText in every measure. final EditText editText = new EditText(c); initEditText( editText, text, hint, ellipsize, minLines, maxLines, maxLength, shadowRadius, shadowDx, shadowDy, shadowColor, isSingleLine, textColor, textColorStateList, hintColor, hintColorStateList,
litho-widget/src/main/java/com/facebook/litho/widget/EditTextSpec.java
/** * Copyright (c) 2014-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. */ package com.facebook.litho.widget; import android.content.Context; import android.content.res.ColorStateList; import android.content.res.TypedArray; import android.graphics.Color; import android.graphics.Typeface; import android.text.Editable; import android.text.InputFilter; import android.text.Layout; import android.text.TextUtils; import android.text.TextWatcher; import android.util.TypedValue; import android.view.Gravity; import android.widget.EditText; import com.facebook.R; import com.facebook.litho.ComponentContext; import com.facebook.litho.ComponentLayout; import com.facebook.litho.EventHandler; import com.facebook.litho.Output; import com.facebook.litho.Size; import com.facebook.litho.annotations.MountSpec; import com.facebook.litho.annotations.OnBind; import com.facebook.litho.annotations.OnCreateMountContent; import com.facebook.litho.annotations.OnLoadStyle; import com.facebook.litho.annotations.OnMeasure; import com.facebook.litho.annotations.OnMount; import com.facebook.litho.annotations.OnUnbind; import com.facebook.litho.annotations.OnUnmount; import com.facebook.litho.annotations.Prop; import com.facebook.litho.annotations.PropDefault; import com.facebook.litho.annotations.ResType; import com.facebook.litho.utils.MeasureUtils; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.JELLY_BEAN_MR1; import static android.text.Layout.Alignment.ALIGN_NORMAL; import static android.view.View.TEXT_ALIGNMENT_CENTER; import static android.view.View.TEXT_ALIGNMENT_TEXT_END; import static android.view.View.TEXT_ALIGNMENT_TEXT_START; @MountSpec(isPureRender = true, events = {TextChangedEvent.class}) class EditTextSpec { private static final Layout.Alignment[] ALIGNMENT = Layout.Alignment.values(); private static final TextUtils.TruncateAt[] TRUNCATE_AT = TextUtils.TruncateAt.values(); private static final Typeface DEFAULT_TYPEFACE = Typeface.DEFAULT; private static final int DEFAULT_COLOR = 0; private static final int[][] DEFAULT_TEXT_COLOR_STATE_LIST_STATES = {{0}}; private static final int[] DEFAULT_TEXT_COLOR_STATE_LIST_COLORS = {Color.BLACK}; private static final int DEFAULT_HINT_COLOR = 0; private static final int[][] DEFAULT_HINT_COLOR_STATE_LIST_STATES = {{0}}; private static final int[] DEFAULT_HINT_COLOR_STATE_LIST_COLORS = {Color.LTGRAY}; private static final int DEFAULT_GRAVITY = Gravity.CENTER_VERTICAL | Gravity.START; @PropDefault protected static final int minLines = Integer.MIN_VALUE; @PropDefault protected static final int maxLines = Integer.MAX_VALUE; @PropDefault protected static final int maxLength = Integer.MAX_VALUE; @PropDefault protected static final int shadowColor = Color.GRAY; @PropDefault protected static final int textColor = DEFAULT_COLOR; @PropDefault protected static final ColorStateList textColorStateList = new ColorStateList(DEFAULT_TEXT_COLOR_STATE_LIST_STATES,DEFAULT_TEXT_COLOR_STATE_LIST_COLORS); @PropDefault protected static final int hintColor = DEFAULT_HINT_COLOR; @PropDefault protected static final ColorStateList hintColorStateList = new ColorStateList(DEFAULT_HINT_COLOR_STATE_LIST_STATES,DEFAULT_HINT_COLOR_STATE_LIST_COLORS); @PropDefault protected static final int linkColor = DEFAULT_COLOR; @PropDefault 
protected static final int textSize = 13; @PropDefault protected static final int textStyle = DEFAULT_TYPEFACE.getStyle(); @PropDefault protected static final Typeface typeface = DEFAULT_TYPEFACE; @PropDefault protected static final float spacingMultiplier = 1.0f; @PropDefault protected static final Layout.Alignment textAlignment = ALIGN_NORMAL; @PropDefault protected static final int gravity = DEFAULT_GRAVITY; @PropDefault protected static final boolean editable = true; @PropDefault protected static final int selection = -1; @OnLoadStyle static void onLoadStyle( ComponentContext c, Output<TextUtils.TruncateAt> ellipsize, Output<Float> spacingMultiplier, Output<Integer> minLines, Output<Integer> maxLines, Output<Boolean> isSingleLine, Output<CharSequence> text, Output<ColorStateList> textColorStateList, Output<Integer> linkColor, Output<Integer> highlightColor, Output<Integer> textSize, Output<Layout.Alignment> textAlignment, Output<Integer> textStyle, Output<Float> shadowRadius, Output<Float> shadowDx, Output<Float> shadowDy, Output<Integer> shadowColor, Output<Integer> gravity) { final TypedArray a = c.obtainStyledAttributes(R.styleable.Text, 0); for (int i = 0, size = a.getIndexCount(); i < size; i++) { final int attr = a.getIndex(i); if (attr == R.styleable.Text_android_text) { text.set(a.getString(attr)); } else if (attr == R.styleable.Text_android_textColor) { textColorStateList.set(a.getColorStateList(attr)); } else if (attr == R.styleable.Text_android_textSize) { textSize.set(a.getDimensionPixelSize(attr, 0)); } else if (attr == R.styleable.Text_android_ellipsize) { final int index = a.getInteger(attr, 0); if (index > 0) { ellipsize.set(TRUNCATE_AT[index - 1]); } } else if (SDK_INT >= JELLY_BEAN_MR1 && attr == R.styleable.Text_android_textAlignment) { textAlignment.set(ALIGNMENT[a.getInteger(attr, 0)]); } else if (attr == R.styleable.Text_android_minLines) { minLines.set(a.getInteger(attr, -1)); } else if (attr == R.styleable.Text_android_maxLines) { maxLines.set(a.getInteger(attr, -1)); } else if (attr == R.styleable.Text_android_singleLine) { isSingleLine.set(a.getBoolean(attr, false)); } else if (attr == R.styleable.Text_android_textColorLink) { linkColor.set(a.getColor(attr, 0)); } else if (attr == R.styleable.Text_android_textColorHighlight) { highlightColor.set(a.getColor(attr, 0)); } else if (attr == R.styleable.Text_android_textStyle) { textStyle.set(a.getInteger(attr, 0)); } else if (attr == R.styleable.Text_android_lineSpacingMultiplier) { spacingMultiplier.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowDx) { shadowDx.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowDy) { shadowDy.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowRadius) { shadowRadius.set(a.getFloat(attr, 0)); } else if (attr == R.styleable.Text_android_shadowColor) { shadowColor.set(a.getColor(attr, 0)); } else if (attr == R.styleable.Text_android_gravity) { gravity.set(a.getInteger(attr, 0)); } } a.recycle(); } @OnMeasure static void onMeasure( ComponentContext c, ComponentLayout layout, int widthSpec, int heightSpec, Size size, @Prop(optional = true, resType = ResType.STRING) CharSequence text, @Prop(optional = true, resType = ResType.STRING) CharSequence hint, @Prop(optional = true) TextUtils.TruncateAt ellipsize, @Prop(optional = true, resType = ResType.INT) int minLines, @Prop(optional = true, resType = ResType.INT) int maxLines, @Prop(optional = true, resType = ResType.INT) int maxLength, @Prop(optional = true, 
resType = ResType.DIMEN_OFFSET) float shadowRadius, @Prop(optional = true, resType = ResType.DIMEN_OFFSET) float shadowDx, @Prop(optional = true, resType = ResType.DIMEN_OFFSET) float shadowDy, @Prop(optional = true, resType = ResType.COLOR) int shadowColor, @Prop(optional = true, resType = ResType.BOOL) boolean isSingleLine, @Prop(optional = true, resType = ResType.COLOR) int textColor, @Prop(optional = true) ColorStateList textColorStateList, @Prop(optional = true, resType = ResType.COLOR) int hintColor, @Prop(optional = true) ColorStateList hintColorStateList, @Prop(optional = true, resType = ResType.COLOR) int linkColor, @Prop(optional = true, resType = ResType.COLOR) int highlightColor, @Prop(optional = true, resType = ResType.DIMEN_TEXT) int textSize, @Prop(optional = true, resType = ResType.DIMEN_OFFSET) float extraSpacing, @Prop(optional = true, resType = ResType.FLOAT) float spacingMultiplier, @Prop(optional = true) int textStyle, @Prop(optional = true) Typeface typeface, @Prop(optional = true) Layout.Alignment textAlignment, @Prop(optional = true) int gravity, @Prop(optional = true) boolean editable, @Prop(optional = true) int selection) { // TODO(11759579) - don't allocate a new EditText in every measure. final EditText editText = new EditText(c); initEditText( editText, text, hint, ellipsize, minLines, maxLines, maxLength, shadowRadius, shadowDx, shadowDy, shadowColor, isSingleLine, textColor, textColorStateList,
Lines authored by uts

This commit forms part of the blame-preserving initial commit suite.
litho-widget/src/main/java/com/facebook/litho/widget/EditTextSpec.java
Lines authored by uts
Java
apache-2.0
c0ae278c85e02884d0c463f07672ed2acfe667ff
0
liamjjmcnamara/sicsthsense,liamjjmcnamara/sicsthsense,liamjjmcnamara/sicsthsense,liamjjmcnamara/sicsthsense,liamjjmcnamara/sicsthsense
/* * Copyright (c) 2013, Swedish Institute of Computer Science * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of The Swedish Institute of Computer Science nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE SWEDISH INSTITUTE OF COMPUTER SCIENCE BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* Description: Jersey Resource for SicsthSense Resources. Handles config of Resources * contains the Parsers and Streams of the associated Resource. * TODO: * */ package com.sics.sicsthsense.resources.atmosphere; import java.util.List; import java.util.Iterator; import java.util.concurrent.atomic.AtomicLong; import java.net.URI; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.DELETE; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.Consumes; import javax.ws.rs.QueryParam; import javax.ws.rs.PathParam; import javax.ws.rs.FormParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Optional; import com.yammer.metrics.annotation.Timed; import com.yammer.dropwizard.auth.Auth; import org.atmosphere.annotation.Broadcast; import org.atmosphere.annotation.Suspend; import com.sics.sicsthsense.Utils; import com.sics.sicsthsense.core.*; import com.sics.sicsthsense.jdbi.*; import com.sics.sicsthsense.model.*; import com.sics.sicsthsense.auth.*; import com.sics.sicsthsense.auth.annotation.RestrictedTo; import com.sics.sicsthsense.model.security.Authority; // publicly reachable path of the resource @Path("/{userId}/resources") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public class ResourceResource { private final StorageDAO storage; private final AtomicLong counter; private PollSystem pollSystem; private final Logger logger = LoggerFactory.getLogger(ResourceResource.class); public ParseData parseData; List<Parser> parsers; // constructor with the system's stoarge and poll system. 
public ResourceResource() { this.storage = DAOFactory.getInstance(); this.pollSystem = PollSystem.getInstance(); this.counter = new AtomicLong(); this.parseData = new ParseData();; } @GET @Timed public List<Resource> getResources(@PathParam("userId") long userId, @QueryParam("key") String key) { //User visitor = new User(); //logger.info("Getting all user "+userId+" resources for visitor "+visitor.toString()); Utils.checkHierarchy(userId); User user = storage.findUserById(userId); if (user==null) {logger.info("No userId match"); throw new WebApplicationException(Status.NOT_FOUND);} List<Resource> resources = storage.findResourcesByOwnerId(userId); if (!user.isAuthorised(key)) { logger.warn("User token/key doesn't match"); throw new WebApplicationException(Status.FORBIDDEN); /* Iterator<Resource> it = resources.iterator(); while (it.hasNext()) { Resource r = it.next(); if (r.) {it.remove();} }*/ } return resources; } // resourceName can be the resourceID or the URL-encoded resource label @GET @Path("/{resourceId}") @Produces({MediaType.APPLICATION_JSON}) @Timed public Resource getResource(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName, @QueryParam("key") String key) { logger.info("Getting user/resource: "+userId+"/"+resourceName); Utils.checkHierarchy(userId); Resource resource = Utils.findResourceByIdName(resourceName,userId); if (resource == null) { logger.error("Resource "+resourceName+" does not exist!"); throw new WebApplicationException(Status.NOT_FOUND); } if (resource.getOwner_id() != userId) { logger.error("User "+userId+" does not own resource "+resourceName); throw new WebApplicationException(Status.NOT_FOUND); } User user = storage.findUserById(userId); if (user==null) {throw new WebApplicationException(Status.NOT_FOUND);} if (!user.isAuthorised(key) && !resource.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } /* if (!resource.isReadable(visitor)) { logger.warn("Resource "+resource.getId()+" is not readable to user "+visitor.getId()); // throw new WebApplicationException(Status.FORBIDDEN); }*/ return resource; } // post new resource definition @POST @Consumes({MediaType.APPLICATION_JSON}) @Timed public long postResource( @PathParam("userId") long userId, Resource resource, @QueryParam("key") String key) { logger.info("Adding user/resource:"+resource.getLabel()); Utils.checkHierarchy(userId); User user = storage.findUserById(userId); if (user==null) {throw new WebApplicationException(Status.NOT_FOUND);} if (!user.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } resource.setOwner_id(userId); // should know the owner long resourceId = Utils.insertResource(resource); if (resource.getPolling_period() > 0) { // remake pollers with updated Resource attribtues pollSystem.rebuildResourcePoller(resourceId); } return resourceId; } // put updated resource definition @PUT @Consumes({MediaType.APPLICATION_JSON}) @Timed @Path("/{resourceId}") public void updateResource(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName, Resource resource, @QueryParam("key") String key) { logger.info("Updating resourceName:"+resourceName); User user = storage.findUserById(userId); Resource oldresource = Utils.findResourceByIdName(resourceName,userId); Utils.checkHierarchy(user,oldresource); if (!user.isAuthorised(key) && !resource.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } Utils.updateResource(oldresource.getId(), resource); } @DELETE @Timed @Path("/{resourceId}") 
public void deleteResource(//@RestrictedTo(Authority.ROLE_USER) User visitor, @PathParam("userId") long userId, @PathParam("resourceId") String resourceName, @QueryParam("key") String key) { logger.warn("Deleting resourceName:"+resourceName); User user = storage.findUserById(userId); Resource resource = Utils.findResourceByIdName(resourceName,userId); Utils.checkHierarchy(user,resource); if (!user.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } // delete child streams and parsers List<Stream> streams = storage.findStreamsByResourceId(resource.getId()); List<Parser> parsers = storage.findParsersByResourceId(resource.getId()); for (Parser p: parsers) {storage.deleteParser(p.getId());} for (Stream s: streams) {storage.deleteStream(s.getId());} storage.deleteResource(resource.getId()); // remake pollers with updated Resource attribtues pollSystem.rebuildResourcePoller(resource.getId()); } @GET @Path("/{resourceId}/data") public String getData() { return "Error: Only Streams can have data read"; } @GET @Path("/{resourceId}/rebuild") public String rebuild(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName) { Resource resource = Utils.findResourceByIdName(resourceName,userId); if (resource==null) { logger.error("Resource name does not exist for rebuild: "+resourceName); return "Error: resource name does not exist"; } pollSystem.rebuildResourcePoller(resource.getId()); logger.info("Rebuilt resource: "+resourceName); return "rebuild"; } // Post data to the resource, and run data through its parsers @POST @Consumes({MediaType.APPLICATION_JSON}) @Path("/{resourceId}/data") public String postData(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName, String data, @QueryParam("key") String key) { User user = storage.findUserById(userId); Resource resource = Utils.findResourceByIdName(resourceName); Utils.checkHierarchy(user,resource); if (!resource.isAuthorised(key) && !user.isAuthorised(key)) { logger.warn("Incorrect authorisation key!"); throw new WebApplicationException(Status.FORBIDDEN); } //logger.info("Adding data to resource: "+resource.getLabel()); // if parsers are undefined, create them! List<Parser> parsers = storage.findParsersByResourceId(resource.getId()); if (parsers==null || parsers.size()==0) { logger.info("No parsers defined! Trying to auto create for: "+resource.getLabel()); try { // staticness is a mess... parseData.autoCreateJsonParsers(PollSystem.getInstance().mapper, resource, data); } catch (Exception e) { logger.error("JSON parsing for auto creation failed!"); return "Error: JSON parsing for auto creation failed!"; } } //run it through the parsers applyParsers(resource.getId(), data); // update Resource last_posted storage.postedResource(resource.getId(),System.currentTimeMillis()); return "Success"; } public void applyParsers(long resourceId, String data) { //logger.info("Applying all parsers to data: "+data); if (parsers==null) { parsers = storage.findParsersByResourceId(resourceId); } for (Parser parser: parsers) { //logger.info("applying a parser "+parser.getInput_parser()); try { parseData.apply(parser,data); } catch (Exception e) { logger.error("Parsing "+data+" failed!"+e); } } } }
engine/src/main/java/com/sics/sicsthsense/resources/atmosphere/ResourceResource.java
/* * Copyright (c) 2013, Swedish Institute of Computer Science * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of The Swedish Institute of Computer Science nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE SWEDISH INSTITUTE OF COMPUTER SCIENCE BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* Description: Jersey Resource for SicsthSense Resources. Handles config of Resources * contains the Parsers and Streams of the associated Resource. * TODO: * */ package com.sics.sicsthsense.resources.atmosphere; import java.util.List; import java.util.Iterator; import java.util.concurrent.atomic.AtomicLong; import java.net.URI; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.DELETE; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.Consumes; import javax.ws.rs.QueryParam; import javax.ws.rs.PathParam; import javax.ws.rs.FormParam; import javax.ws.rs.core.MediaType; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Response.Status; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Optional; import com.yammer.metrics.annotation.Timed; import com.yammer.dropwizard.auth.Auth; import org.atmosphere.annotation.Broadcast; import org.atmosphere.annotation.Suspend; import com.sics.sicsthsense.Utils; import com.sics.sicsthsense.core.*; import com.sics.sicsthsense.jdbi.*; import com.sics.sicsthsense.model.*; import com.sics.sicsthsense.auth.*; import com.sics.sicsthsense.auth.annotation.RestrictedTo; import com.sics.sicsthsense.model.security.Authority; // publicly reachable path of the resource @Path("/{userId}/resources") @Produces(MediaType.APPLICATION_JSON) @Consumes(MediaType.APPLICATION_JSON) public class ResourceResource { private final StorageDAO storage; private final AtomicLong counter; private PollSystem pollSystem; private final Logger logger = LoggerFactory.getLogger(ResourceResource.class); public ParseData parseData; List<Parser> parsers; // constructor with the system's stoarge and poll system. 
public ResourceResource() { this.storage = DAOFactory.getInstance(); this.pollSystem = PollSystem.getInstance(); this.counter = new AtomicLong(); this.parseData = new ParseData();; } @GET @Timed public List<Resource> getResources(@PathParam("userId") long userId, @QueryParam("key") String key) { //User visitor = new User(); //logger.info("Getting all user "+userId+" resources for visitor "+visitor.toString()); Utils.checkHierarchy(userId); User user = storage.findUserById(userId); if (user==null) {logger.info("No userId match"); throw new WebApplicationException(Status.NOT_FOUND);} List<Resource> resources = storage.findResourcesByOwnerId(userId); if (!user.isAuthorised(key)) { logger.warn("User token/key doesn't match"); throw new WebApplicationException(Status.FORBIDDEN); /* Iterator<Resource> it = resources.iterator(); while (it.hasNext()) { Resource r = it.next(); if (r.) {it.remove();} }*/ } return resources; } // resourceName can be the resourceID or the URL-encoded resource label @GET @Path("/{resourceId}") @Produces({MediaType.APPLICATION_JSON}) @Timed public Resource getResource(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName, @QueryParam("key") String key) { logger.info("Getting user/resource: "+userId+"/"+resourceName); Utils.checkHierarchy(userId); Resource resource = Utils.findResourceByIdName(resourceName,userId); if (resource == null) { logger.error("Resource "+resourceName+" does not exist!"); throw new WebApplicationException(Status.NOT_FOUND); } if (resource.getOwner_id() != userId) { logger.error("User "+userId+" does not own resource "+resourceName); throw new WebApplicationException(Status.NOT_FOUND); } User user = storage.findUserById(userId); if (user==null || key==null) {throw new WebApplicationException(Status.NOT_FOUND);} if (!user.isAuthorised(key) && !resource.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } /* if (!resource.isReadable(visitor)) { logger.warn("Resource "+resource.getId()+" is not readable to user "+visitor.getId()); // throw new WebApplicationException(Status.FORBIDDEN); }*/ return resource; } // post new resource definition @POST @Consumes({MediaType.APPLICATION_JSON}) @Timed public long postResource( @PathParam("userId") long userId, Resource resource, @QueryParam("key") String key) { logger.info("Adding user/resource:"+resource.getLabel()); Utils.checkHierarchy(userId); User user = storage.findUserById(userId); if (user==null || key==null) {throw new WebApplicationException(Status.NOT_FOUND);} if (!user.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } resource.setOwner_id(userId); // should know the owner long resourceId = Utils.insertResource(resource); if (resource.getPolling_period() > 0) { // remake pollers with updated Resource attribtues pollSystem.rebuildResourcePoller(resourceId); } return resourceId; } // put updated resource definition @PUT @Consumes({MediaType.APPLICATION_JSON}) @Timed @Path("/{resourceId}") public void updateResource(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName, Resource resource, @QueryParam("key") String key) { logger.info("Updating resourceName:"+resourceName); User user = storage.findUserById(userId); Resource oldresource = Utils.findResourceByIdName(resourceName,userId); Utils.checkHierarchy(user,oldresource); if (!user.isAuthorised(key) && !resource.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } Utils.updateResource(oldresource.getId(), resource); } @DELETE @Timed 
@Path("/{resourceId}") public void deleteResource(//@RestrictedTo(Authority.ROLE_USER) User visitor, @PathParam("userId") long userId, @PathParam("resourceId") String resourceName, @QueryParam("key") String key) { logger.warn("Deleting resourceName:"+resourceName); User user = storage.findUserById(userId); Resource resource = Utils.findResourceByIdName(resourceName,userId); Utils.checkHierarchy(user,resource); if (!user.isAuthorised(key)) {throw new WebApplicationException(Status.FORBIDDEN); } // delete child streams and parsers List<Stream> streams = storage.findStreamsByResourceId(resource.getId()); List<Parser> parsers = storage.findParsersByResourceId(resource.getId()); for (Parser p: parsers) {storage.deleteParser(p.getId());} for (Stream s: streams) {storage.deleteStream(s.getId());} storage.deleteResource(resource.getId()); // remake pollers with updated Resource attribtues pollSystem.rebuildResourcePoller(resource.getId()); } @GET @Path("/{resourceId}/data") public String getData() { return "Error: Only Streams can have data read"; } @GET @Path("/{resourceId}/rebuild") public String rebuild(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName) { Resource resource = Utils.findResourceByIdName(resourceName,userId); if (resource==null) { logger.error("Resource name does not exist for rebuild: "+resourceName); return "Error: resource name does not exist"; } pollSystem.rebuildResourcePoller(resource.getId()); logger.info("Rebuilt resource: "+resourceName); return "rebuild"; } // Post data to the resource, and run data through its parsers @POST @Consumes({MediaType.APPLICATION_JSON}) @Path("/{resourceId}/data") public String postData(@PathParam("userId") long userId, @PathParam("resourceId") String resourceName, String data, @QueryParam("key") String key) { User user = storage.findUserById(userId); Resource resource = Utils.findResourceByIdName(resourceName); Utils.checkHierarchy(user,resource); if (!resource.isAuthorised(key) && !user.isAuthorised(key)) { logger.warn("Incorrect authorisation key!"); throw new WebApplicationException(Status.FORBIDDEN); } //logger.info("Adding data to resource: "+resource.getLabel()); // if parsers are undefined, create them! List<Parser> parsers = storage.findParsersByResourceId(resource.getId()); if (parsers==null || parsers.size()==0) { logger.info("No parsers defined! Trying to auto create for: "+resource.getLabel()); try { // staticness is a mess... parseData.autoCreateJsonParsers(PollSystem.getInstance().mapper, resource, data); } catch (Exception e) { logger.error("JSON parsing for auto creation failed!"); return "Error: JSON parsing for auto creation failed!"; } } //run it through the parsers applyParsers(resource.getId(), data); // update Resource last_posted storage.postedResource(resource.getId(),System.currentTimeMillis()); return "Success"; } public void applyParsers(long resourceId, String data) { //logger.info("Applying all parsers to data: "+data); if (parsers==null) { parsers = storage.findParsersByResourceId(resourceId); } for (Parser parser: parsers) { //logger.info("applying a parser "+parser.getInput_parser()); try { parseData.apply(parser,data); } catch (Exception e) { logger.error("Parsing "+data+" failed!"+e); } } } }
Missing key now gives Forbidden rather than Not found
engine/src/main/java/com/sics/sicsthsense/resources/atmosphere/ResourceResource.java
Missing key now gives Forbidden rather than Not found
Java
apache-2.0
ecbc8da952e125bc665832fe6d72f055b25eee4d
0
garys-esri/offline-mapper-android
/******************************************************************************* * Copyright 2015 Esri * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.esri.wdc.offlinemapper; import java.util.HashMap; import java.util.List; import android.app.Activity; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.AsyncTask; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.GridView; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.esri.core.io.UserCredentials; import com.esri.core.portal.Portal; import com.esri.core.portal.PortalItem; import com.esri.core.portal.PortalQueryParams; import com.esri.core.portal.PortalQueryResultSet; public class WebMapAdapter extends BaseAdapter { private static final String TAG = WebMapAdapter.class.getSimpleName(); private final Activity activity; private final Portal portal; private final Object resultSetLock = new Object(); private HashMap<Integer, PortalItem> resultsByIndex = new HashMap<Integer, PortalItem>(); private final HashMap<String, Bitmap> thumbnailsByItemId = new HashMap<String, Bitmap>(); private int totalResultCount = 0; public WebMapAdapter(Activity activity, UserCredentials userCredentials) { this.activity = activity; portal = new Portal("https://www.arcgis.com", userCredentials); doSearch(0); } private void doSearch(final int neededIndex) { //Check to see if this search is still needed synchronized (resultSetLock) { if (!resultsByIndex.containsKey(neededIndex)) { AsyncTask<Void, Void, PortalQueryResultSet<PortalItem>> task = new AsyncTask<Void, Void, PortalQueryResultSet<PortalItem>>() { @Override protected PortalQueryResultSet<PortalItem> doInBackground(Void... v) { try { PortalQueryParams params = new PortalQueryParams("owner:" + portal.getCredentials().getUserName() + " AND type:Web Map"); params.setStartIndex(neededIndex / 10 * 10 + 1); return portal.findItems(params); } catch (Exception e) { Log.e(TAG, "Error doing initial search", e); return null; } } @Override protected void onPostExecute(PortalQueryResultSet<PortalItem> result) { //This gives us 1) the total number of results and 2) a subset of results. 
synchronized (resultSetLock) { totalResultCount = result.getTotalResults(); resultsByIndex.clear(); thumbnailsByItemId.clear(); } List<PortalItem> results = result.getResults(); for (int i = 0; i < results.size(); i++) { Log.d(TAG, "putting result " + (i + result.getQueryParams().getStartIndex() - 1) + ": " + results.get(i).getTitle()); synchronized (resultSetLock) {//TODO consider moving outside of for loop resultsByIndex.put(i + result.getQueryParams().getStartIndex() - 1, results.get(i)); } } notifyDataSetChanged(); } }; task.execute(new Void[0]); } } } private Bitmap getThumbnail(PortalItem item) throws Exception { Bitmap thumbnail = null; synchronized (resultSetLock) { thumbnail = thumbnailsByItemId.get(item.getItemId()); } if (null == thumbnail) { Log.d(TAG, "fetching thumbnail for " + item.getTitle()); byte[] bytes = item.fetchThumbnail(); Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length); synchronized (resultSetLock) { thumbnailsByItemId.put(item.getItemId(), bmp); } } return thumbnail; } public int getCount() { synchronized (resultSetLock) { return totalResultCount; } } public Object getItem(int position) { synchronized (resultSetLock) { return resultsByIndex.get(position); } } public long getItemId(int position) { return position; } public View getView(final int position, View convertView, ViewGroup parent) { Log.d(TAG, "getView " + position + "; view is " + (null == convertView ? "" : "NOT ") + "null"); ViewGroup viewToReturn; if (convertView == null) { LinearLayout layout = new LinearLayout(activity); layout.setOrientation(LinearLayout.VERTICAL); final ImageView imageView = new ImageView(activity); imageView.setLayoutParams(new GridView.LayoutParams(200, 133)); imageView.setScaleType(ImageView.ScaleType.CENTER_CROP); imageView.setPadding(8, 8, 8, 8); layout.addView(imageView); final TextView textView = new TextView(activity); layout.addView(textView); viewToReturn = layout; PortalItem item = null; synchronized (resultSetLock) { item = resultsByIndex.get(position); } if (null == item) { doSearch(position); } synchronized (resultSetLock) { item = resultsByIndex.get(position); } if (null != item) { final String itemName = item.getTitle(); activity.runOnUiThread(new Runnable() { public void run() { textView.setText(itemName); } }); new AsyncTask<PortalItem, Void, Bitmap>() { protected Bitmap doInBackground(PortalItem... params) { try { return getThumbnail(params[0]); } catch (Exception e) { Log.e(TAG, "Couldn't get thumbnail for item " + params[0].getItemId(), e); return null; } }; protected void onPostExecute(final Bitmap result) { activity.runOnUiThread(new Runnable() { public void run() { imageView.setImageBitmap(result); } }); }; }.execute(new PortalItem[] { item }); } } else { viewToReturn = (ViewGroup) convertView; } return viewToReturn; } }
source/OfflineMapper/src/com/esri/wdc/offlinemapper/WebMapAdapter.java
/******************************************************************************* * Copyright 2015 Esri * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package com.esri.wdc.offlinemapper; import java.util.HashMap; import java.util.List; import android.app.Activity; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.os.AsyncTask; import android.util.Log; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.GridView; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.esri.core.io.UserCredentials; import com.esri.core.portal.Portal; import com.esri.core.portal.PortalItem; import com.esri.core.portal.PortalQueryParams; import com.esri.core.portal.PortalQueryResultSet; public class WebMapAdapter extends BaseAdapter { private static final String TAG = WebMapAdapter.class.getSimpleName(); private final Activity activity; private final Portal portal; private final Object resultSetLock = new Object(); private HashMap<Integer, PortalItem> resultsByIndex = new HashMap<Integer, PortalItem>(); private final HashMap<String, Bitmap> thumbnailsByItemId = new HashMap<String, Bitmap>(); private int totalResultCount = 0; public WebMapAdapter(Activity activity, UserCredentials userCredentials) { this.activity = activity; portal = new Portal("https://www.arcgis.com", userCredentials); doInitialSearch(); } private void doInitialSearch() { AsyncTask<Void, Void, PortalQueryResultSet<PortalItem>> task = new AsyncTask<Void, Void, PortalQueryResultSet<PortalItem>>() { @Override protected PortalQueryResultSet<PortalItem> doInBackground(Void... v) { try { PortalQueryParams params = new PortalQueryParams("owner:" + portal.getCredentials().getUserName() + " AND type:Web Map"); return portal.findItems(params); } catch (Exception e) { Log.e(TAG, "Error doing initial search", e); return null; } } @Override protected void onPostExecute(PortalQueryResultSet<PortalItem> result) { //This gives us 1) the total number of results and 2) the first subset of results. 
synchronized (resultSetLock) { totalResultCount = result.getTotalResults(); resultsByIndex.clear(); thumbnailsByItemId.clear(); List<PortalItem> results = result.getResults(); for (int i = 0; i < results.size(); i++) { Log.d(TAG, "result " + i + " is " + results.get(i).getTitle()); resultsByIndex.put(i, results.get(i)); } } notifyDataSetChanged(); } }; task.execute(new Void[0]); } private Bitmap getThumbnail(PortalItem item) throws Exception { Bitmap thumbnail; synchronized (resultSetLock) { thumbnail = thumbnailsByItemId.get(item.getItemId()); } if (null == thumbnail) { Log.d(TAG, "fetching thumbnail for " + item.getTitle()); byte[] bytes = item.fetchThumbnail(); Bitmap bmp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length); synchronized (resultSetLock) { thumbnailsByItemId.put(item.getItemId(), bmp); } } return thumbnail; } public int getCount() { synchronized (resultSetLock) { return totalResultCount; } } public Object getItem(int position) { synchronized (resultSetLock) { return resultsByIndex.get(position); } } public long getItemId(int position) { return position; } public View getView(final int position, View convertView, ViewGroup parent) { ViewGroup viewToReturn; if (convertView == null) { LinearLayout layout = new LinearLayout(activity); layout.setOrientation(LinearLayout.VERTICAL); ImageView imageView = new ImageView(activity) { @Override public void invalidate() { Log.d(TAG, "Invalidating " + position); super.invalidate(); } }; imageView.setLayoutParams(new GridView.LayoutParams(200, 133)); imageView.setScaleType(ImageView.ScaleType.CENTER_CROP); imageView.setPadding(8, 8, 8, 8); layout.addView(imageView); TextView textView = new TextView(activity); layout.addView(textView); viewToReturn = layout; } else { viewToReturn = (ViewGroup) convertView; } final ImageView imageView = (ImageView) viewToReturn.getChildAt(0); PortalItem item = null; synchronized (resultSetLock) { item = resultsByIndex.get(position); } if (null != item) { final TextView textView = (TextView) viewToReturn.getChildAt(1); final String itemName = item.getTitle(); activity.runOnUiThread(new Runnable() { public void run() { textView.setText(itemName); } }); new AsyncTask<PortalItem, Void, Bitmap>() { protected Bitmap doInBackground(PortalItem... params) { try { return getThumbnail(params[0]); } catch (Exception e) { Log.e(TAG, "Couldn't get thumbnail for item " + params[0].getItemId(), e); return null; } }; protected void onPostExecute(final Bitmap result) { activity.runOnUiThread(new Runnable() { public void run() { imageView.setImageBitmap(result); } }); }; }.execute(new PortalItem[] { item }); } return viewToReturn; } }
Trying to fetch results beyond first 10
source/OfflineMapper/src/com/esri/wdc/offlinemapper/WebMapAdapter.java
Trying to fetch results beyond first 10
Java
apache-2.0
e9a0c9e53c11a3e2aaf8ea4c6ab884572aa0cdd4
0
goblinr/omim,ygorshenin/omim,darina/omim,alexzatsepin/omim,goblinr/omim,mgsergio/omim,syershov/omim,trashkalmar/omim,Zverik/omim,Transtech/omim,Transtech/omim,syershov/omim,matsprea/omim,VladiMihaylenko/omim,bykoianko/omim,65apps/omim,goblinr/omim,mgsergio/omim,yunikkk/omim,Transtech/omim,matsprea/omim,syershov/omim,rokuz/omim,mgsergio/omim,VladiMihaylenko/omim,milchakov/omim,mgsergio/omim,65apps/omim,65apps/omim,dobriy-eeh/omim,rokuz/omim,yunikkk/omim,dobriy-eeh/omim,alexzatsepin/omim,VladiMihaylenko/omim,mapsme/omim,mapsme/omim,VladiMihaylenko/omim,VladiMihaylenko/omim,goblinr/omim,mapsme/omim,mapsme/omim,rokuz/omim,Zverik/omim,mapsme/omim,goblinr/omim,yunikkk/omim,matsprea/omim,ygorshenin/omim,goblinr/omim,alexzatsepin/omim,mgsergio/omim,rokuz/omim,darina/omim,alexzatsepin/omim,darina/omim,syershov/omim,Zverik/omim,yunikkk/omim,matsprea/omim,yunikkk/omim,mpimenov/omim,ygorshenin/omim,mpimenov/omim,mpimenov/omim,goblinr/omim,mgsergio/omim,mpimenov/omim,Zverik/omim,mpimenov/omim,matsprea/omim,alexzatsepin/omim,ygorshenin/omim,bykoianko/omim,alexzatsepin/omim,syershov/omim,trashkalmar/omim,milchakov/omim,Zverik/omim,dobriy-eeh/omim,darina/omim,dobriy-eeh/omim,milchakov/omim,mapsme/omim,VladiMihaylenko/omim,bykoianko/omim,rokuz/omim,syershov/omim,Zverik/omim,bykoianko/omim,milchakov/omim,mpimenov/omim,milchakov/omim,stangls/omim,yunikkk/omim,trashkalmar/omim,bykoianko/omim,Zverik/omim,dobriy-eeh/omim,stangls/omim,Transtech/omim,mapsme/omim,Transtech/omim,VladiMihaylenko/omim,stangls/omim,trashkalmar/omim,ygorshenin/omim,65apps/omim,Transtech/omim,syershov/omim,65apps/omim,dobriy-eeh/omim,yunikkk/omim,ygorshenin/omim,trashkalmar/omim,Zverik/omim,VladiMihaylenko/omim,milchakov/omim,mgsergio/omim,Transtech/omim,yunikkk/omim,darina/omim,bykoianko/omim,65apps/omim,VladiMihaylenko/omim,VladiMihaylenko/omim,trashkalmar/omim,syershov/omim,mpimenov/omim,Transtech/omim,goblinr/omim,dobriy-eeh/omim,bykoianko/omim,mapsme/omim,matsprea/omim,stangls/omim,stangls/omim,rokuz/omim,dobriy-eeh/omim,mapsme/omim,mpimenov/omim,trashkalmar/omim,mpimenov/omim,mapsme/omim,milchakov/omim,trashkalmar/omim,milchakov/omim,goblinr/omim,yunikkk/omim,stangls/omim,milchakov/omim,darina/omim,yunikkk/omim,rokuz/omim,goblinr/omim,matsprea/omim,dobriy-eeh/omim,ygorshenin/omim,syershov/omim,VladiMihaylenko/omim,alexzatsepin/omim,mpimenov/omim,mgsergio/omim,Zverik/omim,trashkalmar/omim,65apps/omim,65apps/omim,mgsergio/omim,rokuz/omim,mapsme/omim,syershov/omim,Zverik/omim,ygorshenin/omim,stangls/omim,goblinr/omim,rokuz/omim,rokuz/omim,mgsergio/omim,milchakov/omim,darina/omim,dobriy-eeh/omim,alexzatsepin/omim,darina/omim,alexzatsepin/omim,65apps/omim,goblinr/omim,matsprea/omim,65apps/omim,Transtech/omim,darina/omim,mapsme/omim,alexzatsepin/omim,rokuz/omim,bykoianko/omim,rokuz/omim,ygorshenin/omim,mpimenov/omim,stangls/omim,ygorshenin/omim,trashkalmar/omim,matsprea/omim,bykoianko/omim,syershov/omim,VladiMihaylenko/omim,mpimenov/omim,bykoianko/omim,Zverik/omim,milchakov/omim,alexzatsepin/omim,stangls/omim,syershov/omim,trashkalmar/omim,darina/omim,bykoianko/omim,yunikkk/omim,dobriy-eeh/omim,milchakov/omim,bykoianko/omim,Zverik/omim,syershov/omim,Transtech/omim,dobriy-eeh/omim,VladiMihaylenko/omim,alexzatsepin/omim,trashkalmar/omim,dobriy-eeh/omim,rokuz/omim,ygorshenin/omim,mapsme/omim,mgsergio/omim,mpimenov/omim,Zverik/omim,alexzatsepin/omim,stangls/omim,goblinr/omim,ygorshenin/omim,stangls/omim,Transtech/omim,darina/omim,darina/omim,bykoianko/omim,Transtech/omim,65apps/omim,matsprea/omim,milchakov/omim,mgsergio/omim,darina/omim
package com.mapswithme.maps.editor;

import android.annotation.SuppressLint;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import java.util.Random;

import com.mapswithme.maps.MwmApplication;
import com.mapswithme.maps.R;
import com.mapswithme.maps.base.BaseMwmDialogFragment;
import com.mapswithme.util.ConnectionState;
import com.mapswithme.util.sharing.SharingHelper;
import com.mapswithme.util.statistics.Statistics;

public class ViralFragment extends BaseMwmDialogFragment {
    private static final String EXTRA_CONGRATS_SHOWN = "CongratsShown";

    private String mViralText;

    private final String viralChangesMsg = MwmApplication.get().getString(R.string.editor_done_dialog_1);
    private final String viralRatingMsg = MwmApplication.get().getString(R.string.editor_done_dialog_2, getUserEditorRank());

    public static boolean shouldDisplay() {
        return !MwmApplication.prefs().contains(EXTRA_CONGRATS_SHOWN) &&
               Editor.nativeGetStats()[0] >= 2 &&
               ConnectionState.isConnected();
    }

    @Override
    protected int getStyle() {
        return STYLE_NO_TITLE;
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        MwmApplication.prefs().edit().putBoolean(EXTRA_CONGRATS_SHOWN, true).apply();

        @SuppressLint("InflateParams")
        final View root = inflater.inflate(R.layout.fragment_editor_viral, null);
        TextView viralText = (TextView) root.findViewById(R.id.viral);
        initViralText();
        viralText.setText(mViralText);
        root.findViewById(R.id.tell_friend).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                share();
                dismiss();
                Statistics.INSTANCE.trackEvent(Statistics.EventName.EDITOR_SHARE_CLICK);
            }
        });
        root.findViewById(R.id.close).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dismiss();
            }
        });
        Statistics.INSTANCE.trackEvent(Statistics.EventName.EDITOR_SHARE_SHOW,
                Statistics.params().add("showed", mViralText.equals(viralChangesMsg) ? "change" : "rating"));
        return root;
    }

    private void share() {
        SharingHelper.shareViralEditor(getActivity(), R.drawable.img_sharing_editor, R.string.editor_sharing_title, R.string.whatsnew_editor_message_1);
    }

    private void initViralText() {
        mViralText = new Random().nextBoolean() ? viralChangesMsg : viralRatingMsg;
    }

    // Counts fake rank in the rating of editors.
    private static int getUserEditorRank() {
        return 1000 + new Random().nextInt(1000);
    }
}
android/src/com/mapswithme/maps/editor/ViralFragment.java
package com.mapswithme.maps.editor;

import android.annotation.SuppressLint;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;

import java.util.Random;

import com.mapswithme.maps.MwmApplication;
import com.mapswithme.maps.R;
import com.mapswithme.maps.base.BaseMwmDialogFragment;
import com.mapswithme.util.ConnectionState;
import com.mapswithme.util.sharing.SharingHelper;
import com.mapswithme.util.statistics.Statistics;

public class ViralFragment extends BaseMwmDialogFragment {
    private static final String EXTRA_CONTRATS_SHOWN = "CongratsShown";

    private String mViralText;

    private final String viralChangesMsg = getString(R.string.editor_done_dialog_1);
    private final String viralRatingMsg = getString(R.string.editor_done_dialog_2, getUserEditorRank());

    public static boolean shouldDisplay() {
        return !MwmApplication.prefs().contains(EXTRA_CONTRATS_SHOWN) &&
               Editor.nativeGetStats()[0] >= 2 &&
               ConnectionState.isConnected();
    }

    @Override
    protected int getStyle() {
        return STYLE_NO_TITLE;
    }

    @Nullable
    @Override
    public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
        MwmApplication.prefs().edit().putBoolean(EXTRA_CONTRATS_SHOWN, true).apply();

        @SuppressLint("InflateParams")
        final View root = inflater.inflate(R.layout.fragment_editor_viral, null);
        TextView viralText = (TextView) root.findViewById(R.id.viral);
        initViralText();
        viralText.setText(mViralText);
        root.findViewById(R.id.tell_friend).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                share();
                dismiss();
                Statistics.INSTANCE.trackEvent(Statistics.EventName.EDITOR_SHARE_CLICK);
            }
        });
        root.findViewById(R.id.close).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dismiss();
            }
        });
        Statistics.INSTANCE.trackEvent(Statistics.EventName.EDITOR_SHARE_SHOW,
                Statistics.params().add("showed", mViralText.equals(viralChangesMsg) ? "change" : "rating"));
        return root;
    }

    private void share() {
        SharingHelper.shareViralEditor(getActivity(), R.drawable.img_sharing_editor, R.string.editor_sharing_title, R.string.whatsnew_editor_message_1);
    }

    private void initViralText() {
        mViralText = new Random().nextBoolean() ? viralChangesMsg : viralRatingMsg;
    }

    // Counts fake rank in the rating of editors.
    private static int getUserEditorRank() {
        return 1000 + new Random().nextInt(1000);
    }
}
[android] Fixed possible crash.
android/src/com/mapswithme/maps/editor/ViralFragment.java
[android] Fixed possible crash.
Java
apache-2.0
8f85206e6f6e275e6a8929f125ce71a303cf31b6
0
thinkofdeath/ThinkMap,thinkofdeath/ThinkMap,thinkofdeath/ThinkMap
/*
 * Copyright 2014 Matthew Collins
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.co.thinkofdeath.mapviewer.shared.support;

import com.google.gwt.core.client.JavaScriptObject;
import uk.co.thinkofdeath.mapviewer.shared.world.Chunk;

import java.util.ArrayList;
import java.util.List;

public class ChunkMap<T extends Chunk> extends JavaScriptObject {

    protected ChunkMap() {
    }

    public static native <T extends Chunk> ChunkMap<T> create()/*-{
        return {
            map: {},
            values: @uk.co.thinkofdeath.mapviewer.shared.support.ChunkMap::createList()()
        };
    }-*/;

    private static ArrayList createList() {
        return new ArrayList();
    }

    public final native int size()/*-{
        return this.values.@java.util.ArrayList::size()();
    }-*/;

    public final native boolean contains(int x, int z)/*-{
        return this.map[x + ":" + z] != null;
    }-*/;

    public final native void put(int x, int z, T chunk)/*-{
        this.map[x + ":" + z] = chunk;
        this.values.@java.util.ArrayList::add(Ljava/lang/Object;)(chunk);
    }-*/;

    public final native T get(int x, int z)/*-{
        return this.map[x + ":" + z];
    }-*/;

    public final native T remove(int x, int z)/*-{
        var key = x + ":" + z
        var val = this.map[key];
        delete this.map[key];
        this.values.@java.util.ArrayList::remove(Ljava/lang/Object;)(val);
        return val;
    }-*/;

    public final native List<Chunk> values()/*-{
        return this.values;
    }-*/;
}
shared/src/main/java/uk/co/thinkofdeath/mapviewer/shared/support/ChunkMap.java
/*
 * Copyright 2014 Matthew Collins
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.co.thinkofdeath.mapviewer.shared.support;

import com.google.gwt.core.client.JavaScriptObject;
import uk.co.thinkofdeath.mapviewer.shared.world.Chunk;

import java.util.List;

public class ChunkMap<T extends Chunk> extends JavaScriptObject {

    protected ChunkMap() {
    }

    public static native <T extends Chunk> ChunkMap<T> create()/*-{
        return {
            map: [],
            values: @java.util.ArrayList::new()()
        };
    }-*/;

    public final native int size()/*-{
        return this.values.@java.util.ArrayList::size()();
    }-*/;

    public final native boolean contains(int x, int z)/*-{
        var cx = this.map[x];
        if (cx == null) {
            return false;
        }
        return cx[z] != null;
    }-*/;

    public final native void put(int x, int z, T chunk)/*-{
        var cx = this.map[x];
        if (cx == null) {
            this.map[x] = cx = [];
        }
        cx[z] = chunk;
        this.values.@java.util.ArrayList::add(Ljava/lang/Object;)(chunk);
    }-*/;

    public final native T get(int x, int z)/*-{
        var cx = this.map[x];
        if (cx == null) {
            return null;
        }
        return cx[z];
    }-*/;

    public final native T remove(int x, int z)/*-{
        var cx = this.map[x];
        if (cx == null) {
            return null;
        }
        var val = cx[z];
        delete cx[z];
        this.values.@java.util.ArrayList::remove(Ljava/lang/Object;)(val);
        return val;
    }-*/;

    public final native List<Chunk> values()/*-{
        return this.values;
    }-*/;
}
Make ChunkMap less leaky
shared/src/main/java/uk/co/thinkofdeath/mapviewer/shared/support/ChunkMap.java
Make ChunkMap less leaky
Java
apache-2.0
05dc00814e3f71a8661e2586594b842d583f533c
0
michalmac/jsprit,muzuro/jsprit,balage1551/jsprit,HeinrichFilter/jsprit,graphhopper/jsprit,sinhautkarsh2014/winter_jsprit
/*******************************************************************************
 * Copyright (C) 2013 Stefan Schroeder
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Contributors:
 *     Stefan Schroeder - initial API and implementation
 ******************************************************************************/
package basics.algo;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

import org.apache.log4j.Logger;

import algorithms.acceptors.SolutionAcceptor;
import algorithms.selectors.SolutionSelector;
import basics.VehicleRoutingProblem;
import basics.VehicleRoutingProblemSolution;

public class SearchStrategy {

    public static class DiscoveredSolution {
        private VehicleRoutingProblemSolution solution;
        private boolean accepted;
        private String strategyName;

        public DiscoveredSolution(VehicleRoutingProblemSolution solution, boolean accepted, String strategyName) {
            super();
            this.solution = solution;
            this.accepted = accepted;
            this.strategyName = strategyName;
        }

        public VehicleRoutingProblemSolution getSolution() {
            return solution;
        }

        public boolean isAccepted() {
            return accepted;
        }

        public String getStrategyName() {
            return strategyName;
        }
    }

    private static Logger logger = Logger.getLogger(SearchStrategy.class);

    private Collection<SearchStrategyModule> searchStrategyModules = new ArrayList<SearchStrategyModule>();

    private SolutionSelector solutionSelector;

    private SolutionAcceptor solutionAcceptor;

    private String name;

    public SearchStrategy(SolutionSelector solutionSelector, SolutionAcceptor solutionAcceptor) {
        super();
        this.solutionSelector = solutionSelector;
        this.solutionAcceptor = solutionAcceptor;
        logger.info("initialise " + this);
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Collection<SearchStrategyModule> getSearchStrategyModules() {
        return Collections.unmodifiableCollection(searchStrategyModules);
    }

    public SolutionSelector getSolutionSelector() {
        return solutionSelector;
    }

    public SolutionAcceptor getSolutionAcceptor() {
        return solutionAcceptor;
    }

    @Override
    public String toString() {
        return "searchStrategy [#modules="+searchStrategyModules.size()+"][selector="+solutionSelector+"][acceptor="+solutionAcceptor+"]";
    }

    /**
     * Runs the search-strategy and its according modules, and returns DiscoveredSolution.
     *
     * <p>This involves three basic steps: 1) Selecting a solution from solutions (input parameter) according to {@link SolutionSelector}, 2) running the modules
     * ({@link SearchStrategyModule}) on the selectedSolution and 3) accepting the new solution according to {@link SolutionAcceptor}.
     * <p> Note that after 1) the selected solution is copied, thus the original solution is not modified.
     * <p> Note also that 3) modifies the input parameter solutions by adding, removing, replacing the existing solutions or whatever is defined in the solutionAcceptor.
     *
     * @param vrp
     * @param solutions which will be modified
     * @return discoveredSolutin
     * @see SolutionSelector, SearchStrategyModule, SolutionAcceptor
     */
    public DiscoveredSolution run(VehicleRoutingProblem vrp, Collection<VehicleRoutingProblemSolution> solutions){
        VehicleRoutingProblemSolution solution = solutionSelector.selectSolution(solutions);
        if(solution == null) throw new IllegalStateException("solution is null. check solutionSelector to return an appropiate solution.");
        VehicleRoutingProblemSolution lastSolution = VehicleRoutingProblemSolution.copyOf(solution);
        for(SearchStrategyModule module : searchStrategyModules){
            VehicleRoutingProblemSolution newSolution = module.runAndGetSolution(lastSolution);
            lastSolution = newSolution;
        }
        boolean solutionAccepted = solutionAcceptor.acceptSolution(solutions, lastSolution);
        DiscoveredSolution discoveredSolution = new DiscoveredSolution(lastSolution, solutionAccepted, getName());
        return discoveredSolution;
    }

    public void addModule(SearchStrategyModule module){
        if(module == null) throw new IllegalStateException("module to be added is null.");
        searchStrategyModules.add(module);
        logger.info("module added [module="+module+"][#modules="+searchStrategyModules.size()+"]");
    }

    public void addModuleListener(SearchStrategyModuleListener moduleListener) {
        for(SearchStrategyModule module : searchStrategyModules){
            module.addModuleListener(moduleListener);
        }
    }
}
jsprit-core/src/main/java/basics/algo/SearchStrategy.java
/*******************************************************************************
 * Copyright (C) 2013 Stefan Schroeder
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Contributors:
 *     Stefan Schroeder - initial API and implementation
 ******************************************************************************/
package basics.algo;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;

import org.apache.log4j.Logger;

import algorithms.acceptors.SolutionAcceptor;
import algorithms.selectors.SolutionSelector;
import basics.VehicleRoutingProblem;
import basics.VehicleRoutingProblemSolution;

public class SearchStrategy {

    public static class DiscoveredSolution {
        private VehicleRoutingProblemSolution solution;
        private boolean accepted;
        private String strategyName;

        public DiscoveredSolution(VehicleRoutingProblemSolution solution, boolean accepted, String strategyName) {
            super();
            this.solution = solution;
            this.accepted = accepted;
            this.strategyName = strategyName;
        }

        public VehicleRoutingProblemSolution getSolution() {
            return solution;
        }

        public boolean isAccepted() {
            return accepted;
        }

        public String getStrategyName() {
            return strategyName;
        }
    }

    private static Logger logger = Logger.getLogger(SearchStrategy.class);

    private Collection<SearchStrategyModule> searchStrategyModules = new ArrayList<SearchStrategyModule>();

    private SolutionSelector solutionSelector;

    private SolutionAcceptor solutionAcceptor;

    private String name;

    public SearchStrategy(SolutionSelector solutionSelector, SolutionAcceptor solutionAcceptor) {
        super();
        this.solutionSelector = solutionSelector;
        this.solutionAcceptor = solutionAcceptor;
        logger.info("initialise " + this);
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Collection<SearchStrategyModule> getSearchStrategyModules() {
        return Collections.unmodifiableCollection(searchStrategyModules);
    }

    public SolutionSelector getSolutionSelector() {
        return solutionSelector;
    }

    public SolutionAcceptor getSolutionAcceptor() {
        return solutionAcceptor;
    }

    @Override
    public String toString() {
        return "searchStrategy [#modules="+searchStrategyModules.size()+"][selector="+solutionSelector+"][acceptor="+solutionAcceptor+"]";
    }

    /**
     * Runs the search-strategy and its according modules, and returns true if a new solution has been accepted.
     *
     * <p>This involves three basic steps: 1) Selecting a solution from solutions (input parameter) according to {@link SolutionSelector}, 2) running the modules
     * ({@link SearchStrategyModule}) on the selectedSolution and 3) accepting the new solution according to {@link SolutionAcceptor}.
     * <p> Note that after 1) the selected solution is copied, thus the original solution is not modified.
     * <p> Note also that 3) modifies the input parameter solutions by adding, removing, replacing the existing solutions or whatever is defined in the solutionAcceptor.
     *
     * @param vrp
     * @param solutions which will be modified
     * @return boolean true if solution has been accepted, false otherwise
     * @see SolutionSelector, SearchStrategyModule, SolutionAcceptor
     */
    public DiscoveredSolution run(VehicleRoutingProblem vrp, Collection<VehicleRoutingProblemSolution> solutions){
        VehicleRoutingProblemSolution solution = solutionSelector.selectSolution(solutions);
        if(solution == null) throw new IllegalStateException("solution is null. check solutionSelector to return an appropiate solution.");
        VehicleRoutingProblemSolution lastSolution = VehicleRoutingProblemSolution.copyOf(solution);
        for(SearchStrategyModule module : searchStrategyModules){
            VehicleRoutingProblemSolution newSolution = module.runAndGetSolution(lastSolution);
            lastSolution = newSolution;
        }
        boolean solutionAccepted = solutionAcceptor.acceptSolution(solutions, lastSolution);
        DiscoveredSolution discoveredSolution = new DiscoveredSolution(lastSolution, solutionAccepted, getName());
        return discoveredSolution;
    }

    public void addModule(SearchStrategyModule module){
        if(module == null) throw new IllegalStateException("module to be added is null.");
        searchStrategyModules.add(module);
        logger.info("module added [module="+module+"][#modules="+searchStrategyModules.size()+"]");
    }

    public void addModuleListener(SearchStrategyModuleListener moduleListener) {
        for(SearchStrategyModule module : searchStrategyModules){
            module.addModuleListener(moduleListener);
        }
    }
}
modify javadoc
jsprit-core/src/main/java/basics/algo/SearchStrategy.java
modify javadoc
Java
apache-2.0
a607b4496589840be7706f8bf0243cb82ecea829
0
msgpack/msgpack-ruby,msgpack/msgpack-ruby,msgpack/msgpack-ruby
package org.msgpack.jruby; import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.BufferUnderflowException; import java.util.Iterator; import java.util.Arrays; import org.jruby.Ruby; import org.jruby.RubyObject; import org.jruby.RubyClass; import org.jruby.RubyBignum; import org.jruby.RubyString; import org.jruby.RubyArray; import org.jruby.RubyHash; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; import org.jcodings.Encoding; import org.jcodings.specific.UTF8Encoding; import static org.msgpack.jruby.Types.*; public class Decoder implements Iterator<IRubyObject> { private final Ruby runtime; private final Encoding binaryEncoding; private final Encoding utf8Encoding; private final RubyClass unpackErrorClass; private final RubyClass underflowErrorClass; private final RubyClass malformedFormatErrorClass; private final RubyClass stackErrorClass; private final RubyClass unexpectedTypeErrorClass; private final RubyClass unknownExtTypeErrorClass; private ExtensionRegistry registry; private ByteBuffer buffer; private boolean symbolizeKeys; private boolean freeze; private boolean allowUnknownExt; public Decoder(Ruby runtime) { this(runtime, null, new byte[] {}, 0, 0, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry) { this(runtime, registry, new byte[] {}, 0, 0, false, false, false); } public Decoder(Ruby runtime, byte[] bytes) { this(runtime, null, bytes, 0, bytes.length, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes) { this(runtime, registry, bytes, 0, bytes.length, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes, boolean symbolizeKeys, boolean freeze, boolean allowUnknownExt) { this(runtime, registry, bytes, 0, bytes.length, symbolizeKeys, freeze, allowUnknownExt); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes, int offset, int length) { this(runtime, registry, bytes, offset, length, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes, int offset, int length, boolean symbolizeKeys, boolean freeze, boolean allowUnknownExt) { this.runtime = runtime; this.registry = registry; this.symbolizeKeys = symbolizeKeys; this.freeze = freeze; this.allowUnknownExt = allowUnknownExt; this.binaryEncoding = runtime.getEncodingService().getAscii8bitEncoding(); this.utf8Encoding = UTF8Encoding.INSTANCE; this.unpackErrorClass = runtime.getModule("MessagePack").getClass("UnpackError"); this.underflowErrorClass = runtime.getModule("MessagePack").getClass("UnderflowError"); this.malformedFormatErrorClass = runtime.getModule("MessagePack").getClass("MalformedFormatError"); this.stackErrorClass = runtime.getModule("MessagePack").getClass("StackError"); this.unexpectedTypeErrorClass = runtime.getModule("MessagePack").getClass("UnexpectedTypeError"); this.unknownExtTypeErrorClass = runtime.getModule("MessagePack").getClass("UnknownExtTypeError"); this.symbolizeKeys = symbolizeKeys; this.allowUnknownExt = allowUnknownExt; feed(bytes, offset, length); } public void feed(byte[] bytes) { feed(bytes, 0, bytes.length); } public void feed(byte[] bytes, int offset, int length) { if (buffer == null) { buffer = ByteBuffer.wrap(bytes, offset, length); } else { ByteBuffer newBuffer = ByteBuffer.allocate(buffer.remaining() + length); newBuffer.put(buffer); newBuffer.put(bytes, offset, length); newBuffer.flip(); buffer = newBuffer; } } public void reset() { 
buffer = null; } public int offset() { return buffer.position(); } private IRubyObject consumeUnsignedLong() { long value = buffer.getLong(); if (value < 0) { return RubyBignum.newBignum(runtime, BigInteger.valueOf(value & ((1L<<63)-1)).setBit(63)); } else { return runtime.newFixnum(value); } } private IRubyObject consumeString(int size, Encoding encoding) { byte[] bytes = readBytes(size); ByteList byteList = new ByteList(bytes, encoding); RubyString string = runtime.newString(byteList); if (this.freeze) { string = runtime.freezeAndDedupString(string); } return string; } private IRubyObject consumeArray(int size) { IRubyObject[] elements = new IRubyObject[size]; for (int i = 0; i < size; i++) { elements[i] = next(); } return runtime.newArray(elements); } private IRubyObject consumeHash(int size) { RubyHash hash = RubyHash.newHash(runtime); for (int i = 0; i < size; i++) { IRubyObject key = next(); if (key instanceof RubyString) { if (this.symbolizeKeys) { key = ((RubyString) key).intern(); } else { key = runtime.freezeAndDedupString((RubyString) key); } } hash.fastASet(key, next()); } return hash; } private IRubyObject consumeExtension(int size) { int type = buffer.get(); byte[] payload = readBytes(size); if (registry != null) { IRubyObject proc = registry.lookupUnpackerByTypeId(type); if (proc != null) { ByteList byteList = new ByteList(payload, runtime.getEncodingService().getAscii8bitEncoding()); return proc.callMethod(runtime.getCurrentContext(), "call", runtime.newString(byteList)); } } if (this.allowUnknownExt) { return ExtensionValue.newExtensionValue(runtime, type, payload); } throw runtime.newRaiseException(unknownExtTypeErrorClass, "unexpected extension type"); } private byte[] readBytes(int size) { byte[] payload = new byte[size]; buffer.get(payload); return payload; } @Override public void remove() { throw new UnsupportedOperationException(); } @Override public boolean hasNext() { return buffer.remaining() > 0; } public IRubyObject read_array_header() { int position = buffer.position(); try { byte b = buffer.get(); if ((b & 0xf0) == 0x90) { return runtime.newFixnum(b & 0x0f); } else if (b == ARY16) { return runtime.newFixnum(buffer.getShort() & 0xffff); } else if (b == ARY32) { return runtime.newFixnum(buffer.getInt()); } throw runtime.newRaiseException(unexpectedTypeErrorClass, "unexpected type"); } catch (RaiseException re) { buffer.position(position); throw re; } catch (BufferUnderflowException bue) { buffer.position(position); throw runtime.newRaiseException(underflowErrorClass, "Not enough bytes available"); } } public IRubyObject read_map_header() { int position = buffer.position(); try { byte b = buffer.get(); if ((b & 0xf0) == 0x80) { return runtime.newFixnum(b & 0x0f); } else if (b == MAP16) { return runtime.newFixnum(buffer.getShort() & 0xffff); } else if (b == MAP32) { return runtime.newFixnum(buffer.getInt()); } throw runtime.newRaiseException(unexpectedTypeErrorClass, "unexpected type"); } catch (RaiseException re) { buffer.position(position); throw re; } catch (BufferUnderflowException bue) { buffer.position(position); throw runtime.newRaiseException(underflowErrorClass, "Not enough bytes available"); } } @Override public IRubyObject next() { IRubyObject next = consumeNext(); if (freeze) { next.setFrozen(true); } return next; } private IRubyObject consumeNext() { int position = buffer.position(); try { byte b = buffer.get(); outer: switch ((b >> 4) & 0xf) { case 0x8: return consumeHash(b & 0x0f); case 0x9: return consumeArray(b & 0x0f); case 0xa: case 0xb: return 
consumeString(b & 0x1f, utf8Encoding); case 0xc: switch (b) { case NIL: return runtime.getNil(); case FALSE: return runtime.getFalse(); case TRUE: return runtime.getTrue(); case BIN8: return consumeString(buffer.get() & 0xff, binaryEncoding); case BIN16: return consumeString(buffer.getShort() & 0xffff, binaryEncoding); case BIN32: return consumeString(buffer.getInt(), binaryEncoding); case VAREXT8: return consumeExtension(buffer.get() & 0xff); case VAREXT16: return consumeExtension(buffer.getShort() & 0xffff); case VAREXT32: return consumeExtension(buffer.getInt()); case FLOAT32: return runtime.newFloat(buffer.getFloat()); case FLOAT64: return runtime.newFloat(buffer.getDouble()); case UINT8: return runtime.newFixnum(buffer.get() & 0xffL); case UINT16: return runtime.newFixnum(buffer.getShort() & 0xffffL); case UINT32: return runtime.newFixnum(buffer.getInt() & 0xffffffffL); case UINT64: return consumeUnsignedLong(); default: break outer; } case 0xd: switch (b) { case INT8: return runtime.newFixnum(buffer.get()); case INT16: return runtime.newFixnum(buffer.getShort()); case INT32: return runtime.newFixnum(buffer.getInt()); case INT64: return runtime.newFixnum(buffer.getLong()); case FIXEXT1: return consumeExtension(1); case FIXEXT2: return consumeExtension(2); case FIXEXT4: return consumeExtension(4); case FIXEXT8: return consumeExtension(8); case FIXEXT16: return consumeExtension(16); case STR8: return consumeString(buffer.get() & 0xff, utf8Encoding); case STR16: return consumeString(buffer.getShort() & 0xffff, utf8Encoding); case STR32: return consumeString(buffer.getInt(), utf8Encoding); case ARY16: return consumeArray(buffer.getShort() & 0xffff); case ARY32: return consumeArray(buffer.getInt()); case MAP16: return consumeHash(buffer.getShort() & 0xffff); case MAP32: return consumeHash(buffer.getInt()); default: break outer; } case 0xe: case 0xf: return runtime.newFixnum((0x1f & b) - 0x20); default: return runtime.newFixnum(b); } buffer.position(position); throw runtime.newRaiseException(malformedFormatErrorClass, "Illegal byte sequence"); } catch (RaiseException re) { buffer.position(position); throw re; } catch (BufferUnderflowException bue) { buffer.position(position); throw runtime.newRaiseException(underflowErrorClass, "Not enough bytes available"); } } }
ext/java/org/msgpack/jruby/Decoder.java
package org.msgpack.jruby; import java.math.BigInteger; import java.nio.ByteBuffer; import java.nio.BufferUnderflowException; import java.util.Iterator; import java.util.Arrays; import org.jruby.Ruby; import org.jruby.RubyObject; import org.jruby.RubyClass; import org.jruby.RubyBignum; import org.jruby.RubyString; import org.jruby.RubyArray; import org.jruby.RubyHash; import org.jruby.exceptions.RaiseException; import org.jruby.runtime.builtin.IRubyObject; import org.jruby.util.ByteList; import org.jcodings.Encoding; import org.jcodings.specific.UTF8Encoding; import static org.msgpack.jruby.Types.*; public class Decoder implements Iterator<IRubyObject> { private final Ruby runtime; private final Encoding binaryEncoding; private final Encoding utf8Encoding; private final RubyClass unpackErrorClass; private final RubyClass underflowErrorClass; private final RubyClass malformedFormatErrorClass; private final RubyClass stackErrorClass; private final RubyClass unexpectedTypeErrorClass; private final RubyClass unknownExtTypeErrorClass; private ExtensionRegistry registry; private ByteBuffer buffer; private boolean symbolizeKeys; private boolean freeze; private boolean allowUnknownExt; public Decoder(Ruby runtime) { this(runtime, null, new byte[] {}, 0, 0, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry) { this(runtime, registry, new byte[] {}, 0, 0, false, false, false); } public Decoder(Ruby runtime, byte[] bytes) { this(runtime, null, bytes, 0, bytes.length, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes) { this(runtime, registry, bytes, 0, bytes.length, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes, boolean symbolizeKeys, boolean freeze, boolean allowUnknownExt) { this(runtime, registry, bytes, 0, bytes.length, symbolizeKeys, freeze, allowUnknownExt); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes, int offset, int length) { this(runtime, registry, bytes, offset, length, false, false, false); } public Decoder(Ruby runtime, ExtensionRegistry registry, byte[] bytes, int offset, int length, boolean symbolizeKeys, boolean freeze, boolean allowUnknownExt) { this.runtime = runtime; this.registry = registry; this.symbolizeKeys = symbolizeKeys; this.freeze = freeze; this.allowUnknownExt = allowUnknownExt; this.binaryEncoding = runtime.getEncodingService().getAscii8bitEncoding(); this.utf8Encoding = UTF8Encoding.INSTANCE; this.unpackErrorClass = runtime.getModule("MessagePack").getClass("UnpackError"); this.underflowErrorClass = runtime.getModule("MessagePack").getClass("UnderflowError"); this.malformedFormatErrorClass = runtime.getModule("MessagePack").getClass("MalformedFormatError"); this.stackErrorClass = runtime.getModule("MessagePack").getClass("StackError"); this.unexpectedTypeErrorClass = runtime.getModule("MessagePack").getClass("UnexpectedTypeError"); this.unknownExtTypeErrorClass = runtime.getModule("MessagePack").getClass("UnknownExtTypeError"); this.symbolizeKeys = symbolizeKeys; this.allowUnknownExt = allowUnknownExt; feed(bytes, offset, length); } public void feed(byte[] bytes) { feed(bytes, 0, bytes.length); } public void feed(byte[] bytes, int offset, int length) { if (buffer == null) { buffer = ByteBuffer.wrap(bytes, offset, length); } else { ByteBuffer newBuffer = ByteBuffer.allocate(buffer.remaining() + length); newBuffer.put(buffer); newBuffer.put(bytes, offset, length); newBuffer.flip(); buffer = newBuffer; } } public void reset() { 
buffer = null; } public int offset() { return buffer.position(); } private IRubyObject consumeUnsignedLong() { long value = buffer.getLong(); if (value < 0) { return RubyBignum.newBignum(runtime, BigInteger.valueOf(value & ((1L<<63)-1)).setBit(63)); } else { return runtime.newFixnum(value); } } private IRubyObject consumeString(int size, Encoding encoding) { byte[] bytes = readBytes(size); ByteList byteList = new ByteList(bytes, encoding); RubyString string = runtime.newString(byteList); if (this.freeze) { string.setFrozen(true); string = runtime.freezeAndDedupString(string); } return string; } private IRubyObject consumeArray(int size) { IRubyObject[] elements = new IRubyObject[size]; for (int i = 0; i < size; i++) { elements[i] = next(); } return runtime.newArray(elements); } private IRubyObject consumeHash(int size) { RubyHash hash = RubyHash.newHash(runtime); for (int i = 0; i < size; i++) { IRubyObject key = next(); if (key instanceof RubyString) { if (this.symbolizeKeys) { key = ((RubyString) key).intern(); } else { key.setFrozen(true); key = runtime.freezeAndDedupString((RubyString) key); } } hash.fastASet(key, next()); } return hash; } private IRubyObject consumeExtension(int size) { int type = buffer.get(); byte[] payload = readBytes(size); if (registry != null) { IRubyObject proc = registry.lookupUnpackerByTypeId(type); if (proc != null) { ByteList byteList = new ByteList(payload, runtime.getEncodingService().getAscii8bitEncoding()); return proc.callMethod(runtime.getCurrentContext(), "call", runtime.newString(byteList)); } } if (this.allowUnknownExt) { return ExtensionValue.newExtensionValue(runtime, type, payload); } throw runtime.newRaiseException(unknownExtTypeErrorClass, "unexpected extension type"); } private byte[] readBytes(int size) { byte[] payload = new byte[size]; buffer.get(payload); return payload; } @Override public void remove() { throw new UnsupportedOperationException(); } @Override public boolean hasNext() { return buffer.remaining() > 0; } public IRubyObject read_array_header() { int position = buffer.position(); try { byte b = buffer.get(); if ((b & 0xf0) == 0x90) { return runtime.newFixnum(b & 0x0f); } else if (b == ARY16) { return runtime.newFixnum(buffer.getShort() & 0xffff); } else if (b == ARY32) { return runtime.newFixnum(buffer.getInt()); } throw runtime.newRaiseException(unexpectedTypeErrorClass, "unexpected type"); } catch (RaiseException re) { buffer.position(position); throw re; } catch (BufferUnderflowException bue) { buffer.position(position); throw runtime.newRaiseException(underflowErrorClass, "Not enough bytes available"); } } public IRubyObject read_map_header() { int position = buffer.position(); try { byte b = buffer.get(); if ((b & 0xf0) == 0x80) { return runtime.newFixnum(b & 0x0f); } else if (b == MAP16) { return runtime.newFixnum(buffer.getShort() & 0xffff); } else if (b == MAP32) { return runtime.newFixnum(buffer.getInt()); } throw runtime.newRaiseException(unexpectedTypeErrorClass, "unexpected type"); } catch (RaiseException re) { buffer.position(position); throw re; } catch (BufferUnderflowException bue) { buffer.position(position); throw runtime.newRaiseException(underflowErrorClass, "Not enough bytes available"); } } @Override public IRubyObject next() { IRubyObject next = consumeNext(); if (freeze) { next.setFrozen(true); } return next; } private IRubyObject consumeNext() { int position = buffer.position(); try { byte b = buffer.get(); outer: switch ((b >> 4) & 0xf) { case 0x8: return consumeHash(b & 0x0f); case 0x9: return 
consumeArray(b & 0x0f); case 0xa: case 0xb: return consumeString(b & 0x1f, utf8Encoding); case 0xc: switch (b) { case NIL: return runtime.getNil(); case FALSE: return runtime.getFalse(); case TRUE: return runtime.getTrue(); case BIN8: return consumeString(buffer.get() & 0xff, binaryEncoding); case BIN16: return consumeString(buffer.getShort() & 0xffff, binaryEncoding); case BIN32: return consumeString(buffer.getInt(), binaryEncoding); case VAREXT8: return consumeExtension(buffer.get() & 0xff); case VAREXT16: return consumeExtension(buffer.getShort() & 0xffff); case VAREXT32: return consumeExtension(buffer.getInt()); case FLOAT32: return runtime.newFloat(buffer.getFloat()); case FLOAT64: return runtime.newFloat(buffer.getDouble()); case UINT8: return runtime.newFixnum(buffer.get() & 0xffL); case UINT16: return runtime.newFixnum(buffer.getShort() & 0xffffL); case UINT32: return runtime.newFixnum(buffer.getInt() & 0xffffffffL); case UINT64: return consumeUnsignedLong(); default: break outer; } case 0xd: switch (b) { case INT8: return runtime.newFixnum(buffer.get()); case INT16: return runtime.newFixnum(buffer.getShort()); case INT32: return runtime.newFixnum(buffer.getInt()); case INT64: return runtime.newFixnum(buffer.getLong()); case FIXEXT1: return consumeExtension(1); case FIXEXT2: return consumeExtension(2); case FIXEXT4: return consumeExtension(4); case FIXEXT8: return consumeExtension(8); case FIXEXT16: return consumeExtension(16); case STR8: return consumeString(buffer.get() & 0xff, utf8Encoding); case STR16: return consumeString(buffer.getShort() & 0xffff, utf8Encoding); case STR32: return consumeString(buffer.getInt(), utf8Encoding); case ARY16: return consumeArray(buffer.getShort() & 0xffff); case ARY32: return consumeArray(buffer.getInt()); case MAP16: return consumeHash(buffer.getShort() & 0xffff); case MAP32: return consumeHash(buffer.getInt()); default: break outer; } case 0xe: case 0xf: return runtime.newFixnum((0x1f & b) - 0x20); default: return runtime.newFixnum(b); } buffer.position(position); throw runtime.newRaiseException(malformedFormatErrorClass, "Illegal byte sequence"); } catch (RaiseException re) { buffer.position(position); throw re; } catch (BufferUnderflowException bue) { buffer.position(position); throw runtime.newRaiseException(underflowErrorClass, "Not enough bytes available"); } } }
freezeAndDedup should already set the frozen flag
ext/java/org/msgpack/jruby/Decoder.java
freezeAndDedup should already set the frozen flag
Java
apache-2.0
7ee521f112b3f8bb2f4742ccd22155a95b792b07
0
apiman/apiman-studio,Apicurio/apicurio-studio,Apicurio/apicurio-studio,apiman/apiman-studio,apiman/apiman-studio,apiman/apiman-studio,apiman/apiman-studio,Apicurio/apicurio-studio,Apicurio/apicurio-studio,Apicurio/apicurio-studio
/* * Copyright 2017 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apicurio.hub.api.rest.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.zip.ZipInputStream; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.StreamingOutput; import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import io.apicurio.datamodels.Library; import io.apicurio.datamodels.core.models.Document; import io.apicurio.datamodels.core.models.DocumentType; import io.apicurio.datamodels.core.models.ValidationProblem; import io.apicurio.datamodels.core.models.ValidationProblemSeverity; import io.apicurio.datamodels.core.validation.IValidationSeverityRegistry; import io.apicurio.datamodels.core.validation.ValidationRuleMetaData; import io.apicurio.datamodels.openapi.models.OasDocument; import io.apicurio.hub.api.beans.CodegenLocation; import io.apicurio.hub.api.beans.ImportApiDesign; import io.apicurio.hub.api.beans.NewApiDesign; import io.apicurio.hub.api.beans.NewApiPublication; import io.apicurio.hub.api.beans.NewCodegenProject; import io.apicurio.hub.api.beans.ResourceContent; import io.apicurio.hub.api.beans.UpdateCodgenProject; import io.apicurio.hub.api.beans.UpdateCollaborator; import io.apicurio.hub.api.beans.ValidationError; import io.apicurio.hub.api.bitbucket.BitbucketResourceResolver; import io.apicurio.hub.api.codegen.OpenApi2JaxRs; import io.apicurio.hub.api.codegen.OpenApi2JaxRs.JaxRsProjectSettings; import io.apicurio.hub.api.codegen.OpenApi2Quarkus; import io.apicurio.hub.api.codegen.OpenApi2Thorntail; import io.apicurio.hub.api.connectors.ISourceConnector; import io.apicurio.hub.api.connectors.SourceConnectorException; import io.apicurio.hub.api.connectors.SourceConnectorFactory; import io.apicurio.hub.api.github.GitHubResourceResolver; import io.apicurio.hub.api.gitlab.GitLabResourceResolver; import io.apicurio.hub.api.metrics.IApiMetrics; import io.apicurio.hub.api.microcks.IMicrocksConnector; 
import io.apicurio.hub.api.microcks.MicrocksConnectorException; import io.apicurio.hub.api.rest.IDesignsResource; import io.apicurio.hub.api.security.ISecurityContext; import io.apicurio.hub.core.beans.ApiContentType; import io.apicurio.hub.core.beans.ApiDesign; import io.apicurio.hub.core.beans.ApiDesignChange; import io.apicurio.hub.core.beans.ApiDesignCollaborator; import io.apicurio.hub.core.beans.ApiDesignCommand; import io.apicurio.hub.core.beans.ApiDesignContent; import io.apicurio.hub.core.beans.ApiDesignResourceInfo; import io.apicurio.hub.core.beans.ApiDesignType; import io.apicurio.hub.core.beans.ApiMock; import io.apicurio.hub.core.beans.ApiPublication; import io.apicurio.hub.core.beans.CodegenProject; import io.apicurio.hub.core.beans.CodegenProjectType; import io.apicurio.hub.core.beans.Contributor; import io.apicurio.hub.core.beans.FormatType; import io.apicurio.hub.core.beans.Invitation; import io.apicurio.hub.core.beans.LinkedAccountType; import io.apicurio.hub.core.beans.MockReference; import io.apicurio.hub.core.beans.SharingConfiguration; import io.apicurio.hub.core.beans.SharingLevel; import io.apicurio.hub.core.beans.UpdateSharingConfiguration; import io.apicurio.hub.core.cmd.OaiCommandException; import io.apicurio.hub.core.cmd.OaiCommandExecutor; import io.apicurio.hub.core.config.HubConfiguration; import io.apicurio.hub.core.editing.IEditingSessionManager; import io.apicurio.hub.core.exceptions.AccessDeniedException; import io.apicurio.hub.core.exceptions.ApiValidationException; import io.apicurio.hub.core.exceptions.NotFoundException; import io.apicurio.hub.core.exceptions.ServerError; import io.apicurio.hub.core.storage.IStorage; import io.apicurio.hub.core.storage.StorageException; import io.apicurio.hub.core.util.FormatUtils; /** * @author eric.wittmann@gmail.com */ @ApplicationScoped public class DesignsResource implements IDesignsResource { private static Logger logger = LoggerFactory.getLogger(DesignsResource.class); private static ObjectMapper mapper = new ObjectMapper(); static { mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(Include.NON_NULL); } @Inject private HubConfiguration config; @Inject private IStorage storage; @Inject private SourceConnectorFactory sourceConnectorFactory; @Inject private ISecurityContext security; @Inject private IApiMetrics metrics; @Inject private OaiCommandExecutor oaiCommandExecutor; @Inject private IEditingSessionManager editingSessionManager; @Inject private IMicrocksConnector microcks; @Context private HttpServletRequest request; @Context private HttpServletResponse response; @Inject private GitLabResourceResolver gitLabResolver; @Inject private GitHubResourceResolver gitHubResolver; @Inject private BitbucketResourceResolver bitbucketResolver; /** * @see io.apicurio.hub.api.rest.IDesignsResource#listDesigns() */ @Override public Collection<ApiDesign> listDesigns() throws ServerError { metrics.apiCall("/designs", "GET"); try { logger.debug("Listing API Designs"); String user = this.security.getCurrentUser().getLogin(); Collection<ApiDesign> designs = this.storage.listApiDesigns(user); return designs; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#importDesign(io.apicurio.hub.api.beans.ImportApiDesign) */ @Override public ApiDesign importDesign(ImportApiDesign info) throws ServerError, NotFoundException, ApiValidationException { metrics.apiCall("/designs", "PUT"); if (info.getData() != null && 
!info.getData().trim().isEmpty()) { logger.debug("Importing an API Design (from data)."); return importDesignFromData(info); } else { logger.debug("Importing an API Design: {}", info.getUrl()); if (info.getUrl() == null) { throw new ApiValidationException("No data provided to import."); } ISourceConnector connector = null; try { connector = this.sourceConnectorFactory.createConnector(info.getUrl()); } catch (NotFoundException nfe) { // This means it's not a source control URL. So we'll treat it as a raw content URL. connector = null; } if (connector != null) { return importDesignFromSource(info, connector); } else { return importDesignFromUrl(info); } } } /** * Imports an API Design from one of the source control systems using its API. * @param info * @param connector * @throws NotFoundException * @throws ServerError * @throws ApiValidationException */ private ApiDesign importDesignFromSource(ImportApiDesign info, ISourceConnector connector) throws NotFoundException, ServerError, ApiValidationException { try { ApiDesignResourceInfo resourceInfo = connector.validateResourceExists(info.getUrl()); ResourceContent initialApiContent = connector.getResourceContent(info.getUrl()); Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); String description = resourceInfo.getDescription(); if (description == null) { description = ""; } ApiDesign design = new ApiDesign(); design.setName(resourceInfo.getName()); design.setDescription(description); design.setCreatedBy(user); design.setCreatedOn(now); design.setTags(resourceInfo.getTags()); design.setType(resourceInfo.getType()); try { String content = initialApiContent.getContent(); if (resourceInfo.getFormat() == FormatType.YAML) { content = FormatUtils.yamlToJson(content); } String id = this.storage.createApiDesign(user, design, content); design.setId(id); } catch (StorageException e) { throw new ServerError(e); } metrics.apiImport(connector.getType()); return design; } catch (SourceConnectorException | IOException e) { throw new ServerError(e); } } /** * Imports an API Design from base64 encoded content included in the request. This supports * the use-case where the UI allows the user to simply copy/paste the full API content. 
* @param info * @throws ServerError */ private ApiDesign importDesignFromData(ImportApiDesign info) throws ServerError, ApiValidationException { try { String data = info.getData(); byte[] decodedData = Base64.decodeBase64(data); try (InputStream is = new ByteArrayInputStream(decodedData)) { String content = IOUtils.toString(is, "UTF-8"); ApiDesignResourceInfo resourceInfo = ApiDesignResourceInfo.fromContent(content); String name = resourceInfo.getName(); if (name == null) { name = "Imported API Design"; } Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); ApiDesign design = new ApiDesign(); design.setName(name); design.setDescription(resourceInfo.getDescription()); design.setCreatedBy(user); design.setCreatedOn(now); design.setTags(resourceInfo.getTags()); design.setType(resourceInfo.getType()); try { if (resourceInfo.getFormat() == FormatType.YAML) { content = FormatUtils.yamlToJson(content); } String id = this.storage.createApiDesign(user, design, content); design.setId(id); } catch (StorageException e) { throw new ServerError(e); } metrics.apiImport(null); return design; } } catch (IOException e) { throw new ServerError(e); } catch (ApiValidationException ave) { throw ave; } catch (Exception e) { throw new ServerError(e); } } /** * Imports an API design from an arbitrary URL. This simply opens a connection to that * URL and tries to consume its content as an OpenAPI document. * @param info * @throws NotFoundException * @throws ServerError * @throws ApiValidationException */ private ApiDesign importDesignFromUrl(ImportApiDesign info) throws NotFoundException, ServerError, ApiValidationException { try { URL url = new URL(info.getUrl()); try (InputStream is = url.openStream()) { String content = IOUtils.toString(is, "UTF-8"); ApiDesignResourceInfo resourceInfo = ApiDesignResourceInfo.fromContent(content); String name = resourceInfo.getName(); if (name == null) { name = url.getPath(); if (name != null && name.indexOf("/") >= 0) { name = name.substring(name.indexOf("/") + 1); } } if (name == null) { name = "Imported API Design"; } Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); ApiDesign design = new ApiDesign(); design.setName(name); design.setDescription(resourceInfo.getDescription()); design.setCreatedBy(user); design.setCreatedOn(now); design.setTags(resourceInfo.getTags()); design.setType(resourceInfo.getType()); try { if (resourceInfo.getFormat() == FormatType.YAML) { content = FormatUtils.yamlToJson(content); } String id = this.storage.createApiDesign(user, design, content); design.setId(id); } catch (StorageException e) { throw new ServerError(e); } metrics.apiImport(null); return design; } } catch (ApiValidationException ave) { throw ave; } catch (IOException e) { throw new ServerError(e); } catch (Exception e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#createDesign(io.apicurio.hub.api.beans.NewApiDesign) */ @Override public ApiDesign createDesign(NewApiDesign info) throws ServerError { logger.debug("Creating an API Design: {}", info.getName()); metrics.apiCall("/designs", "POST"); try { Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); // The API Design meta-data ApiDesign design = new ApiDesign(); design.setName(info.getName()); design.setDescription(info.getDescription()); design.setCreatedBy(user); design.setCreatedOn(now); // The API Design content (OAI document) OasDocument doc; if (info.getSpecVersion() == null || 
info.getSpecVersion().equals("2.0")) { doc = (OasDocument) Library.createDocument(DocumentType.openapi2); design.setType(ApiDesignType.OpenAPI20); } else { doc = (OasDocument) Library.createDocument(DocumentType.openapi3); design.setType(ApiDesignType.OpenAPI30); } doc.info = doc.createInfo(); doc.info.title = info.getName(); doc.info.description = info.getDescription(); doc.info.version = "1.0.0"; String oaiContent = Library.writeDocumentToJSONString(doc); // Create the API Design in the database String designId = storage.createApiDesign(user, design, oaiContent); design.setId(designId); metrics.apiCreate(info.getSpecVersion()); return design; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getDesign(java.lang.String) */ @Override public ApiDesign getDesign(String designId) throws ServerError, NotFoundException { logger.debug("Getting an API design with ID {}", designId); metrics.apiCall("/designs/{designId}", "GET"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesign design = this.storage.getApiDesign(user, designId); return design; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#editDesign(java.lang.String) */ @Override public Response editDesign(String designId) throws ServerError, NotFoundException { logger.debug("Editing an API Design with ID {}", designId); metrics.apiCall("/designs/{designId}/session", "GET"); try { String user = this.security.getCurrentUser().getLogin(); logger.debug("\tUSER: {}", user); ApiDesignContent designContent = this.storage.getLatestContentDocument(user, designId); String content = designContent.getDocument(); long contentVersion = designContent.getContentVersion(); String secret = this.security.getToken().substring(0, Math.min(64, this.security.getToken().length() - 1)); String sessionId = this.editingSessionManager.createSessionUuid(designId, user, secret, contentVersion); logger.debug("\tCreated Session ID: {}", sessionId); logger.debug("\t Secret: {}", secret); byte[] bytes = content.getBytes(StandardCharsets.UTF_8); String ct = "application/json; charset=" + StandardCharsets.UTF_8; String cl = String.valueOf(bytes.length); ResponseBuilder builder = Response.ok().entity(content) .header("X-Apicurio-EditingSessionUuid", sessionId) .header("X-Apicurio-ContentVersion", contentVersion) .header("Content-Type", ct) .header("Content-Length", cl); return builder.build(); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteDesign(java.lang.String) */ @Override public void deleteDesign(String designId) throws ServerError, NotFoundException { logger.debug("Deleting an API Design with ID {}", designId); metrics.apiCall("/designs/{designId}", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); this.storage.deleteApiDesign(user, designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getContributors(java.lang.String) */ @Override public Collection<Contributor> getContributors(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving contributors list for design with ID: {}", designId); metrics.apiCall("/designs/{designId}/contributors", "GET"); try { String user = this.security.getCurrentUser().getLogin(); return this.storage.listContributors(user, designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see 
io.apicurio.hub.api.rest.IDesignsResource#getContent(java.lang.String, java.lang.String) */ @Override public Response getContent(String designId, String format) throws ServerError, NotFoundException { logger.debug("Getting content for API design with ID: {}", designId); metrics.apiCall("/designs/{designId}/content", "GET"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesignContent designContent = this.storage.getLatestContentDocument(user, designId); List<ApiDesignCommand> apiCommands = this.storage.listContentCommands(user, designId, designContent.getContentVersion()); List<String> commands = new ArrayList<>(apiCommands.size()); for (ApiDesignCommand apiCommand : apiCommands) { commands.add(apiCommand.getCommand()); } String content = this.oaiCommandExecutor.executeCommands(designContent.getDocument(), commands); String ct = "application/json; charset=" + StandardCharsets.UTF_8; String cl = null; // Convert to yaml if necessary if ("yaml".equals(format)) { content = FormatUtils.jsonToYaml(content); ct = "application/x-yaml; charset=" + StandardCharsets.UTF_8; } byte[] bytes = content.getBytes(StandardCharsets.UTF_8); cl = String.valueOf(bytes.length); ResponseBuilder builder = Response.ok().entity(content) .header("Content-Type", ct) .header("Content-Length", cl); return builder.build(); } catch (StorageException | OaiCommandException | IOException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#createInvitation(java.lang.String) */ @Override public Invitation createInvitation(String designId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Creating a collaboration invitation for API: {} ", designId); metrics.apiCall("/designs/{designId}/invitations", "POST"); try { String user = this.security.getCurrentUser().getLogin(); String username = this.security.getCurrentUser().getName(); String inviteId = UUID.randomUUID().toString(); ApiDesign design = this.storage.getApiDesign(user, designId); if (!this.storage.hasOwnerPermission(user, designId)) { throw new AccessDeniedException(); } this.storage.createCollaborationInvite(inviteId, designId, user, username, "collaborator", design.getName()); Invitation invite = new Invitation(); invite.setCreatedBy(user); invite.setCreatedOn(new Date()); invite.setDesignId(designId); invite.setInviteId(inviteId); invite.setStatus("pending"); return invite; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getInvitation(java.lang.String, java.lang.String) */ @Override public Invitation getInvitation(String designId, String inviteId) throws ServerError, NotFoundException { logger.debug("Retrieving a collaboration invitation for API: {} and inviteID: {}", designId, inviteId); metrics.apiCall("/designs/{designId}/invitations/{inviteId}", "GET"); try { return this.storage.getCollaborationInvite(designId, inviteId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getInvitations(java.lang.String) */ @Override public Collection<Invitation> getInvitations(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving all collaboration invitations for API: {}", designId); metrics.apiCall("/designs/{designId}/invitations", "GET"); try { String user = this.security.getCurrentUser().getLogin(); return this.storage.listCollaborationInvites(designId, user); } catch (StorageException e) { throw new ServerError(e); } } /** * @see 
io.apicurio.hub.api.rest.IDesignsResource#acceptInvitation(java.lang.String, java.lang.String) */ @Override public void acceptInvitation(String designId, String inviteId) throws ServerError, NotFoundException { logger.debug("Accepting an invitation to collaborate on an API: {}", designId); metrics.apiCall("/designs/{designId}/invitations", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); Invitation invite = this.storage.getCollaborationInvite(designId, inviteId); if (this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } boolean accepted = this.storage.updateCollaborationInviteStatus(inviteId, "pending", "accepted", user); if (!accepted) { throw new NotFoundException(); } this.storage.createPermission(designId, user, invite.getRole()); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#rejectInvitation(java.lang.String, java.lang.String) */ @Override public void rejectInvitation(String designId, String inviteId) throws ServerError, NotFoundException { logger.debug("Rejecting an invitation to collaborate on an API: {}", designId); metrics.apiCall("/designs/{designId}/invitations", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); // This will ensure that the invitation exists for this designId. this.storage.getCollaborationInvite(designId, inviteId); boolean accepted = this.storage.updateCollaborationInviteStatus(inviteId, "pending", "rejected", user); if (!accepted) { throw new NotFoundException(); } } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getCollaborators(java.lang.String) */ @Override public Collection<ApiDesignCollaborator> getCollaborators(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving all collaborators for API: {}", designId); metrics.apiCall("/designs/{designId}/collaborators", "GET"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } return this.storage.listPermissions(designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#updateCollaborator(java.lang.String, java.lang.String, io.apicurio.hub.api.beans.UpdateCollaborator) */ @Override public void updateCollaborator(String designId, String userId, UpdateCollaborator update) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Updating collaborator for API: {}", designId); metrics.apiCall("/designs/{designId}/collaborators/{userId}", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasOwnerPermission(user, designId)) { throw new AccessDeniedException(); } this.storage.updatePermission(designId, userId, update.getNewRole()); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteCollaborator(java.lang.String, java.lang.String) */ @Override public void deleteCollaborator(String designId, String userId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Deleting/revoking collaborator for API: {}", designId); metrics.apiCall("/designs/{designId}/collaborators/{userId}", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasOwnerPermission(user, designId)) { throw new AccessDeniedException(); } this.storage.deletePermission(designId, 
userId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getActivity(java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override public Collection<ApiDesignChange> getActivity(String designId, Integer start, Integer end) throws ServerError, NotFoundException { int from = 0; int to = 20; if (start != null) { from = start.intValue(); } if (end != null) { to = end.intValue(); } try { if (!config.isShareForEveryone()) { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } } return this.storage.listApiDesignActivity(designId, from, to); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getPublications(java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override public Collection<ApiPublication> getPublications(String designId, Integer start, Integer end) throws ServerError, NotFoundException { int from = 0; int to = 20; if (start != null) { from = start.intValue(); } if (end != null) { to = end.intValue(); } try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } return this.storage.listApiDesignPublicationsBy(designId, user, from, to); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#publishApi(java.lang.String, io.apicurio.hub.api.beans.NewApiPublication) */ @Override public void publishApi(String designId, NewApiPublication info) throws ServerError, NotFoundException { LinkedAccountType type = info.getType(); try { // First step - publish the content to the source control system ISourceConnector connector = this.sourceConnectorFactory.createConnector(type); String resourceUrl = toResourceUrl(info); String formattedContent = getApiContent(designId, info.getFormat()); try { ResourceContent content = connector.getResourceContent(resourceUrl); content.setContent(formattedContent); connector.updateResourceContent(resourceUrl, info.getCommitMessage(), null, content); } catch (NotFoundException nfe) { connector.createResourceContent(resourceUrl, info.getCommitMessage(), formattedContent); } // Follow-up step - store a row in the api_content table try { String user = this.security.getCurrentUser().getLogin(); String publicationData = createPublicationData(info); storage.addContent(user, designId, ApiContentType.Publish, publicationData); } catch (Exception e) { logger.error("Failed to record API publication in database.", e); } } catch (SourceConnectorException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#mockApi(java.lang.String) */ @Override public MockReference mockApi(String designId) throws ServerError, NotFoundException { try { // First step - publish the content to the Microcks server API String content = getApiContent(designId, FormatType.YAML); String serviceRef = this.microcks.uploadResourceContent(content); // Build mockURL from microcksURL.
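/* Illustrative example (assumed values, not part of the original source): if the configured Microcks API URL is https://microcks.example.com/api and the uploaded serviceRef is "MyAPI:1.0.0", the statements below strip everything from "/api" onwards and append the URL-encoded serviceRef, yielding a mock page URL of https://microcks.example.com/#/services/MyAPI%3A1.0.0. */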
String mockURL = null; String microcksURL = config.getMicrocksApiUrl(); try { mockURL = microcksURL.substring(0, microcksURL.indexOf("/api")) + "/#/services/" + URLEncoder.encode(serviceRef, "UTF-8"); } catch (Exception e) { logger.error("Failed to produce a valid mockURL", e); } // Follow-up step - store a row in the api_content table try { String user = this.security.getCurrentUser().getLogin(); String mockData = createMockData(serviceRef, mockURL); storage.addContent(user, designId, ApiContentType.Mock, mockData); } catch (Exception e) { logger.error("Failed to record API mock publication in database.", e); } // Finally, return the response. MockReference mockRef = new MockReference(); mockRef.setMockType("microcks"); mockRef.setServiceRef(serviceRef); mockRef.setMockURL(mockURL); return mockRef; } catch (MicrocksConnectorException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getMocks(java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override public Collection<ApiMock> getMocks(String designId, Integer start, Integer end) throws ServerError, NotFoundException { int from = 0; int to = 20; if (start != null) { from = start.intValue(); } if (end != null) { to = end.intValue(); } try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } return this.storage.listApiDesignMocks(designId, from, to); } catch (StorageException e) { throw new ServerError(e); } } /** * Creates the JSON data to be stored in the data row representing a "publish API" event * (also known as an API publication). * @param info */ private String createPublicationData(NewApiPublication info) { try { ObjectMapper mapper = new ObjectMapper(); ObjectNode data = JsonNodeFactory.instance.objectNode(); data.set("type", JsonNodeFactory.instance.textNode(info.getType().name())); data.set("org", JsonNodeFactory.instance.textNode(info.getOrg())); data.set("repo", JsonNodeFactory.instance.textNode(info.getRepo())); data.set("team", JsonNodeFactory.instance.textNode(info.getTeam())); data.set("group", JsonNodeFactory.instance.textNode(info.getGroup())); data.set("project", JsonNodeFactory.instance.textNode(info.getProject())); data.set("branch", JsonNodeFactory.instance.textNode(info.getBranch())); data.set("resource", JsonNodeFactory.instance.textNode(info.getResource())); data.set("format", JsonNodeFactory.instance.textNode(info.getFormat().name())); data.set("commitMessage", JsonNodeFactory.instance.textNode(info.getCommitMessage())); return mapper.writeValueAsString(data); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } /** * Creates the JSON data to be stored in the data row representing a "mock API" event * (also known as an API mock publication). * @param serviceRef The service reference as returned by Microcks * @param mockURL The URL for accessing the description page on the Microcks server */ private String createMockData(String serviceRef, String mockURL) { try { ObjectMapper mapper = new ObjectMapper(); ObjectNode data = JsonNodeFactory.instance.objectNode(); data.set("mockType", JsonNodeFactory.instance.textNode("microcks")); data.set("serviceRef", JsonNodeFactory.instance.textNode(serviceRef)); data.set("mockURL", JsonNodeFactory.instance.textNode(mockURL)); return mapper.writeValueAsString(data); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } /** * Gets the current content of an API.
* @param designId * @param format * @throws ServerError * @throws NotFoundException */ private String getApiContent(String designId, FormatType format) throws ServerError, NotFoundException { try { String user = this.security.getCurrentUser().getLogin(); ApiDesignContent designContent = this.storage.getLatestContentDocument(user, designId); String content = designContent.getDocument(); List<ApiDesignCommand> apiCommands = this.storage.listContentCommands(user, designId, designContent.getContentVersion()); if (!apiCommands.isEmpty()) { List<String> commands = new ArrayList<>(apiCommands.size()); for (ApiDesignCommand apiCommand : apiCommands) { commands.add(apiCommand.getCommand()); } content = this.oaiCommandExecutor.executeCommands(designContent.getDocument(), commands); } // Convert to yaml if necessary if (format == FormatType.YAML) { content = FormatUtils.jsonToYaml(content); } else { content = FormatUtils.formatJson(content); } return content; } catch (StorageException | OaiCommandException | IOException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getCodegenProjects(java.lang.String) */ @Override public Collection<CodegenProject> getCodegenProjects(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving codegen project list for design with ID: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects", "GET"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesign design = this.storage.getApiDesign(user, designId); return this.storage.listCodegenProjects(user, design.getId()); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#createCodegenProject(java.lang.String, io.apicurio.hub.api.beans.NewCodegenProject) */ @Override public CodegenProject createCodegenProject(String designId, NewCodegenProject body) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Creating a codegen project for API: {} ", designId); metrics.apiCall("/designs/{designId}/codegen/projects", "POST"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesign design = this.storage.getApiDesign(user, designId); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } CodegenProject project = new CodegenProject(); Date now = new Date(); project.setCreatedBy(user); project.setCreatedOn(now); project.setModifiedBy(user); project.setModifiedOn(now); project.setDesignId(design.getId()); project.setType(body.getProjectType()); project.setAttributes(new HashMap<String, String>()); if (body.getProjectConfig() != null) { project.getAttributes().putAll(body.getProjectConfig()); } project.getAttributes().put("location", body.getLocation().toString()); project.getAttributes().put("update-only", Boolean.FALSE.toString()); if (body.getPublishInfo() != null) { if (body.getPublishInfo().getType() != null) { project.getAttributes().put("publish-type", body.getPublishInfo().getType().toString()); } project.getAttributes().put("publish-branch", body.getPublishInfo().getBranch()); project.getAttributes().put("publish-commitMessage", body.getPublishInfo().getCommitMessage()); project.getAttributes().put("publish-group", body.getPublishInfo().getGroup()); project.getAttributes().put("publish-location", body.getPublishInfo().getLocation()); project.getAttributes().put("publish-org", body.getPublishInfo().getOrg()); project.getAttributes().put("publish-project", body.getPublishInfo().getProject()); 
project.getAttributes().put("publish-repo", body.getPublishInfo().getRepo()); project.getAttributes().put("publish-team", body.getPublishInfo().getTeam()); } if (body.getLocation() == CodegenLocation.download) { // Nothing extra to do when downloading - that will be handled by a separate call } if (body.getLocation() == CodegenLocation.sourceControl) { String prUrl = generateAndPublishProject(project, false); project.getAttributes().put("pullRequest-url", prUrl); } String projectId = this.storage.createCodegenProject(user, project); project.setId(projectId); return project; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getCodegenProjectAsZip(java.lang.String, java.lang.String) */ @Override public Response getCodegenProjectAsZip(String designId, String projectId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Downloading a codegen project for API Design with ID {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects/{projectId}/zip", "GET"); String user = this.security.getCurrentUser().getLogin(); try { if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } CodegenProject project = this.storage.getCodegenProject(user, designId, projectId); String oaiContent = this.getApiContent(designId, FormatType.JSON); // TODO support other types besides Thorntail if (project.getType() == CodegenProjectType.thorntail) { JaxRsProjectSettings settings = toJaxRsSettings(project); boolean updateOnly = "true".equals(project.getAttributes().get("update-only")); final OpenApi2Thorntail generator = new OpenApi2Thorntail(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return asResponse(settings, generator); } else if (project.getType() == CodegenProjectType.jaxrs) { JaxRsProjectSettings settings = toJaxRsSettings(project); boolean updateOnly = "true".equals(project.getAttributes().get("update-only")); final OpenApi2JaxRs generator = new OpenApi2JaxRs(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return asResponse(settings, generator); } else if (project.getType() == CodegenProjectType.quarkus) { JaxRsProjectSettings settings = toJaxRsSettings(project); boolean updateOnly = "true".equals(project.getAttributes().get("update-only")); final OpenApi2Quarkus generator = new OpenApi2Quarkus(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return asResponse(settings, generator); } else { throw new ServerError("Unsupported project type: " + project.getType()); } } catch (StorageException e) { throw new ServerError(e); } } /** * Generates the project and returns the result as a streaming response. 
* @param settings * @param generator */ private Response asResponse(JaxRsProjectSettings settings, final OpenApi2JaxRs generator) { StreamingOutput stream = new StreamingOutput() { @Override public void write(OutputStream output) throws IOException, WebApplicationException { generator.generate(output); } }; String fname = settings.artifactId + ".zip"; ResponseBuilder builder = Response.ok().entity(stream) .header("Content-Disposition", "attachment; filename=\"" + fname + "\"") .header("Content-Type", "application/zip"); return builder.build(); } /** * @see io.apicurio.hub.api.rest.IDesignsResource#updateCodegenProject(java.lang.String, java.lang.String, io.apicurio.hub.api.beans.UpdateCodgenProject) */ @Override public CodegenProject updateCodegenProject(String designId, String projectId, UpdateCodgenProject body) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Updating codegen project for API: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects/{projectId}", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } CodegenProject project = this.storage.getCodegenProject(user, designId, projectId); project.setType(body.getProjectType()); project.setAttributes(new HashMap<String, String>()); if (body.getProjectConfig() != null) { project.getAttributes().putAll(body.getProjectConfig()); } project.getAttributes().put("location", body.getLocation().toString()); project.getAttributes().put("update-only", Boolean.TRUE.toString()); if (body.getPublishInfo() != null) { if (body.getPublishInfo().getType() != null) { project.getAttributes().put("publish-type", body.getPublishInfo().getType().toString()); } project.getAttributes().put("publish-branch", body.getPublishInfo().getBranch()); project.getAttributes().put("publish-commitMessage", body.getPublishInfo().getCommitMessage()); project.getAttributes().put("publish-group", body.getPublishInfo().getGroup()); project.getAttributes().put("publish-location", body.getPublishInfo().getLocation()); project.getAttributes().put("publish-org", body.getPublishInfo().getOrg()); project.getAttributes().put("publish-project", body.getPublishInfo().getProject()); project.getAttributes().put("publish-repo", body.getPublishInfo().getRepo()); project.getAttributes().put("publish-team", body.getPublishInfo().getTeam()); } if (body.getLocation() == CodegenLocation.download) { // Nothing extra to do when downloading - that will be handled by a separate call } if (body.getLocation() == CodegenLocation.sourceControl) { String prUrl = generateAndPublishProject(project, true); project.getAttributes().put("pullRequest-url", prUrl); } this.storage.updateCodegenProject(user, project); return project; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteCodegenProject(java.lang.String, java.lang.String) */ @Override public void deleteCodegenProject(String designId, String projectId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Deleting codegen project for API: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects/{projectId}", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } this.storage.deleteCodegenProject(user, designId, projectId); } catch (StorageException e) { throw new ServerError(e); } } 
/** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteCodegenProjects(java.lang.String) */ @Override public void deleteCodegenProjects(String designId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Deleting ALL codegen projects for API: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } this.storage.deleteCodegenProjects(user, designId); } catch (StorageException e) { throw new ServerError(e); } } /** * Generates and publishes (to a git/source control system) a project. This will * generate a project from the OpenAPI document and then publish the result to * a source control platform. * @param project * @param updateOnly * @return the URL of the published pull request */ private String generateAndPublishProject(CodegenProject project, boolean updateOnly) throws ServerError, NotFoundException { try { String oaiContent = this.getApiContent(project.getDesignId(), FormatType.JSON); // TODO support other types besides JAX-RS if (project.getType() == CodegenProjectType.thorntail) { JaxRsProjectSettings settings = toJaxRsSettings(project); OpenApi2Thorntail generator = new OpenApi2Thorntail(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return generateAndPublish(project, generator); } else if (project.getType() == CodegenProjectType.jaxrs) { JaxRsProjectSettings settings = toJaxRsSettings(project); OpenApi2JaxRs generator = new OpenApi2JaxRs(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return generateAndPublish(project, generator); } else if (project.getType() == CodegenProjectType.quarkus) { JaxRsProjectSettings settings = toJaxRsSettings(project); OpenApi2Quarkus generator = new OpenApi2Quarkus(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return generateAndPublish(project, generator); } else { throw new ServerError("Unsupported project type: " + project.getType()); } } catch (IOException | SourceConnectorException e) { throw new ServerError(e); } } /** * Generates the project and publishes the result to e.g. GitHub. * @param project * @param generator * @throws IOException * @throws NotFoundException * @throws SourceConnectorException */ private String generateAndPublish(CodegenProject project, OpenApi2JaxRs generator) throws IOException, NotFoundException, SourceConnectorException { ByteArrayOutputStream generatedContent = generator.generate(); LinkedAccountType scsType = LinkedAccountType.valueOf(project.getAttributes().get("publish-type")); ISourceConnector connector = this.sourceConnectorFactory.createConnector(scsType); String url = toSourceResourceUrl(project); String commitMessage = project.getAttributes().get("publish-commitMessage"); String pullRequestUrl = connector.createPullRequestFromZipContent(url, commitMessage, new ZipInputStream(new ByteArrayInputStream(generatedContent.toByteArray()))); return pullRequestUrl; } /** * Reads JAX-RS project settings from the project.
* @param project */ private JaxRsProjectSettings toJaxRsSettings(CodegenProject project) { boolean codeOnly = "true".equals(project.getAttributes().get("codeOnly")); boolean reactive = "true".equals(project.getAttributes().get("reactive")); String groupId = project.getAttributes().get("groupId"); String artifactId = project.getAttributes().get("artifactId"); String javaPackage = project.getAttributes().get("javaPackage"); JaxRsProjectSettings settings = new JaxRsProjectSettings(); settings.codeOnly = codeOnly; settings.reactive = reactive; settings.groupId = groupId != null ? groupId : "org.example.api"; settings.artifactId = artifactId != null ? artifactId : "generated-api"; settings.javaPackage = javaPackage != null ? javaPackage : "org.example.api"; return settings; } /** * Creates a source control resource URL from the information found in the codegen project. * @param project */ private String toSourceResourceUrl(CodegenProject project) { LinkedAccountType scsType = LinkedAccountType.valueOf(project.getAttributes().get("publish-type")); String url; switch (scsType) { case Bitbucket: { String team = project.getAttributes().get("publish-team"); String repo = project.getAttributes().get("publish-repo"); String branch = project.getAttributes().get("publish-branch"); String path = project.getAttributes().get("publish-location"); url = bitbucketResolver.create(team, repo, branch, path); } break; case GitHub: { String org = project.getAttributes().get("publish-org"); String repo = project.getAttributes().get("publish-repo"); String branch = project.getAttributes().get("publish-branch"); String path = project.getAttributes().get("publish-location"); url = gitHubResolver.create(org, repo, branch, path); } break; case GitLab: { String group = project.getAttributes().get("publish-group"); String proj = project.getAttributes().get("publish-project"); String branch = project.getAttributes().get("publish-branch"); String path = project.getAttributes().get("publish-location"); url = gitLabResolver.create(group, proj, branch, path); } break; default: throw new RuntimeException("Unsupported type: " + scsType); } return url; } /** * Uses the information in the bean to create a resource URL. 
*/ private String toResourceUrl(NewApiPublication info) { if (info.getType() == LinkedAccountType.GitHub) { return gitHubResolver.create(info.getOrg(), info.getRepo(), info.getBranch(), info.getResource()); } if (info.getType() == LinkedAccountType.GitLab) { return gitLabResolver.create(info.getGroup(), info.getProject(), info.getBranch(), info.getResource()); } if (info.getType() == LinkedAccountType.Bitbucket) { return bitbucketResolver.create(info.getTeam(), info.getRepo(), info.getBranch(), info.getResource()); } return null; } /** * @see io.apicurio.hub.api.rest.IDesignsResource#validateDesign(java.lang.String) */ @Override public List<ValidationError> validateDesign(String designId) throws ServerError, NotFoundException { logger.debug("Validating API design with ID: {}", designId); metrics.apiCall("/designs/{designId}/validation", "GET"); String content = this.getApiContent(designId, FormatType.JSON); Document doc = Library.readDocumentFromJSONString(content); List<ValidationProblem> problems = Library.validate(doc, new IValidationSeverityRegistry() { @Override public ValidationProblemSeverity lookupSeverity(ValidationRuleMetaData rule) { return ValidationProblemSeverity.high; } }); List<ValidationError> errors = new ArrayList<>(); for (ValidationProblem problem : problems) { errors.add(new ValidationError(problem.errorCode, problem.nodePath.toString(), problem.property, problem.message, problem.severity.name())); } return errors; } /** * @see io.apicurio.hub.api.rest.IDesignsResource#configureSharing(java.lang.String, io.apicurio.hub.core.beans.UpdateSharingConfiguration) */ @Override public SharingConfiguration configureSharing(String designId, UpdateSharingConfiguration config) throws ServerError, NotFoundException { logger.debug("Configuring sharing settings for API: {} ", designId); metrics.apiCall("/designs/{designId}/sharing", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); String uuid = UUID.randomUUID().toString(); // Note: only used if this is the first time if (!this.storage.hasOwnerPermission(user, designId)) { throw new NotFoundException(); } this.storage.setSharingConfig(designId, uuid, config.getLevel()); return this.storage.getSharingConfig(designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getSharingConfiguration(java.lang.String) */ @Override public SharingConfiguration getSharingConfiguration(String designId) throws ServerError, NotFoundException { logger.debug("Getting sharing settings for API: {} ", designId); metrics.apiCall("/designs/{designId}/sharing", "GET"); // Make sure we have access to the design. this.getDesign(designId); try { SharingConfiguration sharingConfig = this.storage.getSharingConfig(designId); if (sharingConfig == null) { sharingConfig = new SharingConfiguration(); sharingConfig.setLevel(SharingLevel.NONE); } return sharingConfig; } catch (StorageException e) { throw new ServerError(e); } } }
back-end/hub-api/src/main/java/io/apicurio/hub/api/rest/impl/DesignsResource.java
/* * Copyright 2017 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.apicurio.hub.api.rest.impl; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URL; import java.net.URLEncoder; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.UUID; import java.util.zip.ZipInputStream; import javax.enterprise.context.ApplicationScoped; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import javax.ws.rs.core.StreamingOutput; import org.apache.commons.codec.binary.Base64; import org.apache.commons.io.IOUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; import io.apicurio.datamodels.Library; import io.apicurio.datamodels.core.models.Document; import io.apicurio.datamodels.core.models.DocumentType; import io.apicurio.datamodels.core.models.ValidationProblem; import io.apicurio.datamodels.core.models.ValidationProblemSeverity; import io.apicurio.datamodels.core.validation.IValidationSeverityRegistry; import io.apicurio.datamodels.core.validation.ValidationRuleMetaData; import io.apicurio.datamodels.openapi.models.OasDocument; import io.apicurio.hub.api.beans.CodegenLocation; import io.apicurio.hub.api.beans.ImportApiDesign; import io.apicurio.hub.api.beans.NewApiDesign; import io.apicurio.hub.api.beans.NewApiPublication; import io.apicurio.hub.api.beans.NewCodegenProject; import io.apicurio.hub.api.beans.ResourceContent; import io.apicurio.hub.api.beans.UpdateCodgenProject; import io.apicurio.hub.api.beans.UpdateCollaborator; import io.apicurio.hub.api.beans.ValidationError; import io.apicurio.hub.api.bitbucket.BitbucketResourceResolver; import io.apicurio.hub.api.codegen.OpenApi2JaxRs; import io.apicurio.hub.api.codegen.OpenApi2JaxRs.JaxRsProjectSettings; import io.apicurio.hub.api.codegen.OpenApi2Quarkus; import io.apicurio.hub.api.codegen.OpenApi2Thorntail; import io.apicurio.hub.api.connectors.ISourceConnector; import io.apicurio.hub.api.connectors.SourceConnectorException; import io.apicurio.hub.api.connectors.SourceConnectorFactory; import io.apicurio.hub.api.github.GitHubResourceResolver; import io.apicurio.hub.api.gitlab.GitLabResourceResolver; import io.apicurio.hub.api.metrics.IApiMetrics; import io.apicurio.hub.api.microcks.IMicrocksConnector; 
import io.apicurio.hub.api.microcks.MicrocksConnectorException; import io.apicurio.hub.api.rest.IDesignsResource; import io.apicurio.hub.api.security.ISecurityContext; import io.apicurio.hub.core.beans.ApiContentType; import io.apicurio.hub.core.beans.ApiDesign; import io.apicurio.hub.core.beans.ApiDesignChange; import io.apicurio.hub.core.beans.ApiDesignCollaborator; import io.apicurio.hub.core.beans.ApiDesignCommand; import io.apicurio.hub.core.beans.ApiDesignContent; import io.apicurio.hub.core.beans.ApiDesignResourceInfo; import io.apicurio.hub.core.beans.ApiDesignType; import io.apicurio.hub.core.beans.ApiMock; import io.apicurio.hub.core.beans.ApiPublication; import io.apicurio.hub.core.beans.CodegenProject; import io.apicurio.hub.core.beans.CodegenProjectType; import io.apicurio.hub.core.beans.Contributor; import io.apicurio.hub.core.beans.FormatType; import io.apicurio.hub.core.beans.Invitation; import io.apicurio.hub.core.beans.LinkedAccountType; import io.apicurio.hub.core.beans.MockReference; import io.apicurio.hub.core.beans.SharingConfiguration; import io.apicurio.hub.core.beans.SharingLevel; import io.apicurio.hub.core.beans.UpdateSharingConfiguration; import io.apicurio.hub.core.cmd.OaiCommandException; import io.apicurio.hub.core.cmd.OaiCommandExecutor; import io.apicurio.hub.core.config.HubConfiguration; import io.apicurio.hub.core.editing.IEditingSessionManager; import io.apicurio.hub.core.exceptions.AccessDeniedException; import io.apicurio.hub.core.exceptions.ApiValidationException; import io.apicurio.hub.core.exceptions.NotFoundException; import io.apicurio.hub.core.exceptions.ServerError; import io.apicurio.hub.core.storage.IStorage; import io.apicurio.hub.core.storage.StorageException; import io.apicurio.hub.core.util.FormatUtils; /** * @author eric.wittmann@gmail.com */ @ApplicationScoped public class DesignsResource implements IDesignsResource { private static Logger logger = LoggerFactory.getLogger(DesignsResource.class); private static ObjectMapper mapper = new ObjectMapper(); static { mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); mapper.setSerializationInclusion(Include.NON_NULL); } @Inject private HubConfiguration config; @Inject private IStorage storage; @Inject private SourceConnectorFactory sourceConnectorFactory; @Inject private ISecurityContext security; @Inject private IApiMetrics metrics; @Inject private OaiCommandExecutor oaiCommandExecutor; @Inject private IEditingSessionManager editingSessionManager; @Inject private IMicrocksConnector microcks; @Context private HttpServletRequest request; @Context private HttpServletResponse response; @Inject private GitLabResourceResolver gitLabResolver; @Inject private GitHubResourceResolver gitHubResolver; @Inject private BitbucketResourceResolver bitbucketResolver; /** * @see io.apicurio.hub.api.rest.IDesignsResource#listDesigns() */ @Override public Collection<ApiDesign> listDesigns() throws ServerError { metrics.apiCall("/designs", "GET"); try { logger.debug("Listing API Designs"); String user = this.security.getCurrentUser().getLogin(); Collection<ApiDesign> designs = this.storage.listApiDesigns(user); return designs; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#importDesign(io.apicurio.hub.api.beans.ImportApiDesign) */ @Override public ApiDesign importDesign(ImportApiDesign info) throws ServerError, NotFoundException, ApiValidationException { metrics.apiCall("/designs", "PUT"); if (info.getData() != null && 
!info.getData().trim().isEmpty()) { logger.debug("Importing an API Design (from data)."); return importDesignFromData(info); } else { logger.debug("Importing an API Design: {}", info.getUrl()); if (info.getUrl() == null) { throw new ApiValidationException("No data provided to import."); } ISourceConnector connector = null; try { connector = this.sourceConnectorFactory.createConnector(info.getUrl()); } catch (NotFoundException nfe) { // This means it's not a source control URL. So we'll treat it as a raw content URL. connector = null; } if (connector != null) { return importDesignFromSource(info, connector); } else { return importDesignFromUrl(info); } } } /** * Imports an API Design from one of the source control systems using its API. * @param info * @param connector * @throws NotFoundException * @throws ServerError * @throws ApiValidationException */ private ApiDesign importDesignFromSource(ImportApiDesign info, ISourceConnector connector) throws NotFoundException, ServerError, ApiValidationException { try { ApiDesignResourceInfo resourceInfo = connector.validateResourceExists(info.getUrl()); ResourceContent initialApiContent = connector.getResourceContent(info.getUrl()); Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); String description = resourceInfo.getDescription(); if (description == null) { description = ""; } ApiDesign design = new ApiDesign(); design.setName(resourceInfo.getName()); design.setDescription(description); design.setCreatedBy(user); design.setCreatedOn(now); design.setTags(resourceInfo.getTags()); design.setType(resourceInfo.getType()); try { String content = initialApiContent.getContent(); if (resourceInfo.getFormat() == FormatType.YAML) { content = FormatUtils.yamlToJson(content); } String id = this.storage.createApiDesign(user, design, content); design.setId(id); } catch (StorageException e) { throw new ServerError(e); } metrics.apiImport(connector.getType()); return design; } catch (SourceConnectorException | IOException e) { throw new ServerError(e); } } /** * Imports an API Design from base64 encoded content included in the request. This supports * the use-case where the UI allows the user to simply copy/paste the full API content. 
* @param info * @throws ServerError */ private ApiDesign importDesignFromData(ImportApiDesign info) throws ServerError, ApiValidationException { try { String data = info.getData(); byte[] decodedData = Base64.decodeBase64(data); try (InputStream is = new ByteArrayInputStream(decodedData)) { String content = IOUtils.toString(is, "UTF-8"); ApiDesignResourceInfo resourceInfo = ApiDesignResourceInfo.fromContent(content); String name = resourceInfo.getName(); if (name == null) { name = "Imported API Design"; } Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); ApiDesign design = new ApiDesign(); design.setName(name); design.setDescription(resourceInfo.getDescription()); design.setCreatedBy(user); design.setCreatedOn(now); design.setTags(resourceInfo.getTags()); design.setType(resourceInfo.getType()); try { if (resourceInfo.getFormat() == FormatType.YAML) { content = FormatUtils.yamlToJson(content); } String id = this.storage.createApiDesign(user, design, content); design.setId(id); } catch (StorageException e) { throw new ServerError(e); } metrics.apiImport(null); return design; } } catch (IOException e) { throw new ServerError(e); } catch (ApiValidationException ave) { throw ave; } catch (Exception e) { throw new ServerError(e); } } /** * Imports an API design from an arbitrary URL. This simply opens a connection to that * URL and tries to consume its content as an OpenAPI document. * @param info * @throws NotFoundException * @throws ServerError * @throws ApiValidationException */ private ApiDesign importDesignFromUrl(ImportApiDesign info) throws NotFoundException, ServerError, ApiValidationException { try { URL url = new URL(info.getUrl()); try (InputStream is = url.openStream()) { String content = IOUtils.toString(is, "UTF-8"); ApiDesignResourceInfo resourceInfo = ApiDesignResourceInfo.fromContent(content); String name = resourceInfo.getName(); if (name == null) { name = url.getPath(); if (name != null && name.indexOf("/") >= 0) { name = name.substring(name.indexOf("/") + 1); } } if (name == null) { name = "Imported API Design"; } Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); ApiDesign design = new ApiDesign(); design.setName(name); design.setDescription(resourceInfo.getDescription()); design.setCreatedBy(user); design.setCreatedOn(now); design.setTags(resourceInfo.getTags()); design.setType(resourceInfo.getType()); try { if (resourceInfo.getFormat() == FormatType.YAML) { content = FormatUtils.yamlToJson(content); } String id = this.storage.createApiDesign(user, design, content); design.setId(id); } catch (StorageException e) { throw new ServerError(e); } metrics.apiImport(null); return design; } } catch (ApiValidationException ave) { throw ave; } catch (IOException e) { throw new ServerError(e); } catch (Exception e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#createDesign(io.apicurio.hub.api.beans.NewApiDesign) */ @Override public ApiDesign createDesign(NewApiDesign info) throws ServerError { logger.debug("Creating an API Design: {}", info.getName()); metrics.apiCall("/designs", "POST"); try { Date now = new Date(); String user = this.security.getCurrentUser().getLogin(); // The API Design meta-data ApiDesign design = new ApiDesign(); design.setName(info.getName()); design.setDescription(info.getDescription()); design.setCreatedBy(user); design.setCreatedOn(now); // The API Design content (OAI document) OasDocument doc; if (info.getSpecVersion() == null || 
info.getSpecVersion().equals("2.0")) { doc = (OasDocument) Library.createDocument(DocumentType.openapi2); design.setType(ApiDesignType.OpenAPI20); } else { doc = (OasDocument) Library.createDocument(DocumentType.openapi3); design.setType(ApiDesignType.OpenAPI30); } doc.info = doc.createInfo(); doc.info.title = info.getName(); doc.info.description = info.getDescription(); doc.info.version = "1.0.0"; String oaiContent = Library.writeDocumentToJSONString(doc); // Create the API Design in the database String designId = storage.createApiDesign(user, design, oaiContent); design.setId(designId); metrics.apiCreate(info.getSpecVersion()); return design; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getDesign(java.lang.String) */ @Override public ApiDesign getDesign(String designId) throws ServerError, NotFoundException { logger.debug("Getting an API design with ID {}", designId); metrics.apiCall("/designs/{designId}", "GET"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesign design = this.storage.getApiDesign(user, designId); return design; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#editDesign(java.lang.String) */ @Override public Response editDesign(String designId) throws ServerError, NotFoundException { logger.debug("Editing an API Design with ID {}", designId); metrics.apiCall("/designs/{designId}/session", "GET"); try { String user = this.security.getCurrentUser().getLogin(); logger.debug("\tUSER: {}", user); ApiDesignContent designContent = this.storage.getLatestContentDocument(user, designId); String content = designContent.getDocument(); long contentVersion = designContent.getContentVersion(); String secret = this.security.getToken().substring(0, Math.min(64, this.security.getToken().length() - 1)); String sessionId = this.editingSessionManager.createSessionUuid(designId, user, secret, contentVersion); logger.debug("\tCreated Session ID: {}", sessionId); logger.debug("\t Secret: {}", secret); byte[] bytes = content.getBytes(StandardCharsets.UTF_8); String ct = "application/json; charset=" + StandardCharsets.UTF_8; String cl = String.valueOf(bytes.length); ResponseBuilder builder = Response.ok().entity(content) .header("X-Apicurio-EditingSessionUuid", sessionId) .header("X-Apicurio-ContentVersion", contentVersion) .header("Content-Type", ct) .header("Content-Length", cl); return builder.build(); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteDesign(java.lang.String) */ @Override public void deleteDesign(String designId) throws ServerError, NotFoundException { logger.debug("Deleting an API Design with ID {}", designId); metrics.apiCall("/designs/{designId}", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); this.storage.deleteApiDesign(user, designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getContributors(java.lang.String) */ @Override public Collection<Contributor> getContributors(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving contributors list for design with ID: {}", designId); metrics.apiCall("/designs/{designId}/contributors", "GET"); try { String user = this.security.getCurrentUser().getLogin(); return this.storage.listContributors(user, designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see 
io.apicurio.hub.api.rest.IDesignsResource#getContent(java.lang.String, java.lang.String) */ @Override public Response getContent(String designId, String format) throws ServerError, NotFoundException { logger.debug("Getting content for API design with ID: {}", designId); metrics.apiCall("/designs/{designId}/content", "GET"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesignContent designContent = this.storage.getLatestContentDocument(user, designId); List<ApiDesignCommand> apiCommands = this.storage.listContentCommands(user, designId, designContent.getContentVersion()); List<String> commands = new ArrayList<>(apiCommands.size()); for (ApiDesignCommand apiCommand : apiCommands) { commands.add(apiCommand.getCommand()); } String content = this.oaiCommandExecutor.executeCommands(designContent.getDocument(), commands); String ct = "application/json; charset=" + StandardCharsets.UTF_8; String cl = null; // Convert to yaml if necessary if ("yaml".equals(format)) { content = FormatUtils.jsonToYaml(content); ct = "application/x-yaml; charset=" + StandardCharsets.UTF_8; } byte[] bytes = content.getBytes(StandardCharsets.UTF_8); cl = String.valueOf(bytes.length); ResponseBuilder builder = Response.ok().entity(content) .header("Content-Type", ct) .header("Content-Length", cl); return builder.build(); } catch (StorageException | OaiCommandException | IOException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#createInvitation(java.lang.String) */ @Override public Invitation createInvitation(String designId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Creating a collaboration invitation for API: {} ", designId); metrics.apiCall("/designs/{designId}/invitations", "POST"); try { String user = this.security.getCurrentUser().getLogin(); String username = this.security.getCurrentUser().getName(); String inviteId = UUID.randomUUID().toString(); ApiDesign design = this.storage.getApiDesign(user, designId); if (!this.storage.hasOwnerPermission(user, designId)) { throw new AccessDeniedException(); } this.storage.createCollaborationInvite(inviteId, designId, user, username, "collaborator", design.getName()); Invitation invite = new Invitation(); invite.setCreatedBy(user); invite.setCreatedOn(new Date()); invite.setDesignId(designId); invite.setInviteId(inviteId); invite.setStatus("pending"); return invite; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getInvitation(java.lang.String, java.lang.String) */ @Override public Invitation getInvitation(String designId, String inviteId) throws ServerError, NotFoundException { logger.debug("Retrieving a collaboration invitation for API: {} and inviteID: {}", designId, inviteId); metrics.apiCall("/designs/{designId}/invitations/{inviteId}", "GET"); try { return this.storage.getCollaborationInvite(designId, inviteId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getInvitations(java.lang.String) */ @Override public Collection<Invitation> getInvitations(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving all collaboration invitations for API: {}", designId); metrics.apiCall("/designs/{designId}/invitations", "GET"); try { String user = this.security.getCurrentUser().getLogin(); return this.storage.listCollaborationInvites(designId, user); } catch (StorageException e) { throw new ServerError(e); } } /** * @see 
io.apicurio.hub.api.rest.IDesignsResource#acceptInvitation(java.lang.String, java.lang.String) */ @Override public void acceptInvitation(String designId, String inviteId) throws ServerError, NotFoundException { logger.debug("Accepting an invitation to collaborate on an API: {}", designId); metrics.apiCall("/designs/{designId}/invitations", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); Invitation invite = this.storage.getCollaborationInvite(designId, inviteId); if (this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } boolean accepted = this.storage.updateCollaborationInviteStatus(inviteId, "pending", "accepted", user); if (!accepted) { throw new NotFoundException(); } this.storage.createPermission(designId, user, invite.getRole()); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#rejectInvitation(java.lang.String, java.lang.String) */ @Override public void rejectInvitation(String designId, String inviteId) throws ServerError, NotFoundException { logger.debug("Rejecting an invitation to collaborate on an API: {}", designId); metrics.apiCall("/designs/{designId}/invitations", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); // This will ensure that the invitation exists for this designId. this.storage.getCollaborationInvite(designId, inviteId); boolean accepted = this.storage.updateCollaborationInviteStatus(inviteId, "pending", "rejected", user); if (!accepted) { throw new NotFoundException(); } } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getCollaborators(java.lang.String) */ @Override public Collection<ApiDesignCollaborator> getCollaborators(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving all collaborators for API: {}", designId); metrics.apiCall("/designs/{designId}/collaborators", "GET"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } return this.storage.listPermissions(designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#updateCollaborator(java.lang.String, java.lang.String, io.apicurio.hub.api.beans.UpdateCollaborator) */ @Override public void updateCollaborator(String designId, String userId, UpdateCollaborator update) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Updating collaborator for API: {}", designId); metrics.apiCall("/designs/{designId}/collaborators/{userId}", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasOwnerPermission(user, designId)) { throw new AccessDeniedException(); } this.storage.updatePermission(designId, userId, update.getNewRole()); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteCollaborator(java.lang.String, java.lang.String) */ @Override public void deleteCollaborator(String designId, String userId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Deleting/revoking collaborator for API: {}", designId); metrics.apiCall("/designs/{designId}/collaborators/{userId}", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasOwnerPermission(user, designId)) { throw new AccessDeniedException(); } this.storage.deletePermission(designId, 
userId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getActivity(java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override public Collection<ApiDesignChange> getActivity(String designId, Integer start, Integer end) throws ServerError, NotFoundException { int from = 0; int to = 20; if (start != null) { from = start.intValue(); } if (end != null) { to = end.intValue(); } try { if (!config.isShareForEveryone()) { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } } return this.storage.listApiDesignActivity(designId, from, to); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getPublications(java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override public Collection<ApiPublication> getPublications(String designId, Integer start, Integer end) throws ServerError, NotFoundException { int from = 0; int to = 20; if (start != null) { from = start.intValue(); } if (end != null) { to = end.intValue(); } try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } return this.storage.listApiDesignPublicationsBy(designId, user, from, to); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#publishApi(java.lang.String, io.apicurio.hub.api.beans.NewApiPublication) */ @Override public void publishApi(String designId, NewApiPublication info) throws ServerError, NotFoundException { LinkedAccountType type = info.getType(); try { // First step - publish the content to the source control system ISourceConnector connector = this.sourceConnectorFactory.createConnector(type); String resourceUrl = toResourceUrl(info); String formattedContent = getApiContent(designId, info.getFormat()); try { ResourceContent content = connector.getResourceContent(resourceUrl); content.setContent(formattedContent); connector.updateResourceContent(resourceUrl, info.getCommitMessage(), null, content); } catch (NotFoundException nfe) { connector.createResourceContent(resourceUrl, info.getCommitMessage(), formattedContent); } // Followup step - store a row in the api_content table try { String user = this.security.getCurrentUser().getLogin(); String publicationData = createPublicationData(info); storage.addContent(user, designId, ApiContentType.Publish, publicationData); } catch (Exception e) { logger.error("Failed to record API publication in database.", e); } } catch (SourceConnectorException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#mockApi(java.lang.String) */ @Override public MockReference mockApi(String designId) throws ServerError, NotFoundException { try { // First step - publish the content to the Microcks server API String content = getApiContent(designId, FormatType.YAML); String serviceRef = this.microcks.uploadResourceContent(content); // Build mockURL from microcksURL.
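// The mock URL is everything before the "/api" suffix of the configured Microcks API URL, plus "/#/services/" and the URL-encoded service reference; e.g. (illustrative values only) a Microcks API URL of "https://microcks.example.org/api" and a serviceRef of "MyAPI:1.0.0" yield "https://microcks.example.org/#/services/MyAPI%3A1.0.0".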
String mockURL = null; String microcksURL = config.getMicrocksApiUrl(); try { mockURL = microcksURL.substring(0, microcksURL.indexOf("/api")) + "/#/services/" + URLEncoder.encode(serviceRef, "UTF-8"); } catch (Exception e) { logger.error("Failed to produce a valid mockURL", e); } // Followup step - store a row in the api_content table try { String user = this.security.getCurrentUser().getLogin(); String mockData = createMockData(serviceRef, mockURL); storage.addContent(user, designId, ApiContentType.Mock, mockData); } catch (Exception e) { logger.error("Failed to record API mock publication in database.", e); } // Finally return response. MockReference mockRef = new MockReference(); mockRef.setMockType("microcks"); mockRef.setServiceRef(serviceRef); mockRef.setMockURL(mockURL); return mockRef; } catch (MicrocksConnectorException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getMocks(java.lang.String, java.lang.Integer, java.lang.Integer) */ @Override public Collection<ApiMock> getMocks(String designId, Integer start, Integer end) throws ServerError, NotFoundException { int from = 0; int to = 20; if (start != null) { from = start.intValue(); } if (end != null) { to = end.intValue(); } try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new NotFoundException(); } return this.storage.listApiDesignMocks(designId, from, to); } catch (StorageException e) { throw new ServerError(e); } } /** * Creates the JSON data to be stored in the data row representing a "publish API" event * (also known as an API publication). * @param info */ private String createPublicationData(NewApiPublication info) { try { ObjectMapper mapper = new ObjectMapper(); ObjectNode data = JsonNodeFactory.instance.objectNode(); data.set("type", JsonNodeFactory.instance.textNode(info.getType().name())); data.set("org", JsonNodeFactory.instance.textNode(info.getOrg())); data.set("repo", JsonNodeFactory.instance.textNode(info.getRepo())); data.set("team", JsonNodeFactory.instance.textNode(info.getTeam())); data.set("group", JsonNodeFactory.instance.textNode(info.getGroup())); data.set("project", JsonNodeFactory.instance.textNode(info.getProject())); data.set("branch", JsonNodeFactory.instance.textNode(info.getBranch())); data.set("resource", JsonNodeFactory.instance.textNode(info.getResource())); data.set("format", JsonNodeFactory.instance.textNode(info.getFormat().name())); data.set("commitMessage", JsonNodeFactory.instance.textNode(info.getCommitMessage())); return mapper.writeValueAsString(data); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } /** * Creates the JSON data to be stored in the data row representing a "mock API" event * (also known as an API mock publication). * @param serviceRef The service reference as returned by Microcks * @param mockURL The URL for accessing the description page on the Microcks server */ private String createMockData(String serviceRef, String mockURL) { try { ObjectMapper mapper = new ObjectMapper(); ObjectNode data = JsonNodeFactory.instance.objectNode(); data.set("mockType", JsonNodeFactory.instance.textNode("microcks")); data.set("serviceRef", JsonNodeFactory.instance.textNode(serviceRef)); data.set("mockURL", JsonNodeFactory.instance.textNode(mockURL)); return mapper.writeValueAsString(data); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } /** * Gets the current content of an API.
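* The latest stored content document is fetched and any newer stored editing commands are replayed over it by the OAI command executor; the result is returned as formatted JSON or converted to YAML, depending on the requested format.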
* @param designId * @param format * @throws ServerError * @throws NotFoundException */ private String getApiContent(String designId, FormatType format) throws ServerError, NotFoundException { try { String user = this.security.getCurrentUser().getLogin(); ApiDesignContent designContent = this.storage.getLatestContentDocument(user, designId); String content = designContent.getDocument(); List<ApiDesignCommand> apiCommands = this.storage.listContentCommands(user, designId, designContent.getContentVersion()); if (!apiCommands.isEmpty()) { List<String> commands = new ArrayList<>(apiCommands.size()); for (ApiDesignCommand apiCommand : apiCommands) { commands.add(apiCommand.getCommand()); } content = this.oaiCommandExecutor.executeCommands(designContent.getDocument(), commands); } // Convert to yaml if necessary if (format == FormatType.YAML) { content = FormatUtils.jsonToYaml(content); } else { content = FormatUtils.formatJson(content); } return content; } catch (StorageException | OaiCommandException | IOException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getCodegenProjects(java.lang.String) */ @Override public Collection<CodegenProject> getCodegenProjects(String designId) throws ServerError, NotFoundException { logger.debug("Retrieving codegen project list for design with ID: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects", "GET"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesign design = this.storage.getApiDesign(user, designId); return this.storage.listCodegenProjects(user, design.getId()); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#createCodegenProject(java.lang.String, io.apicurio.hub.api.beans.NewCodegenProject) */ @Override public CodegenProject createCodegenProject(String designId, NewCodegenProject body) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Creating a codegen project for API: {} ", designId); metrics.apiCall("/designs/{designId}/codegen/projects", "POST"); try { String user = this.security.getCurrentUser().getLogin(); ApiDesign design = this.storage.getApiDesign(user, designId); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } CodegenProject project = new CodegenProject(); Date now = new Date(); project.setCreatedBy(user); project.setCreatedOn(now); project.setModifiedBy(user); project.setModifiedOn(now); project.setDesignId(design.getId()); project.setType(body.getProjectType()); project.setAttributes(new HashMap<String, String>()); if (body.getProjectConfig() != null) { project.getAttributes().putAll(body.getProjectConfig()); } project.getAttributes().put("location", body.getLocation().toString()); project.getAttributes().put("update-only", Boolean.FALSE.toString()); if (body.getPublishInfo() != null) { if (body.getPublishInfo().getType() != null) { project.getAttributes().put("publish-type", body.getPublishInfo().getType().toString()); } project.getAttributes().put("publish-branch", body.getPublishInfo().getBranch()); project.getAttributes().put("publish-commitMessage", body.getPublishInfo().getCommitMessage()); project.getAttributes().put("publish-group", body.getPublishInfo().getGroup()); project.getAttributes().put("publish-location", body.getPublishInfo().getLocation()); project.getAttributes().put("publish-org", body.getPublishInfo().getOrg()); project.getAttributes().put("publish-project", body.getPublishInfo().getProject()); 
project.getAttributes().put("publish-repo", body.getPublishInfo().getRepo()); project.getAttributes().put("publish-team", body.getPublishInfo().getTeam()); } if (body.getLocation() == CodegenLocation.download) { // Nothing extra to do when downloading - that will be handled by a separate call } if (body.getLocation() == CodegenLocation.sourceControl) { String prUrl = generateAndPublishProject(project, false); project.getAttributes().put("pullRequest-url", prUrl); } String projectId = this.storage.createCodegenProject(user, project); project.setId(projectId); return project; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getCodegenProjectAsZip(java.lang.String, java.lang.String) */ @Override public Response getCodegenProjectAsZip(String designId, String projectId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Downloading a codegen project for API Design with ID {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects/{projectId}/zip", "GET"); String user = this.security.getCurrentUser().getLogin(); try { if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } CodegenProject project = this.storage.getCodegenProject(user, designId, projectId); String oaiContent = this.getApiContent(designId, FormatType.JSON); // TODO support other types besides Thorntail if (project.getType() == CodegenProjectType.thorntail) { JaxRsProjectSettings settings = toJaxRsSettings(project); boolean updateOnly = "true".equals(project.getAttributes().get("update-only")); final OpenApi2Thorntail generator = new OpenApi2Thorntail(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return asResponse(settings, generator); } else if (project.getType() == CodegenProjectType.jaxrs) { JaxRsProjectSettings settings = toJaxRsSettings(project); boolean updateOnly = "true".equals(project.getAttributes().get("update-only")); final OpenApi2JaxRs generator = new OpenApi2JaxRs(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return asResponse(settings, generator); } else { throw new ServerError("Unsupported project type: " + project.getType()); } } catch (StorageException e) { throw new ServerError(e); } } /** * Generates the project and returns the result as a streaming response. 
* @param settings * @param generator */ private Response asResponse(JaxRsProjectSettings settings, final OpenApi2JaxRs generator) { StreamingOutput stream = new StreamingOutput() { @Override public void write(OutputStream output) throws IOException, WebApplicationException { generator.generate(output); } }; String fname = settings.artifactId + ".zip"; ResponseBuilder builder = Response.ok().entity(stream) .header("Content-Disposition", "attachment; filename=\"" + fname + "\"") .header("Content-Type", "application/zip"); return builder.build(); } /** * @see io.apicurio.hub.api.rest.IDesignsResource#updateCodegenProject(java.lang.String, java.lang.String, io.apicurio.hub.api.beans.UpdateCodgenProject) */ @Override public CodegenProject updateCodegenProject(String designId, String projectId, UpdateCodgenProject body) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Updating codegen project for API: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects/{projectId}", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } CodegenProject project = this.storage.getCodegenProject(user, designId, projectId); project.setType(body.getProjectType()); project.setAttributes(new HashMap<String, String>()); if (body.getProjectConfig() != null) { project.getAttributes().putAll(body.getProjectConfig()); } project.getAttributes().put("location", body.getLocation().toString()); project.getAttributes().put("update-only", Boolean.TRUE.toString()); if (body.getPublishInfo() != null) { if (body.getPublishInfo().getType() != null) { project.getAttributes().put("publish-type", body.getPublishInfo().getType().toString()); } project.getAttributes().put("publish-branch", body.getPublishInfo().getBranch()); project.getAttributes().put("publish-commitMessage", body.getPublishInfo().getCommitMessage()); project.getAttributes().put("publish-group", body.getPublishInfo().getGroup()); project.getAttributes().put("publish-location", body.getPublishInfo().getLocation()); project.getAttributes().put("publish-org", body.getPublishInfo().getOrg()); project.getAttributes().put("publish-project", body.getPublishInfo().getProject()); project.getAttributes().put("publish-repo", body.getPublishInfo().getRepo()); project.getAttributes().put("publish-team", body.getPublishInfo().getTeam()); } if (body.getLocation() == CodegenLocation.download) { // Nothing extra to do when downloading - that will be handled by a separate call } if (body.getLocation() == CodegenLocation.sourceControl) { String prUrl = generateAndPublishProject(project, true); project.getAttributes().put("pullRequest-url", prUrl); } this.storage.updateCodegenProject(user, project); return project; } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteCodegenProject(java.lang.String, java.lang.String) */ @Override public void deleteCodegenProject(String designId, String projectId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Deleting codegen project for API: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects/{projectId}", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } this.storage.deleteCodegenProject(user, designId, projectId); } catch (StorageException e) { throw new ServerError(e); } } 
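    // Illustrative client-side sketch only (not part of this resource; base URL and identifiers are hypothetical):
    // the generated-project ZIP exposed by getCodegenProjectAsZip above could be fetched with a plain JAX-RS client,
    // for example:
    //
    //   Client client = ClientBuilder.newClient();
    //   Response zip = client.target(apiBaseUrl)
    //           .path("designs").path(designId)
    //           .path("codegen").path("projects").path(projectId).path("zip")
    //           .request("application/zip")
    //           .get();
    //   try (InputStream in = zip.readEntity(InputStream.class)) {
    //       Files.copy(in, Paths.get("generated-project.zip"));
    //   }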
/** * @see io.apicurio.hub.api.rest.IDesignsResource#deleteCodegenProjects(java.lang.String) */ @Override public void deleteCodegenProjects(String designId) throws ServerError, NotFoundException, AccessDeniedException { logger.debug("Deleting ALL codegen projects for API: {}", designId); metrics.apiCall("/designs/{designId}/codegen/projects", "DELETE"); try { String user = this.security.getCurrentUser().getLogin(); if (!this.storage.hasWritePermission(user, designId)) { throw new AccessDeniedException(); } this.storage.deleteCodegenProjects(user, designId); } catch (StorageException e) { throw new ServerError(e); } } /** * Generate and publish (to a git/source control system) a project. This will * generate a project from the OpenAPI document and then publish the result to * a source control platform. * @param project * @param updateOnly * @return the URL of the published pull request */ private String generateAndPublishProject(CodegenProject project, boolean updateOnly) throws ServerError, NotFoundException { try { String oaiContent = this.getApiContent(project.getDesignId(), FormatType.JSON); // TODO support other types besides Thorntail if (project.getType() == CodegenProjectType.thorntail) { JaxRsProjectSettings settings = toJaxRsSettings(project); OpenApi2Thorntail generator = new OpenApi2Thorntail(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return generateAndPublish(project, generator); } else if (project.getType() == CodegenProjectType.jaxrs) { JaxRsProjectSettings settings = toJaxRsSettings(project); OpenApi2JaxRs generator = new OpenApi2JaxRs(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return generateAndPublish(project, generator); } else if (project.getType() == CodegenProjectType.quarkus) { JaxRsProjectSettings settings = toJaxRsSettings(project); OpenApi2Quarkus generator = new OpenApi2Quarkus(); generator.setSettings(settings); generator.setOpenApiDocument(oaiContent); generator.setUpdateOnly(updateOnly); return generateAndPublish(project, generator); } else { throw new ServerError("Unsupported project type: " + project.getType()); } } catch (IOException | SourceConnectorException e) { throw new ServerError(e); } } /** * Generates the project and publishes the result to e.g. GitHub. * @param project * @param generator * @throws IOException * @throws NotFoundException * @throws SourceConnectorException */ private String generateAndPublish(CodegenProject project, OpenApi2JaxRs generator) throws IOException, NotFoundException, SourceConnectorException { ByteArrayOutputStream generatedContent = generator.generate(); LinkedAccountType scsType = LinkedAccountType.valueOf(project.getAttributes().get("publish-type")); ISourceConnector connector = this.sourceConnectorFactory.createConnector(scsType); String url = toSourceResourceUrl(project); String commitMessage = project.getAttributes().get("publish-commitMessage"); String pullRequestUrl = connector.createPullRequestFromZipContent(url, commitMessage, new ZipInputStream(new ByteArrayInputStream(generatedContent.toByteArray()))); return pullRequestUrl; } /** * Reads JAX-RS project settings from the project.
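* Missing attributes fall back to defaults: groupId and javaPackage default to "org.example.api" and artifactId defaults to "generated-api"; the codeOnly and reactive flags are true only when the corresponding attribute is the string "true".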
* @param project */ private JaxRsProjectSettings toJaxRsSettings(CodegenProject project) { boolean codeOnly = "true".equals(project.getAttributes().get("codeOnly")); boolean reactive = "true".equals(project.getAttributes().get("reactive")); String groupId = project.getAttributes().get("groupId"); String artifactId = project.getAttributes().get("artifactId"); String javaPackage = project.getAttributes().get("javaPackage"); JaxRsProjectSettings settings = new JaxRsProjectSettings(); settings.codeOnly = codeOnly; settings.reactive = reactive; settings.groupId = groupId != null ? groupId : "org.example.api"; settings.artifactId = artifactId != null ? artifactId : "generated-api"; settings.javaPackage = javaPackage != null ? javaPackage : "org.example.api"; return settings; } /** * Creates a source control resource URL from the information found in the codegen project. * @param project */ private String toSourceResourceUrl(CodegenProject project) { LinkedAccountType scsType = LinkedAccountType.valueOf(project.getAttributes().get("publish-type")); String url; switch (scsType) { case Bitbucket: { String team = project.getAttributes().get("publish-team"); String repo = project.getAttributes().get("publish-repo"); String branch = project.getAttributes().get("publish-branch"); String path = project.getAttributes().get("publish-location"); url = bitbucketResolver.create(team, repo, branch, path); } break; case GitHub: { String org = project.getAttributes().get("publish-org"); String repo = project.getAttributes().get("publish-repo"); String branch = project.getAttributes().get("publish-branch"); String path = project.getAttributes().get("publish-location"); url = gitHubResolver.create(org, repo, branch, path); } break; case GitLab: { String group = project.getAttributes().get("publish-group"); String proj = project.getAttributes().get("publish-project"); String branch = project.getAttributes().get("publish-branch"); String path = project.getAttributes().get("publish-location"); url = gitLabResolver.create(group, proj, branch, path); } break; default: throw new RuntimeException("Unsupported type: " + scsType); } return url; } /** * Uses the information in the bean to create a resource URL. 
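* GitHub, GitLab and Bitbucket publication targets are supported; null is returned for any other linked account type.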
*/ private String toResourceUrl(NewApiPublication info) { if (info.getType() == LinkedAccountType.GitHub) { return gitHubResolver.create(info.getOrg(), info.getRepo(), info.getBranch(), info.getResource()); } if (info.getType() == LinkedAccountType.GitLab) { return gitLabResolver.create(info.getGroup(), info.getProject(), info.getBranch(), info.getResource()); } if (info.getType() == LinkedAccountType.Bitbucket) { return bitbucketResolver.create(info.getTeam(), info.getRepo(), info.getBranch(), info.getResource()); } return null; } /** * @see io.apicurio.hub.api.rest.IDesignsResource#validateDesign(java.lang.String) */ @Override public List<ValidationError> validateDesign(String designId) throws ServerError, NotFoundException { logger.debug("Validating API design with ID: {}", designId); metrics.apiCall("/designs/{designId}/validation", "GET"); String content = this.getApiContent(designId, FormatType.JSON); Document doc = Library.readDocumentFromJSONString(content); List<ValidationProblem> problems = Library.validate(doc, new IValidationSeverityRegistry() { @Override public ValidationProblemSeverity lookupSeverity(ValidationRuleMetaData rule) { return ValidationProblemSeverity.high; } }); List<ValidationError> errors = new ArrayList<>(); for (ValidationProblem problem : problems) { errors.add(new ValidationError(problem.errorCode, problem.nodePath.toString(), problem.property, problem.message, problem.severity.name())); } return errors; } /** * @see io.apicurio.hub.api.rest.IDesignsResource#configureSharing(java.lang.String, io.apicurio.hub.core.beans.UpdateSharingConfiguration) */ @Override public SharingConfiguration configureSharing(String designId, UpdateSharingConfiguration config) throws ServerError, NotFoundException { logger.debug("Configuring sharing settings for API: {} ", designId); metrics.apiCall("/designs/{designId}/sharing", "PUT"); try { String user = this.security.getCurrentUser().getLogin(); String uuid = UUID.randomUUID().toString(); // Note: only used if this is the first time if (!this.storage.hasOwnerPermission(user, designId)) { throw new NotFoundException(); } this.storage.setSharingConfig(designId, uuid, config.getLevel()); return this.storage.getSharingConfig(designId); } catch (StorageException e) { throw new ServerError(e); } } /** * @see io.apicurio.hub.api.rest.IDesignsResource#getSharingConfiguration(java.lang.String) */ @Override public SharingConfiguration getSharingConfiguration(String designId) throws ServerError, NotFoundException { logger.debug("Getting sharing settings for API: {} ", designId); metrics.apiCall("/designs/{designId}/sharing", "GET"); // Make sure we have access to the design. this.getDesign(designId); try { SharingConfiguration sharingConfig = this.storage.getSharingConfig(designId); if (sharingConfig == null) { sharingConfig = new SharingConfiguration(); sharingConfig.setLevel(SharingLevel.NONE); } return sharingConfig; } catch (StorageException e) { throw new ServerError(e); } } }
fixed quarkus project gen + download
back-end/hub-api/src/main/java/io/apicurio/hub/api/rest/impl/DesignsResource.java
fixed quarkus project gen + download
Java
apache-2.0
0fde332cf62af3bd54aa1a3d996262ed37f0ded5
0
DamonHD/reutils
/* Copyright (c) 2008-2021, Damon Hart-Davis, Ecotricity (Rob Clews). All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.hd.d.edh; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.LocalDate; import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TimeZone; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.regex.Pattern; import org.hd.d.edh.FUELINST.CurrentSummary; /**Supporting routines of general interest for handling FUELINST data. */ public final class FUELINSTUtils { private FUELINSTUtils() { /* Prevent creation of an instance. */ } /**Longest edge of graphics building block components in pixels for HTML generation; strictly positive. */ static final int GCOMP_PX_MAX = 100; /**If true then when data is stale then cautiously never normally show a GREEN status, but YELLOW at best. */ private static final boolean NEVER_GREEN_WHEN_STALE = true; /**If the basic colour is GREEN but we're using pumped storage then we can indicate that with a yellowish green instead (ie mainly green, but not fully). */ static final String LESS_GREEN_STORAGE_DRAWDOWN = "olive"; /**If true then reject points with too few fuel types in mix since this is likely an error. */ final static int MIN_FUEL_TYPES_IN_MIX = 2; /**If true, compress (GZIP) any persisted state. */ static final boolean GZIP_CACHE = true; /**Immutable regex pattern for matching a valid fuel name (all upper-case ASCII first char, digits also allowed subsequently); non-null. 
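* For example, fuel codes such as "CCGT", "WIND" or "NPSHYD" match, while lower-case field names such as "timestamp" or "settlementperiod" do not.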
*/ public static final Pattern FUEL_NAME_REGEX = Pattern.compile("[A-Z][A-Z0-9]+"); /**Immutable regex pattern for matching a valid fuel intensity year 20XX; non-null. */ public static final Pattern FUEL_INTENSITY_YEAR_REGEX = Pattern.compile("20[0-9][0-9]"); /**SimpleDateFormat pattern to parse TIBCO FUELINST timestamp down to seconds (all assumed GMT/UTC); not null. * Example TIBCO timestamp: 2009:03:09:23:57:30:GMT * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String TIBCOTIMESTAMP_FORMAT = "yyyy:MM:dd:HH:mm:ss:zzz"; /**SimpleDateFormat pattern to parse CSV FUELINST timestamp down to seconds (all assumed GMT/UTC); not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String CSVTIMESTAMP_FORMAT = "yyyyMMddHHmmss"; /**SimpleDateFormat pattern to generate UTC date down to days; not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String UTCDAYFILENAME_FORMAT = "yyyyMMdd"; /**SimpleDateFormat pattern to generate ISO 8601 UTC timestamp down to minutes; not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String UTCMINTIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm'Z'"; /**SimpleDateFormat pattern to generate/parse compact HH:mm timestamp down to minutes (all assumed GMT/UTC); not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String HHMMTIMESTAMP_FORMAT = "HH:mm"; /**GMT TimeZone; never null. * Only package-visible because it may be mutable though we never attempt to mutate it. * <p> * We may share this (read-only) between threads and within this package. */ static final TimeZone GMT_TIME_ZONE = TimeZone.getTimeZone("GMT"); /**Charset for FUELINST data (ASCII 7-bit). */ public static final Charset FUELINST_CHARSET = StandardCharsets.US_ASCII; /**Number of hours in a day. */ public static final int HOURS_PER_DAY = 24; /**Number of hours in a week. */ public static final int HOURS_PER_WEEK = 7 * 24; /**Suffix to use for (serialised, gzipped) cache of last non-stale (24h) result. */ public static final String RESULT_CACHE_SUFFIX = ".cache"; /**Suffix to use for (gzipped, ASCII, CSV, pseudo-FUELINST format) longish-term (7d+) store. */ public static final String LONG_STORE_SUFFIX = ".longstore.csv.gz"; /**Compute current status of fuel intensity; never null, but may be empty/default if data not available. * If cacheing is enabled, then this may revert to cache in case of * difficulty retrieving new data. * <p> * Uses fuel intensities as of this year, ie when this call is made. * <p> * Purely functional other than some writes to stdout/stderr: * has no side-effects and does not alter the input. * * @param parsedBMRCSV parsed (as strings) BMR CSV file data, or null if unavailable * @return summary; never null * @throws IOException in case of data unavailability or corruption */ public static FUELINST.CurrentSummary computeCurrentSummary( final List<List<String>> parsedBMRCSV) throws IOException { // If passed-in data is obviously broken // then return an empty/default result. if((null == parsedBMRCSV) || parsedBMRCSV.isEmpty()) { return(new FUELINST.CurrentSummary()); } // Get as much set up as we can before pestering the data source...
final Map<String, String> rawProperties = MainProperties.getRawProperties(); // final String dataURL = rawProperties.get(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); // if(null == dataURL) // { throw new IllegalStateException("Property undefined for data source URL: " + FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); } final String template = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_ROW_FIELDNAMES); if(null == template) { throw new IllegalStateException("Property undefined for FUELINST row field names: " + FUELINST.FUELINST_MAIN_PROPNAME_ROW_FIELDNAMES); } // Use fuel intensities as of this year, ie when this call is made. final LocalDate todayUTC = LocalDate.now(ZoneOffset.UTC); final Map<String, Float> configuredIntensities = FUELINSTUtils.getConfiguredIntensities(todayUTC.getYear()); if(configuredIntensities.isEmpty()) { throw new IllegalStateException("Properties undefined for fuel intensities: " + FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_PREFIX + "*"); } final String maxIntensityAgeS = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_MAX_AGE); if(null == maxIntensityAgeS) { throw new IllegalStateException("Property undefined for FUELINST acceptable age (s): " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_AGE); } final long maxIntensityAge = Math.round(1000 * Double.parseDouble(maxIntensityAgeS)); final String distLossS = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_MAX_DIST_LOSS); if(null == distLossS) { throw new IllegalStateException("Property undefined for FUELINST distribution loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_DIST_LOSS); } final float distLoss = Float.parseFloat(distLossS); if(!((distLoss >= 0) && (distLoss <= 1))) { throw new IllegalStateException("Bad value outside range [0.0,1.0] for FUELINST distribution loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_DIST_LOSS); } final String tranLossS = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_MAX_TRAN_LOSS); if(null == tranLossS) { throw new IllegalStateException("Property undefined for FUELINST transmission loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_TRAN_LOSS); } final float tranLoss = Float.parseFloat(tranLossS); if(!((tranLoss >= 0) && (tranLoss <= 1))) { throw new IllegalStateException("Bad value outside range [0.0,1.0] for FUELINST transmission loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_TRAN_LOSS); } // Extract all fuel categories. final Map<String, Set<String>> fuelsByCategory = getFuelsByCategory(); // Extract Set of zero-or-more 'storage'/'fuel' types/names; never null but may be empty. final Set<String> storageTypes = (fuelsByCategory.containsKey(FUELINST.FUELINST_CATNAME_STORAGE) ? fuelsByCategory.get(FUELINST.FUELINST_CATNAME_STORAGE) : Collections.<String>emptySet()); // All intensity sample values from good records (assuming roughly equally spaced). final List<Integer> allIntensitySamples = new ArrayList<Integer>(parsedBMRCSV.size()); // Compute summary. final SimpleDateFormat timestampParser = FUELINSTUtils.getCSVTimestampParser(); int goodRecordCount = 0; int totalIntensity = 0; long firstGoodRecordTimestamp = 0; long lastGoodRecordTimestamp = 0; long minIntensityRecordTimestamp = 0; long maxIntensityRecordTimestamp = 0; int minIntensity = Integer.MAX_VALUE; int maxIntensity = Integer.MIN_VALUE; int currentIntensity = 0; long currentMW = 0; long currentStorageDrawdownMW = 0; Map<String,Integer> currentGenerationByFuel = Collections.emptyMap(); final int[] sampleCount = new int[FUELINSTUtils.HOURS_PER_DAY]; // Count of all good timestamped records.
final long[] totalIntensityByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. final long[] totalGenerationByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. final long[] totalZCGenerationByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. final long[] totalStorageDrawdownByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. // Set of all usable fuel types encountered. final Set<String> usableFuels = new HashSet<String>(); // Sample-by-sample list of map of generation by fuel type (in MW) and from "" to weighted intensity (gCO2/kWh). final List<Map<String, Integer>> sampleBySampleGenForCorr = new ArrayList<Map<String,Integer>>(parsedBMRCSV.size()); // Compute (crude) correlation between fuel use and intensity. for(final List<String> row : parsedBMRCSV) { // Extract fuel values for this row and compute a weighted intensity... final Map<String, String> namedFields = DataUtils.extractNamedFieldsByPositionFromRow(template, row); // Special case after BMRS upgrade 2016/12/30: ignore trailing row starting "FTR ". if(namedFields.get("type").startsWith("FTR")) { continue; } // Reject malformed/unexpected data. if(!"FUELINST".equals(namedFields.get("type"))) { throw new IOException("Expected FUELINST data but got: " + namedFields.get("type")); } final Map<String,Integer> generationByFuel = new HashMap<String,Integer>(); long thisMW = 0; // Total MW generation in this slot. long thisStorageDrawdownMW = 0; // Total MW storage draw-down in this slot. long thisZCGenerationMW = 0; // Total zero-carbon generation in this slot. // Retain any field that is all caps so that we can display it. for(final String name : namedFields.keySet()) { // Skip if something other than a valid fuel name. if(!FUELINSTUtils.FUEL_NAME_REGEX.matcher(name).matches()) { // DHD20211031: all inspected were benign 'date', 'type', 'settlementperiod', 'timestamp'. // System.err.println("Skipping invalid 'fuel' name "+name+" at " + namedFields.get("timestamp") + " from row " + row); continue; } // Store the MW for this fuel. final int fuelMW = Integer.parseInt(namedFields.get(name), 10); if(fuelMW < 0) { continue; } // NB: -ve INTerconnector values in TIBCO data as of 2012 // { throw new IOException("Bad (-ve) fuel generation MW value: "+row); } thisMW += fuelMW; generationByFuel.put(name, fuelMW); // Slices of generation/demand. if(storageTypes.contains(name)) { thisStorageDrawdownMW += fuelMW; } final Float fuelInt = configuredIntensities.get(name); final boolean usableFuel = null != fuelInt; if(usableFuel) { usableFuels.add(name); } if(usableFuel && (fuelInt <= 0)) { thisZCGenerationMW += fuelMW; } } // Compute weighted intensity as gCO2/kWh for simplicity of representation. // 'Bad' fuels such as coal are ~1000, natural gas is <400, wind and nuclear are roughly 0. final int weightedIntensity = Math.round(1000 * FUELINSTUtils.computeWeightedIntensity(configuredIntensities, generationByFuel, MIN_FUEL_TYPES_IN_MIX)); // Reject bad (-ve) records. if(weightedIntensity < 0) { System.err.println("ERROR: skipping negative weighted intensity record at " + namedFields.get("timestamp")); continue; } allIntensitySamples.add(weightedIntensity); // For computing correlations... // Add entry only iff both a valid weighted intensity and at least one by-fuel number.
if(!generationByFuel.isEmpty()) { final Map<String, Integer> corrEntry = new HashMap<String, Integer>(generationByFuel); corrEntry.put("", weightedIntensity); sampleBySampleGenForCorr.add(corrEntry); } currentMW = thisMW; currentIntensity = weightedIntensity; // Last (good) record we process is the 'current' one as they are in date order. currentGenerationByFuel = generationByFuel; currentStorageDrawdownMW = thisStorageDrawdownMW; // Last (good) record is 'current'. ++goodRecordCount; totalIntensity += weightedIntensity; // Extract timestamp field as defined in the template, format YYYYMMDDHHMMSS. final String rawTimestamp = namedFields.get("timestamp"); long recordTimestamp = 0; // Will be non-zero after a successful parse. if(null == rawTimestamp) { System.err.println("ERROR: missing FUELINST row timestamp"); } else { try { final Date d = timestampParser.parse(rawTimestamp); recordTimestamp = d.getTime(); lastGoodRecordTimestamp = recordTimestamp; if(firstGoodRecordTimestamp == 0) { firstGoodRecordTimestamp = recordTimestamp; } // Extract raw GMT hour from YYYYMMDDHH... final int hour = Integer.parseInt(rawTimestamp.substring(8, 10), 10); //System.out.println("H="+hour+": int="+weightedIntensity+", MW="+currentMW+" time="+d); ++sampleCount[hour]; // Accumulate intensity by hour... totalIntensityByHourOfDay[hour] += weightedIntensity; // Accumulate generation by hour... totalGenerationByHourOfDay[hour] += currentMW; // Note zero-carbon generation. totalZCGenerationByHourOfDay[hour] += thisZCGenerationMW; // Note storage draw-down, if any. totalStorageDrawdownByHourOfDay[hour] += thisStorageDrawdownMW; } catch(final ParseException e) { System.err.println("ERROR: unable to parse FUELINST record timestamp " + rawTimestamp + ": " + e.getMessage()); } } if(weightedIntensity < minIntensity) { minIntensity = weightedIntensity; minIntensityRecordTimestamp = recordTimestamp; } if(weightedIntensity > maxIntensity) { maxIntensity = weightedIntensity; maxIntensityRecordTimestamp = recordTimestamp; } } //System.out.println("INFO: first good record timestamp "+(new Date(firstGoodRecordTimestamp))); //System.out.println("INFO: last good record timestamp "+(new Date(lastGoodRecordTimestamp))+" vs now "+(new Date(System.currentTimeMillis()))); // Note if the intensity dropped/improved in the final samples. TrafficLight recentChange = null; if(allIntensitySamples.size() > 1) { final Integer prev = allIntensitySamples.get(allIntensitySamples.size() - 2); final Integer last = allIntensitySamples.get(allIntensitySamples.size() - 1); if(prev < last) { recentChange = TrafficLight.RED; } else if(prev > last) { recentChange = TrafficLight.GREEN; } else { recentChange = TrafficLight.YELLOW; } } // Compute traffic light status: defaults to 'unknown'. TrafficLight status = null; final int aveIntensity = totalIntensity / Math.max(goodRecordCount, 1); // Always set the outputs and let the caller decide what to do with aged data. int lowerThreshold = 0; int upperThreshold = 0; final int allSamplesSize = allIntensitySamples.size(); if(allSamplesSize > 3) // Only useful above some minimal set size. { // Normally we expect bmreports to give us 24hrs' data. // RED will be where the current value is in the upper quartile of the last 24hrs' intensities, // GREEN when in the lower quartile (and below the mean to be safe), so is fairly conservative, // YELLOW otherwise. // as long as we're on better-than-median intensity compared to the last 24 hours. 
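// Worked example: with 96 samples, upperThreshold is sortedIntensitySamples.get(71) (roughly the 75th percentile) and lowerThreshold is the smaller of sortedIntensitySamples.get(24) (roughly the 25th percentile) and the mean intensity.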
final List<Integer> sortedIntensitySamples = new ArrayList<Integer>(allIntensitySamples); Collections.sort(sortedIntensitySamples); upperThreshold = sortedIntensitySamples.get(allSamplesSize-1 - (allSamplesSize / 4)); lowerThreshold = Math.min(sortedIntensitySamples.get(allSamplesSize / 4), aveIntensity); if(currentIntensity > upperThreshold) { status = TrafficLight.RED; } else if(currentIntensity < lowerThreshold) { status = TrafficLight.GREEN; } else { status = TrafficLight.YELLOW; } } //else { System.err.println("Newest data point too old"); } else { System.err.println("Too few samples: " + allSamplesSize); } // Compute mean intensity by time slot. final List<Integer> aveIntensityByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveIntensityByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalIntensityByHourOfDay[h] / sampleCount[h]))); } // Compute mean generation by time slot. final List<Integer> aveGenerationByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveGenerationByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalGenerationByHourOfDay[h] / sampleCount[h]))); } // Compute mean zero-carbon generation by time slot. final List<Integer> aveZCGenerationByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveZCGenerationByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalZCGenerationByHourOfDay[h] / sampleCount[h]))); } // Compute mean draw-down from storage by time slot. final List<Integer> aveStorageDrawdownByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveStorageDrawdownByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalStorageDrawdownByHourOfDay[h] / sampleCount[h]))); } // Compute fuel/intensity correlation. final Map<String,Float> correlationIntensityToFuel = new HashMap<String,Float>(usableFuels.size()); if(!sampleBySampleGenForCorr.isEmpty()) { // Compute correlation by fuel, where there are enough samples. for(final String fuel : usableFuels) { final List<Double> fuelMW = new ArrayList<Double>(sampleBySampleGenForCorr.size()); final List<Double> gridIntensity = new ArrayList<Double>(sampleBySampleGenForCorr.size()); for(int i = sampleBySampleGenForCorr.size(); --i >= 0; ) { final Map<String, Integer> s = sampleBySampleGenForCorr.get(i); // Only use matching pairs of intensity and MW values to keep lists matching by position. if(s.containsKey("") && s.containsKey(fuel)) { fuelMW.add(s.get(fuel).doubleValue()); gridIntensity.add(s.get("").doubleValue()); } } // Do not attempt unless enough samples. if(fuelMW.size() > 1) { final float corr = (float) StatsUtils.ComputePearsonCorrelation(gridIntensity, fuelMW); // Retain correlation only if sane / finite. if(!Float.isNaN(corr) && !Float.isInfinite(corr)) { correlationIntensityToFuel.put(fuel, corr); } } } } // Construct summary status... 
final FUELINST.CurrentSummary result = new FUELINST.CurrentSummary(status, recentChange, lastGoodRecordTimestamp, lastGoodRecordTimestamp + maxIntensityAge, currentMW, currentIntensity, currentGenerationByFuel, currentStorageDrawdownMW, minIntensity, minIntensityRecordTimestamp, aveIntensity, maxIntensity, maxIntensityRecordTimestamp, (lastGoodRecordTimestamp - firstGoodRecordTimestamp), goodRecordCount, lowerThreshold, upperThreshold, aveIntensityByHourOfDay, aveGenerationByHourOfDay, aveZCGenerationByHourOfDay, aveStorageDrawdownByHourOfDay, tranLoss + distLoss, correlationIntensityToFuel); return(result); } /**Compute variability % of a set as a function of its (non-negative) min and max values; always in range [0,100]. */ static int computeVariability(final int min, final int max) { if((min < 0) || (max < 0)) { throw new IllegalArgumentException(); } if(max == 0) { return(0); } return(100 - ((100*min)/max)); } /**Compute variability % of a set as a function of its min and max values; always in range [0,100]. */ static int computeVariability(final List<FUELINSTHistorical.TimestampedNonNegInt> intensities) { if(null == intensities) { throw new IllegalArgumentException(); } int min = Integer.MAX_VALUE; int max = 0; for(final FUELINSTHistorical.TimestampedNonNegInt ti : intensities) { if(ti.value > max) { max = ti.value; } if(ti.value < min) { min = ti.value; } } return(computeVariability(min, max)); } /**Given a set of relative fuel usages and carbon intensities, computes an overall intensity; never null. * This computes an intensity in the same units as the supplied values. * Fuels whose keys are not in the intensities Map will be ignored. * <p> * Inputs must not be altered while this is in progress. * <p> * This will not attempt to alter its inputs. * * @param intensities Map from fuel name to CO2 per unit of energy; never null * @param generationByFuel Map from fuel name to power being generated from that fuel; never null * @param minFuelTypesInMix minimum number of fuel types in mix else return -1; non-negative * * @return weighted intensity of specified fuel mix for fuels with known intensity, * or -1 if too few fuels in mix */ public static float computeWeightedIntensity(final Map<String, Float> intensities, final Map<String, Integer> generationByFuel, final int minFuelTypesInMix) { if(null == intensities) { throw new IllegalArgumentException(); } if(null == generationByFuel) { throw new IllegalArgumentException(); } if(minFuelTypesInMix < 0) { throw new IllegalArgumentException(); } // Compute set of keys common to both Maps. final Set<String> commonKeys = new HashSet<String>(intensities.keySet()); commonKeys.retainAll(generationByFuel.keySet()); // If too few fuels in the mix then quickly return -1 as a distinguished value. if(commonKeys.size() < minFuelTypesInMix) { return(-1); } int nonZeroFuelCount = 0; float totalGeneration = 0; float totalCO2 = 0; for(final String fuelName : commonKeys) { final float power = generationByFuel.get(fuelName); if(power < 0) { throw new IllegalArgumentException(); } if(power == 0) { continue; } ++nonZeroFuelCount; totalGeneration += power; totalCO2 += power * intensities.get(fuelName); } // If too few (non-zero) fuels in the mix then quickly return -1 as a distinguished value. if(nonZeroFuelCount < minFuelTypesInMix) { return(-1); } final float weightedIntensity = (totalGeneration == 0) ? 0 : totalCO2 / totalGeneration; return(weightedIntensity); } /**Handle the flag files that can be tested by remote servers. 
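* For a given baseFileName up to seven flag files are managed: .flag, .7d.flag, .predicted.flag, .supergreen.flag, .7d.supergreen.flag, .red.flag and .7d.red.flag.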
* The basic ".flag" file is present unless status is green * AND we have live data. * <p> * The more robust ".predicted.flag" file is present unless status is green. * Live data is used if present, else a prediction is made from historical data. * <p> * The keen ".supergreen.flag" file is present unless status is green * AND we have live data * AND no storage is being drawn down on the grid. * This means that we can be pretty sure that there is a surplus of energy available. * * @param baseFileName base file name to make flags; if null then don't do flags. * @param statusCapped status capped to YELLOW if there is no live data * @param statusUncapped uncapped status (can be green from prediction even if no live data) * @param status7dCapped 7d status capped to YELLOW if there is no live data * @throws IOException in case of problems */ static void doFlagFiles(final String baseFileName, final boolean isDataStale, final TrafficLight statusCapped, final TrafficLight statusUncapped, final long currentStorageDrawdownMW, final TrafficLight status7dCapped) throws IOException { if(null == baseFileName) { return; } // In the absence of current data, // then create/clear the flag based on historical data (ie predictions) where possible. // The flag file has terminating extension (from final ".") replaced with ".flag". // (If no extension is present then ".flag" is simply appended.) final File outputFlagFile = new File(baseFileName + ".flag"); final boolean basicFlagState = TrafficLight.GREEN != statusCapped; System.out.println("INFO: basic (green) flag file is " + outputFlagFile + ": " + (basicFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is GREEN, else create it (for RED/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputFlagFile, basicFlagState); // 7d version. final File output7dFlagFile = new File(baseFileName + ".7d.flag"); final boolean basic7dFlagState = TrafficLight.GREEN != status7dCapped; System.out.println("INFO: basic 7d (green) flag file is " + output7dFlagFile + ": " + (basic7dFlagState ? "set" : "clear")); FUELINSTUtils.doPublicFlagFile(output7dFlagFile, basic7dFlagState); // Now deal with the flag that is prepared to make predictions from historical data, // ie helps to ensure that the flag will probably be cleared some time each day // even if our data source is unreliable. // When live data is available then this should be the same as the basic flag. final File outputPredictedFlagFile = new File(baseFileName + ".predicted.flag"); final boolean predictedFlagState = TrafficLight.GREEN != statusUncapped; System.out.println("INFO: predicted flag file is " + outputPredictedFlagFile + ": " + (predictedFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is GREEN, else create it (for RED/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputPredictedFlagFile, predictedFlagState); // Present unless 'capped' value is green (and thus must also be from live data) // AND storage is not being drawn from. final File outputSupergreenFlagFile = new File(baseFileName + ".supergreen.flag"); final boolean supergreenFlagState = (basicFlagState) || (currentStorageDrawdownMW > 0); System.out.println("INFO: supergreen flag file is " + outputSupergreenFlagFile + ": " + (supergreenFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is GREEN, else create it (for RED/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputSupergreenFlagFile, supergreenFlagState); // 7d version. 
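        // The 7d supergreen flag below is present unless ALL of the following hold:
        // the 7d status is GREEN, the data is not stale, the basic 7d flag is clear,
        // and no grid storage is currently being drawn down.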
final File outputSupergreen7dFlagFile = new File(baseFileName + ".7d.supergreen.flag"); final boolean supergreen7dFlagState = (TrafficLight.GREEN != status7dCapped) || (isDataStale) || (basic7dFlagState) || (currentStorageDrawdownMW > 0); System.out.println("INFO: supergreen 7d flag file is " + outputSupergreen7dFlagFile + ": " + (supergreen7dFlagState ? "set" : "clear")); FUELINSTUtils.doPublicFlagFile(outputSupergreen7dFlagFile, supergreen7dFlagState); // Present when red, ie not in most carbon-intensive part of the day. // Flag is computed even with stale data. final File outputRedFlagFile = new File(baseFileName + ".red.flag"); final boolean redFlagState = TrafficLight.RED == statusUncapped; System.out.println("INFO: red flag file is " + outputRedFlagFile + ": " + (redFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is not RED, else create it (for GREEN/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputRedFlagFile, redFlagState); // 7d version. final File output7dRedFlagFile = new File(baseFileName + ".7d.red.flag"); final boolean red7dFlagState = TrafficLight.RED == status7dCapped; System.out.println("INFO: 7d red flag file is " + output7dRedFlagFile + ": " + (red7dFlagState ? "set" : "clear")); FUELINSTUtils.doPublicFlagFile(output7dRedFlagFile, red7dFlagState); } /**Create/remove public (readable by everyone) flag file as needed to match required state. * @param outputFlagFile flag file to create (true) or remove (false) if required; non-null * @param flagRequiredPresent desired state for flag: true indicates present, false indicates absent * @throws IOException in case of difficulty */ static void doPublicFlagFile(final File outputFlagFile, final boolean flagRequiredPresent) throws IOException { if(flagRequiredPresent) { if(outputFlagFile.createNewFile()) { outputFlagFile.setReadable(true); System.out.println("INFO: flag file created: "+outputFlagFile); } } else { if(outputFlagFile.delete()) { System.out.println("INFO: flag file deleted: "+outputFlagFile); } } } /**Implement the 'traffic lights' command line option. * @param args optional (though usual) trailing argument (output HTML file name); never null */ static void doTrafficLights(final String[] args) throws IOException { if(null == args) { throw new IllegalArgumentException(); } final long startTime = System.currentTimeMillis(); System.out.println("INFO: generating traffic-light summary "+Arrays.asList(args)+"..."); final ExecutorService executor = Executors.newSingleThreadExecutor(); final String outputHTMLFileName = (args.length < 1) ? null : args[0]; final int lastDot = (outputHTMLFileName == null) ? -1 : outputHTMLFileName.lastIndexOf("."); // Base/prefix onto which to append specific extensions. final String baseFileName = (-1 == lastDot) ? outputHTMLFileName : outputHTMLFileName.substring(0, lastDot); // Compute relative paths for caches/stores. final File resultCacheFile = (null == baseFileName) ? null : (new File(baseFileName + RESULT_CACHE_SUFFIX)); final File longStoreFile = (null == baseFileName) ? null : (new File(baseFileName + LONG_STORE_SUFFIX)); // Fetch and parse the CSV file from the data source. // Will be null in case of inability to fetch or parse. 
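        // The source URL comes from the main properties and is trimmed of stray whitespace;
        // if the fetch or parse fails then parsedBMRCSV stays null and the code further down
        // falls back to the cached result and/or historical predictions.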
final Map<String, String> rawProperties = MainProperties.getRawProperties(); final String dataURL = rawProperties.get(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); if(null == dataURL) { throw new IllegalStateException("Property undefined for data source URL: " + FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); } List<List<String>> parsedBMRCSV = null; URL url = null; try { // Set up URL connection to fetch the data. url = new URL(dataURL.trim()); // Trim to avoid problems with trailing whitespace... final long dataFetchStart = System.currentTimeMillis(); parsedBMRCSV = DataUtils.parseBMRCSV(url, null); final long dataFetchEnd = System.currentTimeMillis(); System.out.println("INFO: record/row count of CSV FUELINST data: " + parsedBMRCSV.size() + " from source: " + url + " fetch and parse "+(dataFetchEnd-dataFetchStart)+"ms"); } catch(final IOException e) { // Could not get data, so status is unknown. System.err.println("ERROR: could not fetch data from " + url + " error: " + e.getMessage()); } // Validate parsedBMRCSV (correct ordering, no dates in future, etc). // Reject entirely if problem found. if(!DataUtils.isValidBMRData(parsedBMRCSV, System.currentTimeMillis(), HOURS_PER_DAY+1)) { System.err.println("ERROR: invalid CSV FUELINST data rejected."); parsedBMRCSV = null; } // Load long (7d) store if possible. List<List<String>> longStore = null; final long longStoreFetchStart = System.currentTimeMillis(); try { longStore = DataUtils.loadBMRCSV(longStoreFile); } catch(final IOException e) { System.err.println("WARNING: could not load long store "+longStoreFile+" error: " + e.getMessage()); } final long longStoreFetchEnd = System.currentTimeMillis(); System.out.println("Long store load and parse in "+(longStoreFetchEnd-longStoreFetchStart)+"ms."); // As of 2022-10 sometimes last few records are omitted apparently when server is busy. // Attempt to patch them up here... if((null != parsedBMRCSV) && (null != longStoreFile)) { final List<List<String>> appendedNewData = DataUtils.appendNewBMRDataRecords( parsedBMRCSV, longStore); if(null != appendedNewData) { System.err.println("WARNING: some recent records omitted from this data fetch: patched back in."); parsedBMRCSV = appendedNewData; } } // Attempt to update the long store with new records. // Keep the store length trimmed. Future<Long> longStoreSave = null; // Update the long store only if there is something valid to update it with. if(null != parsedBMRCSV) { // Append any new records to long store. final List<List<String>> appendedlongStore = DataUtils.appendNewBMRDataRecords( longStore, parsedBMRCSV); if(null != appendedlongStore) { longStore = appendedlongStore; } // Trim history in long store to maximum of 7 days. final List<List<String>> trimmedLongStore = DataUtils.trimBMRData( longStore, HOURS_PER_WEEK); if(null != trimmedLongStore) { longStore = trimmedLongStore; } // Save long store (asynchronously, atomically, world-readable). final List<List<String>> lsf = longStore; longStoreSave = executor.submit(() -> { final long longStoreSaveStart = System.currentTimeMillis(); DataUtils.saveBMRCSV(lsf, longStoreFile); final long longStoreSaveEnd = System.currentTimeMillis(); //System.out.println("Long store save in "+(longStoreSaveEnd-longStoreSaveStart)+"ms."); return(longStoreSaveEnd - longStoreSaveStart); }); } // Compute 24hr summary if we have fresh data. // // If parsedBMRCSV is null or empty // this will attempt to use cached result // else fall back to empty/default result. 
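        // When a fresh summary is computed it is also persisted asynchronously to the result
        // cache, but only while it is not already past its use-by time; the cached copy is
        // what the no-data branch below falls back to, before the empty/default summary.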
CurrentSummary summary24h = null; Future<Long> resultCacheSave = null; if((null != parsedBMRCSV) && !parsedBMRCSV.isEmpty()) { final CurrentSummary result = FUELINSTUtils.computeCurrentSummary(parsedBMRCSV); summary24h = result; // If cacheing is enabled AND the new result is not stale // then persist this result, compressed. if((null != resultCacheFile) && (summary24h.useByTime >= System.currentTimeMillis())) { resultCacheSave = executor.submit(() -> { final long s = System.currentTimeMillis(); DataUtils.serialiseToFile(result, resultCacheFile, FUELINSTUtils.GZIP_CACHE, true); final long e = System.currentTimeMillis(); return(e - s); }); } } else { // Try to retrieve from cache... FUELINST.CurrentSummary cached = null; try { cached = (FUELINST.CurrentSummary) DataUtils.deserialiseFromFile(resultCacheFile, FUELINSTUtils.GZIP_CACHE); } catch(final IOException err) { /* Fall through... */ } catch(final Exception err) { err.printStackTrace(); } if(null != cached) { System.err.println("WARNING: using previous result from cache..."); summary24h = cached; } // Use place-holder value. else { summary24h = new FUELINST.CurrentSummary(); } } // Compute 7-day summary if long store is available. CurrentSummary summary7d = null; if((null != longStore) && !longStore.isEmpty()) { summary7d = FUELINSTUtils.computeCurrentSummary(longStore); } // Dump a summary of the current status. System.out.println("INFO: 24h summary: " + summary24h); System.out.println("INFO: 7d summary: " + summary7d); // Is the data stale? final boolean isDataStale = summary24h.useByTime < startTime; // Compute intensity as seen by typical GB domestic consumer, gCO2/kWh. final int retailIntensity = Math.round((isDataStale ? summary24h.histAveIntensity : summary24h.currentIntensity) * (1 + summary24h.totalGridLosses)); if(outputHTMLFileName != null) { // Status to use to drive traffic-light measure. // If the data is current then use the latest data point, // else extract a suitable historical value to use in its place. final int hourOfDayHistorical = CurrentSummary.getGMTHourOfDay(startTime); final TrafficLight statusHistorical = summary24h.selectColour(summary24h.histAveIntensityByHourOfDay.get(hourOfDayHistorical)); final TrafficLight statusHistoricalCapped = (TrafficLight.GREEN != statusHistorical) ? statusHistorical : TrafficLight.YELLOW; final TrafficLight statusUncapped = (!isDataStale) ? summary24h.status : statusHistorical; final TrafficLight status = (!isDataStale) ? summary24h.status : (NEVER_GREEN_WHEN_STALE ? statusHistoricalCapped : statusHistorical); // Status over (up to) 7d; reverts to 24h versions if no live data. final TrafficLight status7d = (!isDataStale) ? summary7d.status : (NEVER_GREEN_WHEN_STALE ? statusHistoricalCapped : statusHistorical); // Handle the flag files that can be tested by remote servers. try { FUELINSTUtils.doFlagFiles(baseFileName, isDataStale, status, statusUncapped, summary24h.currentStorageDrawdownMW, status7d); } catch(final IOException e) { e.printStackTrace(); } final TwitterUtils.TwitterDetails td = TwitterUtils.getTwitterHandle(false); // Update the HTML page. try { FUELINSTUtils.updateHTMLFile(startTime, outputHTMLFileName, summary24h, summary7d, isDataStale, hourOfDayHistorical, status, td); } catch(final IOException e) { e.printStackTrace(); } // Update the XML data dump. try { final String outputXMLFileName = (-1 != lastDot) ? 
(outputHTMLFileName.substring(0, lastDot) + ".xml") : (outputHTMLFileName + ".xml"); if(null != outputXMLFileName) { FUELINSTUtils.updateXMLFile(startTime, outputXMLFileName, summary24h, isDataStale, hourOfDayHistorical, status); } } catch(final IOException e) { e.printStackTrace(); } // Update the (mobile-friendly) XHTML page. try { final String outputXHTMLFileName = (-1 != lastDot) ? (outputHTMLFileName.substring(0, lastDot) + ".xhtml") : (outputHTMLFileName + ".xhtml"); // if(null != outputXHTMLFileName) // { FUELINSTUtils.updateXHTMLFile(startTime, outputXHTMLFileName, summary24h, isDataStale, hourOfDayHistorical, status); // } } catch(final IOException e) { e.printStackTrace(); } // Update the plain-text intensity file. try { final String outputTXTFileName = (-1 != lastDot) ? (outputHTMLFileName.substring(0, lastDot) + ".txt") : (outputHTMLFileName + ".txt"); // if(null != outputTXTFileName) // { FUELINSTUtils.updateTXTFile(startTime, outputTXTFileName, summary24h, isDataStale); // } } catch(final IOException e) { e.printStackTrace(); } // Update Twitter if it is set up // and if this represents a change from the previous status. // We may have different messages when we're working from historical data // because real-time / live data is not available. try { if(td != null) { // Compute name of file in which to cache last status we sent to Twitter. final String TwitterCacheFileName = (-1 != lastDot) ? (outputHTMLFileName.substring(0, lastDot) + ".twittercache") : (outputHTMLFileName + ".twittercache"); // Attempt to update the displayed Twitter status as necessary // only if we think the status changed since we last sent it // and it has actually changed compared to what is at Twitter... // If we can't get a hand-crafted message then we create a simple one on the fly... // We use different messages for live and historical (stale) data. final String tweetMessage = FUELINSTUtils.generateTweetMessage( isDataStale, statusUncapped, retailIntensity); TwitterUtils.setTwitterStatusIfChanged( td, new File(TwitterCacheFileName), status, tweetMessage); } } catch(final IOException e) { e.printStackTrace(); } } // Update button(s)/icon(s). try { final File bd = new File(DEFAULT_BUTTON_BASE_DIR); if(bd.isDirectory() && bd.canWrite()) { GraphicsUtils.writeSimpleIntensityIconPNG(DEFAULT_BUTTON_BASE_DIR, 32, summary24h.timestamp, summary24h.status, retailIntensity); GraphicsUtils.writeSimpleIntensityIconPNG(DEFAULT_BUTTON_BASE_DIR, 48, summary24h.timestamp, summary24h.status, retailIntensity); GraphicsUtils.writeSimpleIntensityIconPNG(DEFAULT_BUTTON_BASE_DIR, 64, summary24h.timestamp, summary24h.status, retailIntensity); } else { System.err.println("ERROR: missing directory for icons: " + DEFAULT_BUTTON_BASE_DIR); } } catch(final IOException e) { e.printStackTrace(); } // New as of 2019-10. // Append to the intensity log. // Only do this for current/live data, ie if not stale. if(isDataStale || (0 == summary24h.timestamp)) { System.err.println("WARNING: will not update log, input data is stale."); } else { try { final File id = new File(DEFAULT_INTENSITY_LOG_BASE_DIR); if(id.isDirectory() && id.canWrite()) { appendToRetailIntensityLog(id, summary24h.timestamp, retailIntensity); } else { System.err.println("ERROR: missing directory for intensity log: " + DEFAULT_INTENSITY_LOG_BASE_DIR); } } catch(final IOException e) { e.printStackTrace(); } } // Wait for/reap any side tasks. 
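        // Future.get() below blocks until the asynchronous result-cache and long-store saves
        // have completed, so any save failure is reported here rather than silently lost;
        // such failures are logged but do not abort the run.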
if(null != resultCacheSave) { try { final Long rcT = resultCacheSave.get(); System.out.println("Result cache save in "+rcT+"ms."); } catch(final ExecutionException|InterruptedException e) { System.err.println("ERROR: could not update/save result cache: " + e.getMessage()); } } if(null != longStoreSave) { try { final Long lsT = longStoreSave.get(); System.out.println("Long store save in "+lsT+"ms."); } catch(final ExecutionException|InterruptedException e) { System.err.println("ERROR: could not update/save long store "+longStoreFile+" error: " + e.getMessage()); } } // Kill off the thread pool, completing any running task(s). // TODO: should probably be part of a finally for robustness. executor.shutdown(); final long endTime = System.currentTimeMillis(); System.out.println("doTrafficLights(): "+(endTime-startTime)+"ms."); } /**First (comment) line of retail intensity log. */ public static final String RETAIL_INTENSITY_LOG_HEADER_LINE_1 = "# Retail GB electricity carbon intensity as computed by earth.org.uk."; /**Second (comment) line of retail intensity log. */ public static final String RETAIL_INTENSITY_LOG_HEADER_LINE_2 = "# Time gCO2e/kWh"; /**Third (comment, intensities) line prefix of retail intensity log. */ public static final String RETAIL_INTENSITY_LOG_HEADER_LINE_3_PREFIX = "# Intensities gCO2/kWh:"; /**Append to (or create if necessary) the (retail) intensity log. * If run more often than new data is available * this may produce duplicate/repeated records. * <p> * Public for testability. * * @param id non-null writable directory for the log file * @param timestamp +ve timestamp of latest input available data point * @param retailIntensity non-negative retail/domestic intensity gCO2e/kWh * @return handle of log file, or null if none written */ public static File appendToRetailIntensityLog(File id, long timestamp, int retailIntensity) throws IOException { if(null == id) { throw new IllegalArgumentException(); } if(0 >= timestamp) { throw new IllegalArgumentException(); } if(0 > retailIntensity) { throw new IllegalArgumentException(); } // Compute the log filename. final SimpleDateFormat fsDF = new SimpleDateFormat(UTCDAYFILENAME_FORMAT); fsDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. final String dateUTC = fsDF.format(new Date(timestamp)); //System.out.println("UTC date for log: " + dateUTC); final File logFile = new File(id, dateUTC + ".log"); //System.out.println("Intensity log filename: " + logFile); // Compute the timestamp string for the log record. final SimpleDateFormat tsDF = new SimpleDateFormat(UTCMINTIMESTAMP_FORMAT); tsDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. final String timestampUTC = tsDF.format(new Date(timestamp)); // Refuse to write to a log other than today's for safety. // This may possibly wrongly drop records at either end of the day. final String todayDateUTC = fsDF.format(new Date()); if(!dateUTC.equals(todayDateUTC)) { System.err.println("WARNING: will not write to intensity log for "+dateUTC+" ("+timestampUTC+") at "+(new Date())); return(null); } // If multiple copies of this code run at once // then there may be a race creating/updating the file. // This especially applies to the header(s). final boolean logFileExists = logFile.exists(); try(PrintWriter pw = new PrintWriter( new BufferedWriter(new FileWriter(logFile, true)))) { // Write a header if the file was new. 
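            // Illustrative shape of the resulting log file
            // (fuel codes and intensity figures here are examples, not real configuration):
            //   # Retail GB electricity carbon intensity as computed by earth.org.uk.
            //   # Time gCO2e/kWh
            //   # Intensities gCO2/kWh: CCGT=394 COAL=937 NUCLEAR=0 WIND=0
            //   2019-11-17T16:02Z 352
            //   2019-11-17T16:12Z 351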
if(!logFileExists) { pw.println(RETAIL_INTENSITY_LOG_HEADER_LINE_1); pw.println("# Time gCO2e/kWh"); // DHD20211031: write out intensities based on today's year (parsed for consistency!) final Map<String, Float> configuredIntensities = getConfiguredIntensities(Integer.parseInt(todayDateUTC.substring(0, 4))); final SortedSet<String> fuels = new TreeSet<String>(configuredIntensities.keySet()); final StringBuilder isb = new StringBuilder(RETAIL_INTENSITY_LOG_HEADER_LINE_3_PREFIX.length() + 16*fuels.size()); isb.append(RETAIL_INTENSITY_LOG_HEADER_LINE_3_PREFIX); for(final String f : fuels) { isb.append(" "+f+"="+(Math.round(1000*configuredIntensities.get(f)))); } pw.println(isb); //System.err.println("isb: " + isb); } // Append the new record <timestamp> <intensity>. pw.print(timestampUTC); pw.print(' '); pw.println(retailIntensity); } // Attempt to ensure that the log file is readable by all. logFile.setReadable(true, false); return(logFile); } /**Base directory for embeddable intensity buttons/icons; not null. * Under 'out' directory of suitable vintage to get correct expiry. */ private static final String DEFAULT_BUTTON_BASE_DIR = "../out/hourly/button/"; /**Base directory for log of integer gCO2e/kWh intensity values; not null. * Under 'data' directory. * Intensity values are 'retail', ie as at a typical domestic consumer, * after transmission and distribution losses, based on non-embedded * generation seen on the GB national grid. * * The log is line-oriented with lines of the form (no leading spaces) * [ISO8601UTCSTAMPTOMIN] [kgCO2e/kWh] * ie two space-separated columns, eg: * # Other comment and one-of-data here. * # Time gCO2e/kWh * 2019-11-17T16:02Z 352 * 2019-11-17T16:12Z 351 * * Initial lines may be headers, starting with # in in column 1, * and may be ignored for data purposes. * * This may contain repeat records if data is sampled more often * than it is updated at the source. * * Records will not be generated when data is 'stale', * ie when fresh data is not available from the source. * * Log files will be named with the form YYYYMMDD.log * eg 20191117.log. */ private static final String DEFAULT_INTENSITY_LOG_BASE_DIR = "../data/FUELINST/log/live/"; /**Generate the text of the status Tweet. * Public to allow testing that returned Tweets are always valid. * * @param isDataStale true if we are working on historical/predicted (non-live) data * @param statusUncapped the uncapped current or predicted status; never null * @param retailIntensity intensity in gCO2/kWh as seen by retail customer, non-negative * @return human-readable valid Tweet message */ public static String generateTweetMessage( final boolean isDataStale, final TrafficLight statusUncapped, final int retailIntensity) // TODO { if(null == statusUncapped) { throw new IllegalArgumentException(); } final String statusTemplate = MainProperties.getRawProperties().get((isDataStale ? TwitterUtils.PNAME_PREFIX_TWITTER_TRAFFICLIGHT_PREDICTION_MESSAGES : TwitterUtils.PNAME_PREFIX_TWITTER_TRAFFICLIGHT_STATUS_MESSAGES) + statusUncapped); final String tweetMessage = ((statusTemplate != null) && !statusTemplate.isEmpty()) ? String.format(statusTemplate, retailIntensity).trim() : ("Grid status " + statusUncapped); return(tweetMessage); } /**Extract (immutable) intensity map from configuration information; never null but may be empty. 
* @return map from fuel name to kgCO2/kWh non-negative intensity; never null */ public static Map<String, String> getConfiguredFuelNames() { final Map<String, String> result = new HashMap<String, String>(); // Have to scan through all keys, which may be inefficient... final Map<String, String> rawProperties = MainProperties.getRawProperties(); for(final String key : rawProperties.keySet()) { if(!key.startsWith(FUELINST.FUELNAME_INTENSITY_MAIN_PROPNAME_PREFIX)) { continue; } final String fuelname = key.substring(FUELINST.FUELNAME_INTENSITY_MAIN_PROPNAME_PREFIX.length()); final String descriptiveName = rawProperties.get(key).trim(); if(!FUEL_NAME_REGEX.matcher(fuelname).matches()) { // Stop things dead if a name is used that may break things later. throw new IllegalArgumentException("Invalid 'fuel' name " + fuelname); } if(descriptiveName.isEmpty()) { continue; } result.put(fuelname, descriptiveName); } return(Collections.unmodifiableMap(result)); } /**Extract (immutable) map from fuel category to set of fuel names; never null but may be empty. * The result only contains keys with non-empty fuelname sets. */ public static Map<String, Set<String>> getFuelsByCategory() { final Map<String, Set<String>> result = new HashMap<String, Set<String>>(); // Have to scan through all keys, which may be inefficient... final Map<String, String> rawProperties = MainProperties.getRawProperties(); for(final String key : rawProperties.keySet()) { if(!key.startsWith(FUELINST.FUELINST_MAIN_PROPPREFIX_STORAGE_TYPES)) { continue; } final String category = key.substring(FUELINST.FUELINST_MAIN_PROPPREFIX_STORAGE_TYPES.length()); final String fuelnames = rawProperties.get(key).trim(); if(fuelnames.isEmpty()) { continue; } final HashSet<String> fuels = new HashSet<String>(Arrays.asList(fuelnames.trim().split(","))); result.put(category, Collections.unmodifiableSet(fuels)); } return(Collections.unmodifiableMap(result)); } /**Extract (immutable) intensity map from configuration information for a given year; never null but may be empty. * @param year if non-null preferred year for intensity and must be [2000,]; * this will use intensity values including the given year if possible, * else the default as for the no-argument call * * <p> * A default undated form such as <code>intensity.fuel.INTEW=0.45</code> is permitted, * in part for backward compatibility. * <p> * Other forms allowed have a suffix of: * <ul> * <li><code>.year</code> the given year, eg <code>intensity.fuel.INTEW.2021=0.45</code></li> * <li>[TODO] <code>.startYear/endYear</code> in given year range, inclusive</li> * <li>[TODO] <code>.startYear/</code> from given year, inclusive</li> * <li>[TODO] <code>./endYear</code> up to given year, inclusive</li> * </ul> * Dates specified must be unique and non-overlapping, * and startYear must not be after endYear. * <p> * This date format is potentially partly extensible to ISO8601 including ranges. * * TODO * * @return map from fuel name to kgCO2/kWh non-negative intensity; never null * */ public static Map<String, Float> getConfiguredIntensities(final Integer year) { final Map<String, Float> result = new HashMap<String, Float>(); // Have to scan through all keys, which may be inefficient... final Map<String, String> rawProperties = MainProperties.getRawProperties(); for(final String key : rawProperties.keySet()) { if(!key.startsWith(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_PREFIX)) { continue; } // Simple verification that fuel name may be valid, else reject. 
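        // Property-key forms recognised below (fuel code and intensity values are
        // examples only, as in the method Javadoc above):
        //   intensity.fuel.INTEW=0.45            default, any year
        //   intensity.fuel.INTEW.2021=0.45       single year
        //   intensity.fuel.INTEW.2018/2020=0.45  inclusive year range
        //   intensity.fuel.INTEW.2018/=0.45      from the given year onwards
        //   intensity.fuel.INTEW./2017=0.45      up to and including the given year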
final String keytail = key.substring(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_PREFIX.length()); if(keytail.length() < 2) { System.err.println("Trivially invalid fuel name " + key); continue; } // Extract fuel name. final String fuel; // Is the whole keytail an unqualified fule name (no date range). final boolean isUnqualified = FUELINSTUtils.FUEL_NAME_REGEX.matcher(keytail).matches(); // For the case where year is null, the entire tail must be a valid fuel name. if(year == null) { if(!isUnqualified) { // Cannot use unqualified entry with null argument. continue; } fuel = keytail; } else if(isUnqualified) { // This is a default (no date-range) default value. // Usable with a non-null year iff no value already captured for this fuel. if(!result.containsKey(keytail)) { fuel = keytail; } else { continue; } } else // year != null and this is not an unqualified entry... { // Split key tail in two at '.'. final String parts[] = keytail.split("[.]"); if(2 != parts.length) { System.err.println("Invalid fuel intensity key " + key); continue; } fuel = parts[0]; if(!FUELINSTUtils.FUEL_NAME_REGEX.matcher(fuel).matches()) { System.err.println("Invalid fuel name " + key); continue; } final int y = year; if((y < 2000) || (y >= 3000)) { throw new IllegalArgumentException("bad year " + y); } // Deal with date range cases. final int slashPos = parts[1].indexOf('/'); if(-1 != slashPos) { // Note: // assertEquals(1, "2012/".split("/").length); // assertEquals("2012", "2012/".split("/")[0]); // assertEquals(2, "/2012".split("/").length); // assertEquals("", "/2012".split("/")[0]); // assertEquals("2012", "/2012".split("/")[1]); // assertEquals(2, "2011/2012".split("/").length); // assertEquals("2011", "2011/2012".split("/")[0]); // assertEquals("2012", "2011/2012".split("/")[1]); final String slashParts[] = parts[1].split("/"); if(slashParts.length > 2) { System.err.println("Unable to parse data range for intensity value for " + key); continue; } if(!"".equals(slashParts[0]) && !FUELINSTUtils.FUEL_INTENSITY_YEAR_REGEX.matcher(slashParts[0]).matches()) { System.err.println("Unable to parse data range start for intensity value for " + key); continue; } final short isYear = "".equals(slashParts[0]) ? 0 : Short.parseShort(slashParts[0]); if(isYear > y) { // Range start year is after current year, so does not apply. continue; } if(slashParts.length > 1) { if(!FUELINSTUtils.FUEL_INTENSITY_YEAR_REGEX.matcher(slashParts[1]).matches()) { System.err.println("Unable to parse data range end for intensity value for " + key); continue; } final short ieYear = Short.parseShort(slashParts[1]); if(ieYear < isYear) { System.err.println("Unable to parse data range (start>end) for intensity value for " + key); continue; } if(ieYear < y) { // Range end year is before current year, so does not apply. continue; } } } // Deal with simple fuelname.year case. else if(FUELINSTUtils.FUEL_INTENSITY_YEAR_REGEX.matcher(parts[1]).matches()) { final short iYear = Short.parseShort(parts[1]); if(iYear != y) { continue; } // Wrong year. } } // Reject non-parseable and illegal (eg -ve) values. 
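        // Values that fail to parse, or that are negative, NaN or infinite, are skipped
        // with a warning so that one bad property entry cannot invalidate the whole map.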
final Float intensity; try { intensity = new Float(rawProperties.get(key)); } catch(final NumberFormatException e) { System.err.println("Unable to parse kgCO2/kWh intensity value for " + key); continue; } if(!(intensity >= 0) || Float.isInfinite(intensity) || Float.isNaN(intensity)) { System.err.println("Invalid (non-positive) kgCO2/kWh intensity value for " + key); continue; } result.put(fuel, intensity); } return(Collections.unmodifiableMap(result)); } /**Extract (immutable) intensity map from configuration information; never null but may be empty. * This will use the default (eg undated) intensity value for each fuel such as * <code>intensity.fuel.INTEW=0.45</code> * else the latest-dated value. * * @return map from each fuel name to kgCO2/kWh non-negative intensity; never null */ @Deprecated public static Map<String, Float> getConfiguredIntensities() { return(getConfiguredIntensities(null)); } /**Fall-back category to assign uncategorised fuels to; single token not null nor empty. */ public static final String UNCATEGORISED_FUELS = "uncategorised"; /**If true, show recent changes in intensity, though they can be very noisy. */ private static final boolean SHOW_INTENSITY_DELTA = false; /**Extract fuel use (in MW) by category from the current summary given the fuels-by-category table; never null but may be empty. * TODO: construct 'uncategorised' component automatically */ public static Map<String,Integer> getFuelMWByCategory(final Map<String,Integer> currentGenerationMWByFuel, final Map<String,Set<String>> fuelByCategory) { if(null == currentGenerationMWByFuel) { throw new IllegalArgumentException(); } if(null == fuelByCategory) { throw new IllegalArgumentException(); } final Map<String,Integer> result = new HashMap<String, Integer>((fuelByCategory.size()*2) + 3); // Construct each category's total generation.... for(final Map.Entry<String, Set<String>> c : fuelByCategory.entrySet()) { final String category = c.getKey(); final Set<String> fuels = c.getValue(); long total = 0; for(final String fuel : fuels) { final Integer q = currentGenerationMWByFuel.get(fuel); if(null == q) { System.err.println("no per-fuel MW value for "+fuel); continue; } if(q < 0) { throw new IllegalArgumentException("invalid negative per-fuel MW value"); } total += q; } // Check for overflow. if(total > Integer.MAX_VALUE) { throw new ArithmeticException("overflow"); } result.put(category, (int) total); } return(Collections.unmodifiableMap(result)); } /**Get a format for the BM timestamps in at least FUELINST data; never null. * A returned instance is not safe to share between threads. */ public static SimpleDateFormat getCSVTimestampParser() { final SimpleDateFormat sDF = new SimpleDateFormat(FUELINSTUtils.CSVTIMESTAMP_FORMAT); sDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All bmreports timestamps are GMT/UTC. return(sDF); } /**Get a format for the BM timestamps in at least FUELINST data; never null. * A returned instance is not safe to share between threads. */ public static SimpleDateFormat getTIBCOTimestampParser() { final SimpleDateFormat sDF = new SimpleDateFormat(FUELINSTUtils.TIBCOTIMESTAMP_FORMAT); sDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. return(sDF); } /**Get a format compact (HH:MM) timestamps; never null. * A returned instance is not safe to share between threads. 
*/ public static SimpleDateFormat getHHMMTimestampParser() { final SimpleDateFormat sDF = new SimpleDateFormat(FUELINSTUtils.HHMMTIMESTAMP_FORMAT); sDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. return(sDF); } /**Update (atomically if possible) the HTML traffic-light page. */ public static void updateHTMLFile(final long startTime, final String outputHTMLFileName, final FUELINST.CurrentSummary summary24h, final FUELINST.CurrentSummary summary7d, final boolean isDataStale, final int hourOfDayHistorical, final TrafficLight status, final TwitterUtils.TwitterDetails td) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(16384); final PrintWriter w = new PrintWriter(baos); try { final Map<String, String> rawProperties = MainProperties.getRawProperties(); // Write the preamble with the status text dropped in. final String statusColour = (status == null) ? null : status.toString().toLowerCase(); w.write(rawProperties.get("trafficLightPage.HTML.preamble"). replace("<!-- STATUS -->", (status == null) ? "UNKNOWN" : "<span style=\"color:"+statusColour+";background-color:black\">" + status + "</span>" + (isDataStale ? "*" : "") )); w.println(); if(isDataStale) { w.println("<p><em>*WARNING: cannot obtain current data so this is partly based on predictions from historical data (for "+hourOfDayHistorical+":XX GMT).</em></p>"); } // Write out crude 'lights' with only appropriate lamp lit // and some appropriate text. final int sidePixels = GCOMP_PX_MAX; // Edge length of each 'lamp'. final String open = "<tr><th style=\"border:3px solid;height:"+sidePixels+"px;width:"+((3*sidePixels)/2)+"px"; final String close = "</th></tr>"; w.write("<div><table style=\"margin-left:auto;margin-right:auto\">"); final String weaselWord = isDataStale ? "probably " : ""; w.write(open+((status == TrafficLight.RED) ? ";background-color:red\">Grid carbon intensity is "+weaselWord+"high; please do not run big appliances such as a dishwasher or washing machine now if you can postpone" : "\">&nbsp;")+close); w.write(open+((status == TrafficLight.YELLOW) ? ";background-color:yellow\">Grid is "+weaselWord+"OK; but you could still avoid CO2 emissions by postponing running big appliances such as dishwashers or washing machines" : ((status == null) ? "\">Status is unknown" : "\">&nbsp;"))+close); w.write(open+((status == TrafficLight.GREEN) ? ";background-color:green\">Grid is "+weaselWord+"good; you might run major loads such as your dishwasher and/or washing machine now to minimise CO2 emissions" : "\">&nbsp;")+close); w.write("</table></div>"); w.println(); // Note very gently when the 7d status view is different. if(summary24h.status != summary7d.status) { w.println("<p style=\"text-align:center\">(Over a longer period, the current status is "+summary7d.status+".)</p>"); } // Note carbon savings that were available. if(summary24h.histMinIntensity < summary24h.histMaxIntensity) { w.println("<p style=\"text-align:center\">You might have saved as much as <strong style=\"font-size:xx-large\">"+FUELINSTUtils.computeVariability(summary24h.histMinIntensity, summary24h.histMaxIntensity)+"%</strong> carbon emissions by choosing the best time to run your washing and other major loads.</p>"); } // Note any recent change/delta iff the data is not stale. 
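        // The block below is disabled while SHOW_INTENSITY_DELTA is false; when enabled
        // it notes whether intensity is currently rising (red) or falling (green), and only
        // for live (non-stale) data.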
if(SHOW_INTENSITY_DELTA) { if(!isDataStale) { if(summary24h.recentChange == TrafficLight.GREEN) { w.println("<p style=\"color:green\">Good: carbon intensity (CO2 per kWh) is currently dropping.</p>"); } else if(summary24h.recentChange == TrafficLight.RED) { w.println("<p style=\"color:red\">Bad: carbon intensity (CO2 per kWh) is currently rising.</p>"); } } } w.println("<p>Latest data is from <strong>"+(new Date(summary24h.timestamp))+"</strong>. This page should be updated every few minutes: use your browser's refresh/reload button if you need to check again.</p>"); // If we have a Twitter account set up then brag about it here, // but only if we believe that we actually have write access to be doing updates... if(td != null) { w.print("<p>Follow this grid status on Twitter <a href=\"http://twitter.com/"); w.print(td.username); w.print("\">@"); w.print(td.username); w.print("</a>"); w.println(".</p>"); } // A bit of explanation... w.println(rawProperties.get("trafficLightPage.HTML.midamble")); // ------------------------------------------------------ // Now for the numbers... w.println("<h2>Technical Stuff</h2><p>You don't need to understand the numbers below, but some people like to see them!</p>"); // Replace estimate of end-user intensity with recent historical mean if the data is stale. w.write("<p>"); w.write(isDataStale ? "Recent effective carbon intensity for a domestic user at this time of day was " : "Effective grid carbon intensity for a domestic user is currently "); if(null != status) { w.write("<span style=\"font-size:xx-large;color:"+statusColour+";background-color:black\">"); } w.write(String.valueOf(Math.round((isDataStale ? summary24h.histAveIntensity : summary24h.currentIntensity) * (1 + summary24h.totalGridLosses)))); w.write("gCO2/kWh"); if(null != status) { w.write("</span>"); } w.write(" including transmission and distribution losses of "); w.write(String.valueOf(Math.round(100 * summary24h.totalGridLosses))); w.write("%.</p>"); w.println(); w.println("<p>Latest available grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) is approximately <strong>"+summary24h.currentIntensity+"gCO2/kWh</strong> at "+(new Date(summary24h.timestamp))+" over "+ summary24h.currentMW+"MW of generation, with a rolling average over "+((summary24h.histWindowSize+1800000) / 3600000)+"h of <strong>"+summary24h.histAveIntensity+"gCO2/kWh</strong>.</p>"); w.println("<p>Minimum grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) was approximately <strong>"+summary24h.histMinIntensity+"gCO2/kWh</strong> at "+(new Date(summary24h.minIntensityRecordTimestamp))+".</p>"); w.println("<p>Maximum grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) was approximately <strong>"+summary24h.histMaxIntensity+"gCO2/kWh</strong> at "+(new Date(summary24h.maxIntensityRecordTimestamp))+".</p>"); w.println("<p>Average/mean grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) was approximately <strong>"+summary24h.histAveIntensity+"gCO2/kWh</strong> over the sample data set, with an effective end-user intensity including transmission and distribution losses of <strong>"+(Math.round(summary24h.histAveIntensity * (1 + summary24h.totalGridLosses)))+"gCO2/kWh</strong>.</p>"); // Intensity (and generation) by hour of day. 
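        // Each hourly bar below is scaled against the busiest slot:
        //   height = (GCOMP_PX_MAX * hIntensity) / maxHourlyIntensity
        // so, for example, an hour averaging half the maximum hourly intensity is drawn at
        // half of GCOMP_PX_MAX pixels.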
final int newSlot = FUELINST.CurrentSummary.getGMTHourOfDay(startTime); w.write("<div><table style=\"margin-left:auto;margin-right:auto\">"); w.write("<tr><th colspan=\"24\">"); w.write(isDataStale ? "Last available historical" : "Recent"); w.write(" mean GMT hourly generation intensity gCO2/kWh (average="+summary24h.histAveIntensity+"); *now (="+summary24h.currentIntensity+")</th></tr>"); w.write("<tr>"); // Always start at midnight GMT if the data is stale. final int startSlot = isDataStale ? 0 : (1 + Math.max(0, newSlot)) % 24; for(int h = 0; h < 24; ++h) { final StringBuffer sbh = new StringBuffer(2); final int displayHourGMT = (h + startSlot) % 24; sbh.append(displayHourGMT); if(sbh.length() < 2) { sbh.insert(0, '0'); } if(hourOfDayHistorical == displayHourGMT) { sbh.append('*'); } w.write("<th style=\"border:1px solid\">"+sbh+"</th>"); } w.write("</tr>"); w.write("<tr>"); boolean usedLessGreen = false; final int maxHourlyIntensity = summary24h.histAveIntensityByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final int displayHourGMT = (h + startSlot) % 24; final Integer hIntensity = summary24h.histAveIntensityByHourOfDay.get(displayHourGMT); if((null == hIntensity) || (0 == hIntensity)) { w.write("<td></td>"); continue; /* Skip empty slot. */ } final TrafficLight rawHourStatus = summary24h.selectColour(hIntensity); // But if the colour is GREEN but we're using pumped storage // then switch to a paler shade instead (ie mainly green, but not fully)... final boolean lessGreen = ((TrafficLight.GREEN == rawHourStatus) && (summary24h.histAveStorageDrawdownByHourOfDay.get(displayHourGMT) > 0)); if(lessGreen) { usedLessGreen = true; } final String barColour = lessGreen ? FUELINSTUtils.LESS_GREEN_STORAGE_DRAWDOWN : rawHourStatus.toString().toLowerCase(); final int height = (GCOMP_PX_MAX*hIntensity) / Math.max(1, maxHourlyIntensity); w.write("<td style=\"width:30px\"><ul class=\"barGraph\">"); w.write("<li style=\"background-color:"+barColour+";height:"+height+"px;left:0\">"); w.write(String.valueOf(hIntensity)); w.write("</li>"); w.write("</ul></td>"); } w.write("</tr>"); w.write("<tr><th colspan=\"24\">Mean GMT hourly generation GW (<span style=\"color:gray\">all</span>, <span style=\"color:green\">zero-carbon</span>)</th></tr>"); w.write("<tr>"); // Compute the maximum generation in any of the hourly slots // to give us maximum scaling of the displayed bars. final int maxGenerationMW = summary24h.histAveGenerationByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final int displayHourGMT = (h + startSlot) % 24; final Integer hGeneration = summary24h.histAveGenerationByHourOfDay.get(displayHourGMT); if((null == hGeneration) || (0 == hGeneration)) { w.write("<td></td>"); continue; /* Skip empty slot. 
*/ } final int height = (GCOMP_PX_MAX*hGeneration) / Math.max(1, maxGenerationMW); final int scaledToGW = (hGeneration + 500) / 1000; w.write("<td style=\"width:30px\"><ul class=\"barGraph\">"); w.write("<li style=\"background-color:gray;height:"+height+"px;left:0\">"); w.write(String.valueOf(scaledToGW)); w.write("</li>"); final int hZCGeneration = summary24h.histAveZCGenerationByHourOfDay.get0(displayHourGMT); if(0 != hZCGeneration) { w.write("<li style=\"background-color:green;height:"+((GCOMP_PX_MAX*hZCGeneration) / Math.max(1, maxGenerationMW))+"px;left:0\">"); if(hZCGeneration >= (maxGenerationMW/8)) { w.write(String.valueOf((hZCGeneration + 500) / 1000)); } w.write("</li>"); } // final int hDrawdown = summary.histAveStorageDrawdownByHourOfDay.get0(displayHourGMT); // if(0 != hDrawdown) // { // w.write("<li style=\"background-color:yellow;height:"+((GCOMP_PX_MAX*hDrawdown) / Math.max(1, maxGenerationMW))+"px;left:0px;\">"); // if(hDrawdown >= maxGenerationMW/8) { w.write(String.valueOf((hDrawdown + 500) / 1000)); } // w.write("</li>"); // } w.write("</ul></td>"); } w.write("</tr>"); w.write("</table></div>"); w.println(); // Footnotes if(usedLessGreen) { w.println("<p>Hours that are basically <span style=\"color:green\">green</span>, but in which there is draw-down from grid-connected storage with its attendant energy losses and also suggesting that little or no excess non-dispatchable generation is available, ie that are marginally green, are shaded <span style=\"color:"+FUELINSTUtils.LESS_GREEN_STORAGE_DRAWDOWN+"\">"+FUELINSTUtils.LESS_GREEN_STORAGE_DRAWDOWN+"</span>.</p>"); } // TODO: Show cumulative MWh and tCO2. if(!isDataStale) { // Show some stats only relevant for live data... w.write("<p>Current/latest fuel mix at "); w.write(String.valueOf(new Date(summary24h.timestamp))); w.write(':'); final SortedMap<String,Integer> power = new TreeMap<String, Integer>(summary24h.currentGenerationMWByFuelMW); for(final String fuel : power.keySet()) { w.write(' '); w.write(fuel); w.write("@"+power.get(fuel)+"MW"); } w.write(".</p>"); w.println(); if(summary24h.currentStorageDrawdownMW > 0) { w.write("<p>Current draw-down from storage is "); w.write(Long.toString(summary24h.currentStorageDrawdownMW)); w.write("MW.</p>"); w.println(); } // Show fuels broken down by category, if categories are assigned. final Map<String, Set<String>> byCategory = getFuelsByCategory(); if(!byCategory.isEmpty()) { final Map<String,Integer> byCat = getFuelMWByCategory(summary24h.currentGenerationMWByFuelMW, byCategory); w.write("<p>Generation by fuel category (may overlap):</p><dl>"); final SortedMap<String,Integer> powerbyCat = new TreeMap<String, Integer>(byCat); for(final String category : powerbyCat.keySet()) { final Integer genMW = powerbyCat.get(category); final int percent = Math.round((100.0f * genMW) / Math.max(1, summary24h.currentMW)); w.write("<dt>"); w.write(category); w.write(" @ "); w.write(Integer.toString(percent)); w.write("%</dt>"); w.write("<dd>"); // Write MW under this category. w.write(String.valueOf(genMW)); w.write("MW"); // Write sorted fuel list... 
w.write(" "); w.write((new ArrayList<String>(new TreeSet<String>(byCategory.get(category)))).toString()); w.write(""); w.write("</dd>"); } w.write("</dl>"); w.println(); } } final LocalDate todayUTC = LocalDate.now(ZoneOffset.UTC); final int intensityYear = todayUTC.getYear(); w.write("<p>Overall generation intensity (kgCO2/kWh) computed using the following fuel year-"+intensityYear+" intensities (other fuels/sources are ignored):"); final Map<String, Float> configuredIntensities = FUELINSTUtils.getConfiguredIntensities(intensityYear); final SortedMap<String,Float> intensities = new TreeMap<String, Float>(FUELINSTUtils.getConfiguredIntensities(intensityYear)); for(final String fuel : intensities.keySet()) { w.write(' '); w.write(fuel); w.write("="+intensities.get(fuel)); } w.write(".</p>"); w.println(); w.write("<p>Rolling correlation of fuel use against grid intensity (-ve implies that this fuel reduces grid intensity for non-callable sources):"); final SortedMap<String,Float> goodness = new TreeMap<String, Float>(summary24h.correlationIntensityToFuel); for(final String fuel : goodness.keySet()) { w.format(" %s=%.4f", fuel, goodness.get(fuel)); } w.write(".</p>"); w.println(); // Key for fuel names/codes if available. final SortedMap<String,String> fullFuelNames = new TreeMap<String,String>(FUELINSTUtils.getConfiguredFuelNames()); if(!fullFuelNames.isEmpty()) { w.write("<p>Key to fuel codes:</p><dl>"); for(final String fuel : fullFuelNames.keySet()) { w.write("<dt>"); w.write(fuel); w.write("</dt>"); w.write("<dd>"); w.write(fullFuelNames.get(fuel)); w.write("</dd>"); } w.write("</dl>"); w.println(); } // Some coverage information from the summaries. w.write("<p>(Histogram input windows: "); w.write(Long.toString((summary24h.histWindowSize + (1800*1000)) / (3600*1000))); w.write("h, "); w.write(Long.toString((summary7d.histWindowSize + (1800*1000)) / (3600*1000))); w.write("h"); w.write(".)</p>"); w.println(); w.println("<h3>Methodology</h3>"); w.println(rawProperties.get("methodology.HTML")); w.println("<p>This page updated at "+(new Date())+"; generation time "+(System.currentTimeMillis()-startTime)+"ms.</p>"); w.println(rawProperties.get("trafficLightPage.HTML.postamble")); w.flush(); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of HTML page... DataUtils.replacePublishedFile(outputHTMLFileName, baos.toByteArray()); } /**Update (atomically if possible) the plain-text bare gCO2e/kWh intensity value. * The file will be removed if the data is stale. * Predicted values are not published, only live fresh ones. */ static void updateTXTFile(final long startTime, final String outputTXTFileName, final CurrentSummary summary, final boolean isDataStale) throws IOException { // In case of stale/missing data remove any result file. if(isDataStale || (null == summary)) { (new File(outputTXTFileName)).delete(); return; } final ByteArrayOutputStream baos = new ByteArrayOutputStream(16384); final PrintWriter w = new PrintWriter(baos); try { w.write(String.valueOf(Math.round(summary.currentIntensity * (1 + summary.totalGridLosses)))); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of HTML page... DataUtils.replacePublishedFile(outputTXTFileName, baos.toByteArray()); } /**Update (atomically if possible) the mobile-friendly XHTML traffic-light page. * The generated page is designed to be very light-weight * and usable by a mobile phone (eg as if under the .mobi TLD). 
*/ static void updateXHTMLFile(final long startTime, final String outputXHTMLFileName, final FUELINST.CurrentSummary summary, final boolean isDataStale, final int hourOfDayHistorical, final TrafficLight status) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(8192); final PrintWriter w = new PrintWriter(baos); try { final Map<String, String> rawProperties = MainProperties.getRawProperties(); w.println(rawProperties.get("trafficLightPage.XHTML.preamble")); w.println("<div style=\"background:"+((status == null) ? "gray" : status.toString().toLowerCase())+"\">"); final String weaselWord = isDataStale ? "probably " : ""; if(status == TrafficLight.RED) { w.println("Status RED: grid carbon intensity is "+weaselWord+"high; please do not run big appliances such as a dishwasher or washing machine now if you can postpone."); } else if(status == TrafficLight.GREEN) { w.println("Status GREEN: grid is "+weaselWord+"good; run appliances now to minimise CO2 emissions."); } else if(status == TrafficLight.YELLOW) { w.println("Status YELLOW: grid is "+weaselWord+"OK; but you could still avoid CO2 emissions by postponing running big appliances such as dishwashers or washing machines."); } else { w.println("Grid status is UNKNOWN."); } w.println("</div>"); if(isDataStale) { w.println("<p><em>*WARNING: cannot obtain current data so this is partly based on predictions from historical data (for "+hourOfDayHistorical+":XX GMT).</em></p>"); } w.println("<p>This page updated at "+(new Date())+".</p>"); w.println(rawProperties.get("trafficLightPage.XHTML.postamble")); w.flush(); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of XHTML page... DataUtils.replacePublishedFile(outputXHTMLFileName, baos.toByteArray()); } /**Update (atomically if possible) the XML traffic-light data dump. * Dumps current-year (at time call is run) fuel intensities. */ public static void updateXMLFile(final long startTime, final String outputXMLFileName, final FUELINST.CurrentSummary summary, final boolean isDataStale, final int hourOfDayHistorical, final TrafficLight status) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(16384); final PrintWriter w = new PrintWriter(baos); try { // final Map<String, String> rawProperties = MainProperties.getRawProperties(); w.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); w.println("<results>"); if(isDataStale) { w.println("<warning>*WARNING: cannot obtain current data so this is partly based on predictions from historical data (for "+hourOfDayHistorical+":XX GMT).</warning>"); } w.println("<stale_data>"+isDataStale+"</stale_data>"); // if(status == TrafficLight.RED) // { w.println("<status>1</status>"); } // else if(status == TrafficLight.YELLOW) // { w.println("<status>0</status>"); } // else if(status == TrafficLight.GREEN) // { w.println("<status>-1</status>"); } w.print("<status>"); if(null != status) { w.print(status); } w.println("</status>"); if(summary.histMinIntensity < summary.histMaxIntensity) { w.println("<saving>"+FUELINSTUtils.computeVariability(summary.histMinIntensity, summary.histMaxIntensity)+"</saving>"); } // Note any recent change/delta if the data is not stale. 
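        // The recent intensity trend is emitted as element text GREEN (falling) or RED (rising)
        // rather than the older -1/+1 numeric encoding kept in the commented-out lines below,
        // and only when live (non-stale) data is available.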
if(!isDataStale) { if(summary.recentChange == TrafficLight.GREEN) // { w.println("<carbon_intensity>-1</carbon_intensity>"); } { w.println("<carbon_intensity>GREEN</carbon_intensity>"); } else if(summary.recentChange == TrafficLight.RED) // { w.println("<carbon_intensity>1</carbon_intensity>"); } { w.println("<carbon_intensity>RED</carbon_intensity>"); } } w.println("<timestamp>"+summary.timestamp+"</timestamp>"); w.write("<grid_carbon_intensity>"); w.write(String.valueOf(Math.round((isDataStale ? summary.histAveIntensity : summary.currentIntensity) * (1 + summary.totalGridLosses)))); w.write("</grid_carbon_intensity>"); w.println(); w.write("<transmission_losses>"); w.write(String.valueOf(Math.round(100 * summary.totalGridLosses))); w.write("</transmission_losses>"); w.println(); w.println("<latest>"); w.println("<carbon_intensity>"+ summary.currentIntensity +"</carbon_intensity>"); w.println("<timestamp>"+ summary.timestamp +"</timestamp>"); w.println("<generation>"+ summary.currentMW+"</generation>"); w.println("<rolling_average_period>"+((summary.histWindowSize+1800000) / 3600000)+"</rolling_average_period>"); w.println("<rolling_average_carbon_intensity>"+ summary.histAveIntensity+"</rolling_average_carbon_intensity>"); w.println("</latest>"); w.println("<minimum>"); w.println("<carbon_intensity>"+ summary.histMinIntensity +"</carbon_intensity>"); w.println("<timestamp>"+ summary.minIntensityRecordTimestamp +"</timestamp>"); w.println("</minimum>"); w.println("<maximum>"); w.println("<carbon_intensity>"+ summary.histMaxIntensity +"</carbon_intensity>"); w.println("<timestamp>"+ summary.maxIntensityRecordTimestamp +"</timestamp>"); w.println("</maximum>"); // Intensity (and generation) by hour of day. final int newSlot = FUELINST.CurrentSummary.getGMTHourOfDay(startTime); w.println("<generation_intensity>"); w.println("<average>"+summary.histAveIntensity+"</average>"); w.println("<current>"+summary.currentIntensity+"</current>"); // Always start at midnight GMT if the data is stale. final int startSlot = isDataStale ? 0 : (1 + Math.max(0, newSlot)) % 24; // final int maxHourlyIntensity = summary.histAveIntensityByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final StringBuffer sbh = new StringBuffer(2); final int displayHourGMT = (h + startSlot) % 24; sbh.append(displayHourGMT); if(sbh.length() < 2) { sbh.insert(0, '0'); } final Integer hIntensity = summary.histAveIntensityByHourOfDay.get(displayHourGMT); w.println("<sample>"); w.println("<hour>"+sbh+"</hour>"); w.println("<carbon_intensity>"); if((null == hIntensity) || (0 == hIntensity)) { /* Empty slot. */ } else { w.println(String.valueOf(hIntensity)); } w.println("</carbon_intensity>"); w.println("</sample>"); } w.println("</generation_intensity>"); w.println("<generation>"); // Compute the maximum generation in any of the hourly slots // to give us maximum scaling of the displayed bars. final int maxGenerationMW = summary.histAveGenerationByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final int displayHourGMT = (h + startSlot) % 24; final StringBuffer sbh = new StringBuffer(2); sbh.append(displayHourGMT); if(sbh.length() < 2) { sbh.insert(0, '0'); } final Integer hGeneration = summary.histAveGenerationByHourOfDay.get(displayHourGMT); if((null == hGeneration) || (0 == hGeneration)) { continue; /* Skip empty slot. 
*/ } // final int height = (GCOMP_PX_MAX*hGeneration) / Math.max(1, maxGenerationMW); final int scaledToGW = (hGeneration + 500) / 1000; w.println("<sample>"); w.println("<hour>"+sbh+"</hour>"); w.println("<all>"+String.valueOf(scaledToGW)+"</all>"); final int hZCGeneration = summary.histAveZCGenerationByHourOfDay.get0(displayHourGMT); if(0 != hZCGeneration) { if(hZCGeneration >= (maxGenerationMW/8)) { w.println("<zero_carbon>"+String.valueOf((hZCGeneration + 500) / 1000)+"</zero_carbon>"); } } w.println("</sample>"); } w.println("</generation>"); // TODO: Show cumulative MWh and tCO2. // FIXME: DHD20090608: I suggest leaving the fuel names as-is (upper case) in the XML as those are the 'formal' Elexon names; convert for display if need be. // FIXME: DHD20090608: As fuel names may not always be XML-token-safe, maybe <fuel name="NNN">amount</fuel> would be better? if(!isDataStale) { w.println("<fuel_mix>"); w.println("<timestamp>"+summary.timestamp+"</timestamp>"); final SortedMap<String,Integer> power = new TreeMap<String, Integer>(summary.currentGenerationMWByFuelMW); for(final String fuel : power.keySet()) { w.println("<"+fuel.toLowerCase()+">"+power.get(fuel)+"</"+fuel.toLowerCase()+">"); } w.println("</fuel_mix>"); } w.println("<fuel_intensities>"); w.println("<timestamp>"+summary.timestamp+"</timestamp>"); // Note: current-year intensities are used. final LocalDate todayUTC = LocalDate.now(ZoneOffset.UTC); final int intensityYear = todayUTC.getYear(); final SortedMap<String,Float> intensities = new TreeMap<String, Float>(FUELINSTUtils.getConfiguredIntensities(intensityYear)); for(final String fuel : intensities.keySet()) { w.println("<"+fuel.toLowerCase()+">"+intensities.get(fuel)+"</"+fuel.toLowerCase()+">"); } w.println("</fuel_intensities>"); w.println("</results>"); w.flush(); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of XML page... DataUtils.replacePublishedFile(outputXMLFileName, baos.toByteArray()); } }
javasrc/org/hd/d/edh/FUELINSTUtils.java
/* Copyright (c) 2008-2021, Damon Hart-Davis, Ecotricity (Rob Clews). All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.hd.d.edh; import java.io.BufferedWriter; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.net.URL; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.LocalDate; import java.time.ZoneOffset; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.SortedSet; import java.util.TimeZone; import java.util.TreeMap; import java.util.TreeSet; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.regex.Pattern; import org.hd.d.edh.FUELINST.CurrentSummary; /**Supporting routines of general interest for handling FUELINST data. */ public final class FUELINSTUtils { private FUELINSTUtils() { /* Prevent creation of an instance. */ } /**Longest edge of graphics building block components in pixels for HTML generation; strictly positive. */ static final int GCOMP_PX_MAX = 100; /**If true then when data is stale then cautiously never normally show a GREEN status, but YELLOW at best. */ private static final boolean NEVER_GREEN_WHEN_STALE = true; /**If the basic colour is GREEN but we're using pumped storage then we can indicate that with a yellowish green instead (ie mainly green, but not fully). */ static final String LESS_GREEN_STORAGE_DRAWDOWN = "olive"; /**If true then reject points with too few fuel types in mix since this is likely an error. */ final static int MIN_FUEL_TYPES_IN_MIX = 2; /**If true, compress (GZIP) any persisted state. */ static final boolean GZIP_CACHE = true; /**Immutable regex pattern for matching a valid fuel name (all upper-case ASCII first char, digits also allowed subsequently); non-null. 
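 * For example, upper-case fuel codes such as "INTEW" or "CCGT" match, while lower-case or single-character tokens do not.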
*/ public static final Pattern FUEL_NAME_REGEX = Pattern.compile("[A-Z][A-Z0-9]+"); /**Immutable regex pattern for matching a valid fuel intensity year 20XX; non-null. */ public static final Pattern FUEL_INTENSITY_YEAR_REGEX = Pattern.compile("20[0-9][0-9]"); /**SimpleDateFormat pattern to parse TIBCO FUELINST timestamp down to seconds (all assumed GMT/UTC); not null. * Example TIBCO timestamp: 2009:03:09:23:57:30:GMT * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String TIBCOTIMESTAMP_FORMAT = "yyyy:MM:dd:HH:mm:ss:zzz"; /**SimpleDateFormat pattern to parse CSV FUELINST timestamp down to seconds (all assumed GMT/UTC); not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String CSVTIMESTAMP_FORMAT = "yyyyMMddHHmmss"; /**SimpleDateFormat pattern to generate UTC date down to days; not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String UTCDAYFILENAME_FORMAT = "yyyyMMdd"; /**SimpleDateFormat pattern to generate ISO 8601 UTC timestamp down to minutes; not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String UTCMINTIMESTAMP_FORMAT = "yyyy-MM-dd'T'HH:mm'Z'"; /**SimpleDateFormat pattern to generate/parse compact HH:mm timestamp down to seconds (all assumed GMT/UTC); not null. * Note that SimpleDateFormat is not immutable nor thread-safe. */ public static final String HHMMTIMESTAMP_FORMAT = "HH:mm"; /**GMT TimeZone; never null. * Only package-visible because it may be mutable though we never attempt to mutate it. * <p> * We may share this (read-only) between threads and within this package. */ static final TimeZone GMT_TIME_ZONE = TimeZone.getTimeZone("GMT"); /**Charset for FUELINST data (ASCII 7-bit). */ public static final Charset FUELINST_CHARSET = StandardCharsets.US_ASCII; /**Number of hours in a day. */ public static final int HOURS_PER_DAY = 24; /**Number of hours in a week. */ public static final int HOURS_PER_WEEK = 7 * 24; /**Suffix to use for (serialised, gzipped) cache of last non-stale (24h) result. */ public static final String RESULT_CACHE_SUFFIX = ".cache"; /**Suffix to use for (gzipped, ASCII, CSV, pseudo-FUELINST format) longish-term (7d+) store. */ public static final String LONG_STORE_SUFFIX = ".longstore.csv.gz"; /**Compute current status of fuel intensity; never null, but may be empty/default if data not available. * If cacheing is enabled, then this may revert to cache in case of * difficulty retrieving new data. * <p> * Uses fuel intensities as of this year, ie when this call is made. * <p> * Purely functional other than some writes to stdout/stdere: * has no side-effects and does not alter the input. * * @param parsedBMRCSV parsed (as strings) BMR CSV file data, or null if unavailable * @return summary; never null * @throws IOException in case of data unavailabilty or corruption */ public static FUELINST.CurrentSummary computeCurrentSummary( final List<List<String>> parsedBMRCSV) throws IOException { // If passed-in data is obviously broken // then return an empty/default result. if((null == parsedBMRCSV) || parsedBMRCSV.isEmpty()) { return(new FUELINST.CurrentSummary()); } // Get as much set up as we can before pestering the data source... 
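        // Pull required configuration: the FUELINST row field-name template, per-fuel intensities for the current year,
        // the maximum acceptable data age, and transmission/distribution loss factors; a missing property is treated as fatal.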
        final Map<String, String> rawProperties = MainProperties.getRawProperties();
//        final String dataURL = rawProperties.get(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL);
//        if(null == dataURL)
//            { throw new IllegalStateException("Property undefined for data source URL: " + FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); }
        final String template = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_ROW_FIELDNAMES);
        if(null == template)
            { throw new IllegalStateException("Property undefined for FUELINST row field names: " + FUELINST.FUELINST_MAIN_PROPNAME_ROW_FIELDNAMES); }
        // Use fuel intensities as of this year, ie when this call is made.
        final LocalDate todayUTC = LocalDate.now(ZoneOffset.UTC);
        final Map<String, Float> configuredIntensities = FUELINSTUtils.getConfiguredIntensities(todayUTC.getYear());
        if(configuredIntensities.isEmpty())
            { throw new IllegalStateException("Properties undefined for fuel intensities: " + FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_PREFIX + "*"); }
        final String maxIntensityAgeS = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_MAX_AGE);
        if(null == maxIntensityAgeS)
            { throw new IllegalStateException("Property undefined for FUELINST acceptable age (s): " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_AGE); }
        final long maxIntensityAge = Math.round(1000 * Double.parseDouble(maxIntensityAgeS));
        final String distLossS = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_MAX_DIST_LOSS);
        if(null == distLossS)
            { throw new IllegalStateException("Property undefined for FUELINST distribution loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_DIST_LOSS); }
        final float distLoss = Float.parseFloat(distLossS);
        if(!((distLoss >= 0) && (distLoss <= 1)))
            { throw new IllegalStateException("Bad value outside range [0.0,1.0] for FUELINST distribution loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_DIST_LOSS); }
        final String tranLossS = rawProperties.get(FUELINST.FUELINST_MAIN_PROPNAME_MAX_TRAN_LOSS);
        if(null == tranLossS)
            { throw new IllegalStateException("Property undefined for FUELINST transmission loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_TRAN_LOSS); }
        final float tranLoss = Float.parseFloat(tranLossS);
        if(!((tranLoss >= 0) && (tranLoss <= 1)))
            { throw new IllegalStateException("Bad value outside range [0.0,1.0] for FUELINST transmission loss: " + FUELINST.FUELINST_MAIN_PROPNAME_MAX_TRAN_LOSS); }
        // Extract all fuel categories.
        final Map<String, Set<String>> fuelsByCategory = getFuelsByCategory();
        // Extract Set of zero-or-more 'storage'/'fuel' types/names; never null but may be empty.
        final Set<String> storageTypes = (fuelsByCategory.containsKey(FUELINST.FUELINST_CATNAME_STORAGE) ?
            fuelsByCategory.get(FUELINST.FUELINST_CATNAME_STORAGE) : Collections.<String>emptySet());
        // All intensity sample values from good records (assuming roughly equally spaced).
        final List<Integer> allIntensitySamples = new ArrayList<Integer>(parsedBMRCSV.size());
        // Compute summary.
        final SimpleDateFormat timestampParser = FUELINSTUtils.getCSVTimestampParser();
        int goodRecordCount = 0;
        int totalIntensity = 0;
        long firstGoodRecordTimestamp = 0;
        long lastGoodRecordTimestamp = 0;
        long minIntensityRecordTimestamp = 0;
        long maxIntensityRecordTimestamp = 0;
        int minIntensity = Integer.MAX_VALUE;
        int maxIntensity = Integer.MIN_VALUE;
        int currentIntensity = 0;
        long currentMW = 0;
        long currentStorageDrawdownMW = 0;
        Map<String,Integer> currentGenerationByFuel = Collections.emptyMap();
        final int[] sampleCount = new int[FUELINSTUtils.HOURS_PER_DAY]; // Count of all good timestamped records.
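        // Accumulators indexed by GMT hour of day [0,24); hourly means are derived later by dividing by sampleCount[hour].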
final long[] totalIntensityByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. final long[] totalGenerationByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. final long[] totalZCGenerationByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. final long[] totalStorageDrawdownByHourOfDay = new long[FUELINSTUtils.HOURS_PER_DAY]; // Use long to avoid overflow if many samples. // Set of all usable fuel types encountered. final Set<String> usableFuels = new HashSet<String>(); // Sample-by-sample list of map of generation by fuel type (in MW) and from "" to weighted intensity (gCO2/kWh). final List<Map<String, Integer>> sampleBySampleGenForCorr = new ArrayList<Map<String,Integer>>(parsedBMRCSV.size()); // Compute (crude) correlation between fuel use and intensity. for(final List<String> row : parsedBMRCSV) { // Extract fuel values for this row and compute a weighted intensity... final Map<String, String> namedFields = DataUtils.extractNamedFieldsByPositionFromRow(template, row); // Special case after BMRS upgrade 2016/12/30: ignore trailing row starting "FTR ". if(namedFields.get("type").startsWith("FTR")) { continue; } // Reject malformed/unexpected data. if(!"FUELINST".equals(namedFields.get("type"))) { throw new IOException("Expected FUELINST data but got: " + namedFields.get("type")); } final Map<String,Integer> generationByFuel = new HashMap<String,Integer>(); long thisMW = 0; // Total MW generation in this slot. long thisStorageDrawdownMW = 0; // Total MW storage draw-down in this slot. long thisZCGenerationMW = 0; // Total zero-carbon generation in this slot. // Retain any field that is all caps so that we can display it. for(final String name : namedFields.keySet()) { // Skip if something other than a valid fuel name. if(!FUELINSTUtils.FUEL_NAME_REGEX.matcher(name).matches()) { // DHD20211031: all inspected were benign 'date', 'type', 'settlementperiod', 'timestamp'. // System.err.println("Skipping invalid 'fuel' name "+name+" at " + namedFields.get("timestamp") + " from row " + row); continue; } // Store the MW for this fuel. final int fuelMW = Integer.parseInt(namedFields.get(name), 10); if(fuelMW < 0) { continue; } // NB: -ve INTerconnector values in TIBCO data as of 2012 // { throw new IOException("Bad (-ve) fuel generation MW value: "+row); } thisMW += fuelMW; generationByFuel.put(name, fuelMW); // Slices of generation/demand. if(storageTypes.contains(name)) { thisStorageDrawdownMW += fuelMW; } final Float fuelInt = configuredIntensities.get(name); final boolean usableFuel = null != fuelInt; if(usableFuel) { usableFuels.add(name); } if(usableFuel && (fuelInt <= 0)) { thisZCGenerationMW += fuelMW; } } // Compute weighted intensity as gCO2/kWh for simplicity of representation. // 'Bad' fuels such as coal are ~1000, natural gas is <400, wind and nuclear are roughly 0. final int weightedIntensity = Math.round(1000 * FUELINSTUtils.computeWeightedIntensity(configuredIntensities, generationByFuel, MIN_FUEL_TYPES_IN_MIX)); // Reject bad (-ve) records. if(weightedIntensity < 0) { System.err.println("ERROR: skipping non-positive weighed intensity record at " + namedFields.get("timestamp")); continue; } allIntensitySamples.add(weightedIntensity); // For computing correlations... // Add entry only iff both a valid weighted intensity and at least one by-fuel number. 
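        // Each correlation entry maps fuel name to MW, with the empty-string key carrying that sample's weighted grid intensity.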
if(!generationByFuel.isEmpty()) { final Map<String, Integer> corrEntry = new HashMap<String, Integer>(generationByFuel); corrEntry.put("", weightedIntensity); sampleBySampleGenForCorr.add(corrEntry); } currentMW = thisMW; currentIntensity = weightedIntensity; // Last (good) record we process is the 'current' one as they are in date order. currentGenerationByFuel = generationByFuel; currentStorageDrawdownMW = thisStorageDrawdownMW; // Last (good) record is 'current'. ++goodRecordCount; totalIntensity += weightedIntensity; // Extract timestamp field as defined in the template, format YYYYMMDDHHMMSS. final String rawTimestamp = namedFields.get("timestamp"); long recordTimestamp = 0; // Will be non-zero after a successful parse. if(null == rawTimestamp) { System.err.println("ERROR: missing FUELINST row timestamp"); } else { try { final Date d = timestampParser.parse(rawTimestamp); recordTimestamp = d.getTime(); lastGoodRecordTimestamp = recordTimestamp; if(firstGoodRecordTimestamp == 0) { firstGoodRecordTimestamp = recordTimestamp; } // Extract raw GMT hour from YYYYMMDDHH... final int hour = Integer.parseInt(rawTimestamp.substring(8, 10), 10); //System.out.println("H="+hour+": int="+weightedIntensity+", MW="+currentMW+" time="+d); ++sampleCount[hour]; // Accumulate intensity by hour... totalIntensityByHourOfDay[hour] += weightedIntensity; // Accumulate generation by hour... totalGenerationByHourOfDay[hour] += currentMW; // Note zero-carbon generation. totalZCGenerationByHourOfDay[hour] += thisZCGenerationMW; // Note storage draw-down, if any. totalStorageDrawdownByHourOfDay[hour] += thisStorageDrawdownMW; } catch(final ParseException e) { System.err.println("ERROR: unable to parse FUELINST record timestamp " + rawTimestamp + ": " + e.getMessage()); } } if(weightedIntensity < minIntensity) { minIntensity = weightedIntensity; minIntensityRecordTimestamp = recordTimestamp; } if(weightedIntensity > maxIntensity) { maxIntensity = weightedIntensity; maxIntensityRecordTimestamp = recordTimestamp; } } //System.out.println("INFO: first good record timestamp "+(new Date(firstGoodRecordTimestamp))); //System.out.println("INFO: last good record timestamp "+(new Date(lastGoodRecordTimestamp))+" vs now "+(new Date(System.currentTimeMillis()))); // Note if the intensity dropped/improved in the final samples. TrafficLight recentChange = null; if(allIntensitySamples.size() > 1) { final Integer prev = allIntensitySamples.get(allIntensitySamples.size() - 2); final Integer last = allIntensitySamples.get(allIntensitySamples.size() - 1); if(prev < last) { recentChange = TrafficLight.RED; } else if(prev > last) { recentChange = TrafficLight.GREEN; } else { recentChange = TrafficLight.YELLOW; } } // Compute traffic light status: defaults to 'unknown'. TrafficLight status = null; final int aveIntensity = totalIntensity / Math.max(goodRecordCount, 1); // Always set the outputs and let the caller decide what to do with aged data. int lowerThreshold = 0; int upperThreshold = 0; final int allSamplesSize = allIntensitySamples.size(); if(allSamplesSize > 3) // Only useful above some minimal set size. { // Normally we expect bmreports to give us 24hrs' data. // RED will be where the current value is in the upper quartile of the last 24hrs' intensities, // GREEN when in the lower quartile (and below the mean to be safe), so is fairly conservative, // YELLOW otherwise. // as long as we're on better-than-median intensity compared to the last 24 hours. 
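        // For example, with 48 sorted samples the upper threshold is element 35 (top-quartile boundary) and the lower threshold is min(element 12, mean).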
final List<Integer> sortedIntensitySamples = new ArrayList<Integer>(allIntensitySamples); Collections.sort(sortedIntensitySamples); upperThreshold = sortedIntensitySamples.get(allSamplesSize-1 - (allSamplesSize / 4)); lowerThreshold = Math.min(sortedIntensitySamples.get(allSamplesSize / 4), aveIntensity); if(currentIntensity > upperThreshold) { status = TrafficLight.RED; } else if(currentIntensity < lowerThreshold) { status = TrafficLight.GREEN; } else { status = TrafficLight.YELLOW; } } //else { System.err.println("Newest data point too old"); } else { System.err.println("Too few samples: " + allSamplesSize); } // Compute mean intensity by time slot. final List<Integer> aveIntensityByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveIntensityByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalIntensityByHourOfDay[h] / sampleCount[h]))); } // Compute mean generation by time slot. final List<Integer> aveGenerationByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveGenerationByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalGenerationByHourOfDay[h] / sampleCount[h]))); } // Compute mean zero-carbon generation by time slot. final List<Integer> aveZCGenerationByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveZCGenerationByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalZCGenerationByHourOfDay[h] / sampleCount[h]))); } // Compute mean draw-down from storage by time slot. final List<Integer> aveStorageDrawdownByHourOfDay = new ArrayList<Integer>(24); for(int h = 0; h < 24; ++h) { aveStorageDrawdownByHourOfDay.add((sampleCount[h] < 1) ? null : Integer.valueOf((int) (totalStorageDrawdownByHourOfDay[h] / sampleCount[h]))); } // Compute fuel/intensity correlation. final Map<String,Float> correlationIntensityToFuel = new HashMap<String,Float>(usableFuels.size()); if(!sampleBySampleGenForCorr.isEmpty()) { // Compute correlation by fuel, where there are enough samples. for(final String fuel : usableFuels) { final List<Double> fuelMW = new ArrayList<Double>(sampleBySampleGenForCorr.size()); final List<Double> gridIntensity = new ArrayList<Double>(sampleBySampleGenForCorr.size()); for(int i = sampleBySampleGenForCorr.size(); --i >= 0; ) { final Map<String, Integer> s = sampleBySampleGenForCorr.get(i); // Only use matching pairs of intensity and MW values to keep lists matching by position. if(s.containsKey("") && s.containsKey(fuel)) { fuelMW.add(s.get(fuel).doubleValue()); gridIntensity.add(s.get("").doubleValue()); } } // Do not attempt unless enough samples. if(fuelMW.size() > 1) { final float corr = (float) StatsUtils.ComputePearsonCorrelation(gridIntensity, fuelMW); // Retain correlation only if sane / finite. if(!Float.isNaN(corr) && !Float.isInfinite(corr)) { correlationIntensityToFuel.put(fuel, corr); } } } } // Construct summary status... 
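        // Bundle status, timestamps (including a use-by time of lastGoodRecordTimestamp + maxIntensityAge), min/mean/max and current intensities,
        // hourly averages, thresholds, combined losses (tranLoss + distLoss) and fuel/intensity correlations into an immutable summary.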
final FUELINST.CurrentSummary result = new FUELINST.CurrentSummary(status, recentChange, lastGoodRecordTimestamp, lastGoodRecordTimestamp + maxIntensityAge, currentMW, currentIntensity, currentGenerationByFuel, currentStorageDrawdownMW, minIntensity, minIntensityRecordTimestamp, aveIntensity, maxIntensity, maxIntensityRecordTimestamp, (lastGoodRecordTimestamp - firstGoodRecordTimestamp), goodRecordCount, lowerThreshold, upperThreshold, aveIntensityByHourOfDay, aveGenerationByHourOfDay, aveZCGenerationByHourOfDay, aveStorageDrawdownByHourOfDay, tranLoss + distLoss, correlationIntensityToFuel); return(result); } /**Compute variability % of a set as a function of its (non-negative) min and max values; always in range [0,100]. */ static int computeVariability(final int min, final int max) { if((min < 0) || (max < 0)) { throw new IllegalArgumentException(); } if(max == 0) { return(0); } return(100 - ((100*min)/max)); } /**Compute variability % of a set as a function of its min and max values; always in range [0,100]. */ static int computeVariability(final List<FUELINSTHistorical.TimestampedNonNegInt> intensities) { if(null == intensities) { throw new IllegalArgumentException(); } int min = Integer.MAX_VALUE; int max = 0; for(final FUELINSTHistorical.TimestampedNonNegInt ti : intensities) { if(ti.value > max) { max = ti.value; } if(ti.value < min) { min = ti.value; } } return(computeVariability(min, max)); } /**Given a set of relative fuel usages and carbon intensities, computes an overall intensity; never null. * This computes an intensity in the same units as the supplied values. * Fuels whose keys are not in the intensities Map will be ignored. * <p> * Inputs must not be altered while this is in progress. * <p> * This will not attempt to alter its inputs. * * @param intensities Map from fuel name to CO2 per unit of energy; never null * @param generationByFuel Map from fuel name to power being generated from that fuel; never null * @param minFuelTypesInMix minimum number of fuel types in mix else return -1; non-negative * * @return weighted intensity of specified fuel mix for fuels with known intensity, * or -1 if too few fuels in mix */ public static float computeWeightedIntensity(final Map<String, Float> intensities, final Map<String, Integer> generationByFuel, final int minFuelTypesInMix) { if(null == intensities) { throw new IllegalArgumentException(); } if(null == generationByFuel) { throw new IllegalArgumentException(); } if(minFuelTypesInMix < 0) { throw new IllegalArgumentException(); } // Compute set of keys common to both Maps. final Set<String> commonKeys = new HashSet<String>(intensities.keySet()); commonKeys.retainAll(generationByFuel.keySet()); // If too few fuels in the mix then quickly return -1 as a distinguished value. if(commonKeys.size() < minFuelTypesInMix) { return(-1); } int nonZeroFuelCount = 0; float totalGeneration = 0; float totalCO2 = 0; for(final String fuelName : commonKeys) { final float power = generationByFuel.get(fuelName); if(power < 0) { throw new IllegalArgumentException(); } if(power == 0) { continue; } ++nonZeroFuelCount; totalGeneration += power; totalCO2 += power * intensities.get(fuelName); } // If too few (non-zero) fuels in the mix then quickly return -1 as a distinguished value. if(nonZeroFuelCount < minFuelTypesInMix) { return(-1); } final float weightedIntensity = (totalGeneration == 0) ? 0 : totalCO2 / totalGeneration; return(weightedIntensity); } /**Handle the flag files that can be tested by remote servers. 
* The basic ".flag" file is present unless status is green * AND we have live data. * <p> * The more robust ".predicted.flag" file is present unless status is green. * Live data is used if present, else a prediction is made from historical data. * <p> * The keen ".supergreen.flag" file is present unless status is green * AND we have live data * AND no storage is being drawn down on the grid. * This means that we can be pretty sure that there is a surplus of energy available. * * @param baseFileName base file name to make flags; if null then don't do flags. * @param statusCapped status capped to YELLOW if there is no live data * @param statusUncapped uncapped status (can be green from prediction even if no live data) * @param status7dCapped 7d status capped to YELLOW if there is no live data * @throws IOException in case of problems */ static void doFlagFiles(final String baseFileName, final boolean isDataStale, final TrafficLight statusCapped, final TrafficLight statusUncapped, final long currentStorageDrawdownMW, final TrafficLight status7dCapped) throws IOException { if(null == baseFileName) { return; } // In the absence of current data, // then create/clear the flag based on historical data (ie predictions) where possible. // The flag file has terminating extension (from final ".") replaced with ".flag". // (If no extension is present then ".flag" is simply appended.) final File outputFlagFile = new File(baseFileName + ".flag"); final boolean basicFlagState = TrafficLight.GREEN != statusCapped; System.out.println("INFO: basic (green) flag file is " + outputFlagFile + ": " + (basicFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is GREEN, else create it (for RED/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputFlagFile, basicFlagState); // 7d version. final File output7dFlagFile = new File(baseFileName + ".7d.flag"); final boolean basic7dFlagState = TrafficLight.GREEN != status7dCapped; System.out.println("INFO: basic 7d (green) flag file is " + output7dFlagFile + ": " + (basic7dFlagState ? "set" : "clear")); FUELINSTUtils.doPublicFlagFile(output7dFlagFile, basic7dFlagState); // Now deal with the flag that is prepared to make predictions from historical data, // ie helps to ensure that the flag will probably be cleared some time each day // even if our data source is unreliable. // When live data is available then this should be the same as the basic flag. final File outputPredictedFlagFile = new File(baseFileName + ".predicted.flag"); final boolean predictedFlagState = TrafficLight.GREEN != statusUncapped; System.out.println("INFO: predicted flag file is " + outputPredictedFlagFile + ": " + (predictedFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is GREEN, else create it (for RED/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputPredictedFlagFile, predictedFlagState); // Present unless 'capped' value is green (and thus must also be from live data) // AND storage is not being drawn from. final File outputSupergreenFlagFile = new File(baseFileName + ".supergreen.flag"); final boolean supergreenFlagState = (basicFlagState) || (currentStorageDrawdownMW > 0); System.out.println("INFO: supergreen flag file is " + outputSupergreenFlagFile + ": " + (supergreenFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is GREEN, else create it (for RED/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputSupergreenFlagFile, supergreenFlagState); // 7d version. 
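        // Set (ie not 'supergreen') unless the 7d status is GREEN, the data is live, the basic 7d flag is clear, and no storage is being drawn down.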
final File outputSupergreen7dFlagFile = new File(baseFileName + ".7d.supergreen.flag"); final boolean supergreen7dFlagState = (TrafficLight.GREEN != status7dCapped) || (isDataStale) || (basic7dFlagState) || (currentStorageDrawdownMW > 0); System.out.println("INFO: supergreen 7d flag file is " + outputSupergreen7dFlagFile + ": " + (supergreen7dFlagState ? "set" : "clear")); FUELINSTUtils.doPublicFlagFile(outputSupergreen7dFlagFile, supergreen7dFlagState); // Present when red, ie not in most carbon-intensive part of the day. // Flag is computed even with stale data. final File outputRedFlagFile = new File(baseFileName + ".red.flag"); final boolean redFlagState = TrafficLight.RED == statusUncapped; System.out.println("INFO: red flag file is " + outputRedFlagFile + ": " + (redFlagState ? "set" : "clear")); // Remove power-low/grid-poor flag file when status is not RED, else create it (for GREEN/YELLOW/unknown). FUELINSTUtils.doPublicFlagFile(outputRedFlagFile, redFlagState); // 7d version. final File output7dRedFlagFile = new File(baseFileName + ".7d.red.flag"); final boolean red7dFlagState = TrafficLight.RED == status7dCapped; System.out.println("INFO: 7d red flag file is " + output7dRedFlagFile + ": " + (red7dFlagState ? "set" : "clear")); FUELINSTUtils.doPublicFlagFile(output7dRedFlagFile, red7dFlagState); } /**Create/remove public (readable by everyone) flag file as needed to match required state. * @param outputFlagFile flag file to create (true) or remove (false) if required; non-null * @param flagRequiredPresent desired state for flag: true indicates present, false indicates absent * @throws IOException in case of difficulty */ static void doPublicFlagFile(final File outputFlagFile, final boolean flagRequiredPresent) throws IOException { if(flagRequiredPresent) { if(outputFlagFile.createNewFile()) { outputFlagFile.setReadable(true); System.out.println("INFO: flag file created: "+outputFlagFile); } } else { if(outputFlagFile.delete()) { System.out.println("INFO: flag file deleted: "+outputFlagFile); } } } /**Implement the 'traffic lights' command line option. * @param args optional (though usual) trailing argument (output HTML file name); never null */ static void doTrafficLights(final String[] args) throws IOException { if(null == args) { throw new IllegalArgumentException(); } final long startTime = System.currentTimeMillis(); System.out.println("INFO: generating traffic-light summary "+Arrays.asList(args)+"..."); final ExecutorService executor = Executors.newSingleThreadExecutor(); final String outputHTMLFileName = (args.length < 1) ? null : args[0]; final int lastDot = (outputHTMLFileName == null) ? -1 : outputHTMLFileName.lastIndexOf("."); // Base/prefix onto which to append specific extensions. final String baseFileName = (-1 == lastDot) ? outputHTMLFileName : outputHTMLFileName.substring(0, lastDot); // Compute relative paths for caches/stores. final File resultCacheFile = (null == baseFileName) ? null : (new File(baseFileName + RESULT_CACHE_SUFFIX)); final File longStoreFile = (null == baseFileName) ? null : (new File(baseFileName + LONG_STORE_SUFFIX)); // Fetch and parse the CSV file from the data source. // Will be null in case of inability to fetch or parse. 
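        // The live data URL comes from the main properties; a failed fetch or parse is logged and leaves parsedBMRCSV null
        // so that the cache and long store can be used instead.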
final Map<String, String> rawProperties = MainProperties.getRawProperties(); final String dataURL = rawProperties.get(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); if(null == dataURL) { throw new IllegalStateException("Property undefined for data source URL: " + FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_CURRENT_DATA_URL); } List<List<String>> parsedBMRCSV = null; URL url = null; try { // Set up URL connection to fetch the data. url = new URL(dataURL.trim()); // Trim to avoid problems with trailing whitespace... final long dataFetchStart = System.currentTimeMillis(); parsedBMRCSV = DataUtils.parseBMRCSV(url, null); final long dataFetchEnd = System.currentTimeMillis(); System.out.println("INFO: record/row count of CSV FUELINST data: " + parsedBMRCSV.size() + " from source: " + url + " fetch and parse "+(dataFetchEnd-dataFetchStart)+"ms"); } catch(final IOException e) { // Could not get data, so status is unknown. System.err.println("ERROR: could not fetch data from " + url + " error: " + e.getMessage()); } // Validate parsedBMRCSV (correct ordering, no dates in future, etc). // Reject entirely if problem found. if(!DataUtils.isValidBMRData(parsedBMRCSV, System.currentTimeMillis(), HOURS_PER_DAY+1)) { System.err.println("ERROR: invalid CSV FUELINST data rejected."); parsedBMRCSV = null; } // Load long (7d) store if possible. List<List<String>> longStore = null; final long longStoreFetchStart = System.currentTimeMillis(); try { longStore = DataUtils.loadBMRCSV(longStoreFile); } catch(final IOException e) { System.err.println("WARNING: could not load long store "+longStoreFile+" error: " + e.getMessage()); } final long longStoreFetchEnd = System.currentTimeMillis(); System.out.println("Long store load and parse in "+(longStoreFetchEnd-longStoreFetchStart)+"ms."); // As of 2022-10 sometimes last few records are omitted apparently when server is busy. // Attempt to patch them up here... if((null != parsedBMRCSV) && (null != longStoreFile)) { final List<List<String>> appendedNewData = DataUtils.appendNewBMRDataRecords( parsedBMRCSV, longStore); if(null != appendedNewData) { System.err.println("WARNING: some recent records omitted from this data fetch: patched back in."); parsedBMRCSV = appendedNewData; } } // Attempt to update the long store with new records. // Keep the store length trimmed. Future<Long> longStoreSave = null; // Update the long store only if there is something valid to update it with. if(null != parsedBMRCSV) { // Append any new records to long store. final List<List<String>> appendedlongStore = DataUtils.appendNewBMRDataRecords( longStore, parsedBMRCSV); if(null != appendedlongStore) { longStore = appendedlongStore; } // Trim history in long store to maximum of 7 days. final List<List<String>> trimmedLongStore = DataUtils.trimBMRData( longStore, HOURS_PER_WEEK); if(null != trimmedLongStore) { longStore = trimmedLongStore; } // Save long store (asynchronously, atomically, world-readable). final List<List<String>> lsf = longStore; longStoreSave = executor.submit(() -> { final long longStoreSaveStart = System.currentTimeMillis(); DataUtils.saveBMRCSV(lsf, longStoreFile); final long longStoreSaveEnd = System.currentTimeMillis(); //System.out.println("Long store save in "+(longStoreSaveEnd-longStoreSaveStart)+"ms."); return(longStoreSaveEnd - longStoreSaveStart); }); } // Compute 24hr summary if we have fresh data. // // If parsedBMRCSV is null or empty // this will attempt to use cached result // else fall back to empty/default result. 
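        // When cacheing is enabled and the new summary is still fresh (its use-by time is in the future),
        // it is serialised (gzipped) in the background for use as a fallback by later runs.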
CurrentSummary summary24h = null; Future<Long> resultCacheSave = null; if((null != parsedBMRCSV) && !parsedBMRCSV.isEmpty()) { final CurrentSummary result = FUELINSTUtils.computeCurrentSummary(parsedBMRCSV); summary24h = result; // If cacheing is enabled AND the new result is not stale // then persist this result, compressed. if((null != resultCacheFile) && (summary24h.useByTime >= System.currentTimeMillis())) { resultCacheSave = executor.submit(() -> { final long s = System.currentTimeMillis(); DataUtils.serialiseToFile(result, resultCacheFile, FUELINSTUtils.GZIP_CACHE, true); final long e = System.currentTimeMillis(); return(e - s); }); } } else { // Try to retrieve from cache... FUELINST.CurrentSummary cached = null; try { cached = (FUELINST.CurrentSummary) DataUtils.deserialiseFromFile(resultCacheFile, FUELINSTUtils.GZIP_CACHE); } catch(final IOException err) { /* Fall through... */ } catch(final Exception err) { err.printStackTrace(); } if(null != cached) { System.err.println("WARNING: using previous result from cache..."); summary24h = cached; } // Use place-holder value. else { summary24h = new FUELINST.CurrentSummary(); } } // Compute 7-day summary if long store is available. CurrentSummary summary7d = null; if((null != longStore) && !longStore.isEmpty()) { summary7d = FUELINSTUtils.computeCurrentSummary(longStore); } // Dump a summary of the current status. System.out.println("INFO: 24h summary: " + summary24h); System.out.println("INFO: 7d summary: " + summary7d); // Is the data stale? final boolean isDataStale = summary24h.useByTime < startTime; // Compute intensity as seen by typical GB domestic consumer, gCO2/kWh. final int retailIntensity = Math.round((isDataStale ? summary24h.histAveIntensity : summary24h.currentIntensity) * (1 + summary24h.totalGridLosses)); if(outputHTMLFileName != null) { // Status to use to drive traffic-light measure. // If the data is current then use the latest data point, // else extract a suitable historical value to use in its place. final int hourOfDayHistorical = CurrentSummary.getGMTHourOfDay(startTime); final TrafficLight statusHistorical = summary24h.selectColour(summary24h.histAveIntensityByHourOfDay.get(hourOfDayHistorical)); final TrafficLight statusHistoricalCapped = (TrafficLight.GREEN != statusHistorical) ? statusHistorical : TrafficLight.YELLOW; final TrafficLight statusUncapped = (!isDataStale) ? summary24h.status : statusHistorical; final TrafficLight status = (!isDataStale) ? summary24h.status : (NEVER_GREEN_WHEN_STALE ? statusHistoricalCapped : statusHistorical); // Status over (up to) 7d; reverts to 24h versions if no live data. final TrafficLight status7d = (!isDataStale) ? summary7d.status : (NEVER_GREEN_WHEN_STALE ? statusHistoricalCapped : statusHistorical); // Handle the flag files that can be tested by remote servers. try { FUELINSTUtils.doFlagFiles(baseFileName, isDataStale, status, statusUncapped, summary24h.currentStorageDrawdownMW, status7d); } catch(final IOException e) { e.printStackTrace(); } final TwitterUtils.TwitterDetails td = TwitterUtils.getTwitterHandle(false); // Update the HTML page. try { FUELINSTUtils.updateHTMLFile(startTime, outputHTMLFileName, summary24h, summary7d, isDataStale, hourOfDayHistorical, status, td); } catch(final IOException e) { e.printStackTrace(); } // Update the XML data dump. try { final String outputXMLFileName = (-1 != lastDot) ? 
(outputHTMLFileName.substring(0, lastDot) + ".xml") : (outputHTMLFileName + ".xml"); if(null != outputXMLFileName) { FUELINSTUtils.updateXMLFile(startTime, outputXMLFileName, summary24h, isDataStale, hourOfDayHistorical, status); } } catch(final IOException e) { e.printStackTrace(); } // Update the (mobile-friendly) XHTML page. try { final String outputXHTMLFileName = (-1 != lastDot) ? (outputHTMLFileName.substring(0, lastDot) + ".xhtml") : (outputHTMLFileName + ".xhtml"); // if(null != outputXHTMLFileName) // { FUELINSTUtils.updateXHTMLFile(startTime, outputXHTMLFileName, summary24h, isDataStale, hourOfDayHistorical, status); // } } catch(final IOException e) { e.printStackTrace(); } // Update the plain-text intensity file. try { final String outputTXTFileName = (-1 != lastDot) ? (outputHTMLFileName.substring(0, lastDot) + ".txt") : (outputHTMLFileName + ".txt"); // if(null != outputTXTFileName) // { FUELINSTUtils.updateTXTFile(startTime, outputTXTFileName, summary24h, isDataStale); // } } catch(final IOException e) { e.printStackTrace(); } // Update Twitter if it is set up // and if this represents a change from the previous status. // We may have different messages when we're working from historical data // because real-time / live data is not available. try { if(td != null) { // Compute name of file in which to cache last status we sent to Twitter. final String TwitterCacheFileName = (-1 != lastDot) ? (outputHTMLFileName.substring(0, lastDot) + ".twittercache") : (outputHTMLFileName + ".twittercache"); // Attempt to update the displayed Twitter status as necessary // only if we think the status changed since we last sent it // and it has actually changed compared to what is at Twitter... // If we can't get a hand-crafted message then we create a simple one on the fly... // We use different messages for live and historical (stale) data. final String tweetMessage = FUELINSTUtils.generateTweetMessage( isDataStale, statusUncapped, retailIntensity); TwitterUtils.setTwitterStatusIfChanged( td, new File(TwitterCacheFileName), status, tweetMessage); } } catch(final IOException e) { e.printStackTrace(); } } // Update button(s)/icon(s). try { final File bd = new File(DEFAULT_BUTTON_BASE_DIR); if(bd.isDirectory() && bd.canWrite()) { GraphicsUtils.writeSimpleIntensityIconPNG(DEFAULT_BUTTON_BASE_DIR, 32, summary24h.timestamp, summary24h.status, retailIntensity); GraphicsUtils.writeSimpleIntensityIconPNG(DEFAULT_BUTTON_BASE_DIR, 48, summary24h.timestamp, summary24h.status, retailIntensity); GraphicsUtils.writeSimpleIntensityIconPNG(DEFAULT_BUTTON_BASE_DIR, 64, summary24h.timestamp, summary24h.status, retailIntensity); } else { System.err.println("ERROR: missing directory for icons: " + DEFAULT_BUTTON_BASE_DIR); } } catch(final IOException e) { e.printStackTrace(); } // New as of 2019-10. // Append to the intensity log. // Only do this for current/live data, ie if not stale. if(isDataStale || (0 == summary24h.timestamp)) { System.err.println("WARNING: will not update log, input data is stale."); } else { try { final File id = new File(DEFAULT_INTENSITY_LOG_BASE_DIR); if(id.isDirectory() && id.canWrite()) { appendToRetailIntensityLog(id, summary24h.timestamp, retailIntensity); } else { System.err.println("ERROR: missing directory for intensity log: " + DEFAULT_INTENSITY_LOG_BASE_DIR); } } catch(final IOException e) { e.printStackTrace(); } } // Wait for/reap any side tasks. 
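        // Reap the background cache/long-store save tasks, reporting any failure, before shutting down the executor.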
if(null != resultCacheSave) { try { final Long rcT = resultCacheSave.get(); System.out.println("Result cache save in "+rcT+"ms."); } catch(final ExecutionException|InterruptedException e) { System.err.println("ERROR: could not update/save result cache: " + e.getMessage()); } } if(null != longStoreSave) { try { final Long lsT = longStoreSave.get(); System.out.println("Long store save in "+lsT+"ms."); } catch(final ExecutionException|InterruptedException e) { System.err.println("ERROR: could not update/save long store "+longStoreFile+" error: " + e.getMessage()); } } // Kill off the thread pool, completing any running task(s). // TODO: should probably be part of a finally for robustness. executor.shutdown(); } /**First (comment) line of retail intensity log. */ public static final String RETAIL_INTENSITY_LOG_HEADER_LINE_1 = "# Retail GB electricity carbon intensity as computed by earth.org.uk."; /**Second (comment) line of retail intensity log. */ public static final String RETAIL_INTENSITY_LOG_HEADER_LINE_2 = "# Time gCO2e/kWh"; /**Third (comment, intensities) line prefix of retail intensity log. */ public static final String RETAIL_INTENSITY_LOG_HEADER_LINE_3_PREFIX = "# Intensities gCO2/kWh:"; /**Append to (or create if necessary) the (retail) intensity log. * If run more often than new data is available * this may produce duplicate/repeated records. * <p> * Public for testability. * * @param id non-null writable directory for the log file * @param timestamp +ve timestamp of latest input available data point * @param retailIntensity non-negative retail/domestic intensity gCO2e/kWh * @return handle of log file, or null if none written */ public static File appendToRetailIntensityLog(File id, long timestamp, int retailIntensity) throws IOException { if(null == id) { throw new IllegalArgumentException(); } if(0 >= timestamp) { throw new IllegalArgumentException(); } if(0 > retailIntensity) { throw new IllegalArgumentException(); } // Compute the log filename. final SimpleDateFormat fsDF = new SimpleDateFormat(UTCDAYFILENAME_FORMAT); fsDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. final String dateUTC = fsDF.format(new Date(timestamp)); //System.out.println("UTC date for log: " + dateUTC); final File logFile = new File(id, dateUTC + ".log"); //System.out.println("Intensity log filename: " + logFile); // Compute the timestamp string for the log record. final SimpleDateFormat tsDF = new SimpleDateFormat(UTCMINTIMESTAMP_FORMAT); tsDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. final String timestampUTC = tsDF.format(new Date(timestamp)); // Refuse to write to a log other than today's for safety. // This may possibly wrongly drop records at either end of the day. final String todayDateUTC = fsDF.format(new Date()); if(!dateUTC.equals(todayDateUTC)) { System.err.println("WARNING: will not write to intensity log for "+dateUTC+" ("+timestampUTC+") at "+(new Date())); return(null); } // If multiple copies of this code run at once // then there may be a race creating/updating the file. // This especially applies to the header(s). final boolean logFileExists = logFile.exists(); try(PrintWriter pw = new PrintWriter( new BufferedWriter(new FileWriter(logFile, true)))) { // Write a header if the file was new. if(!logFileExists) { pw.println(RETAIL_INTENSITY_LOG_HEADER_LINE_1); pw.println("# Time gCO2e/kWh"); // DHD20211031: write out intensities based on today's year (parsed for consistency!) 
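            // Header line 3 lists each configured fuel's intensity, converted from kgCO2/kWh to an integer gCO2/kWh, in alphabetical fuel order.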
            final Map<String, Float> configuredIntensities = getConfiguredIntensities(Integer.parseInt(todayDateUTC.substring(0, 4)));
            final SortedSet<String> fuels = new TreeSet<String>(configuredIntensities.keySet());
            final StringBuilder isb = new StringBuilder(RETAIL_INTENSITY_LOG_HEADER_LINE_3_PREFIX.length() + 16*fuels.size());
            isb.append(RETAIL_INTENSITY_LOG_HEADER_LINE_3_PREFIX);
            for(final String f : fuels) { isb.append(" "+f+"="+(Math.round(1000*configuredIntensities.get(f)))); }
            pw.println(isb);
//System.err.println("isb: " + isb);
            }
        // Append the new record <timestamp> <intensity>.
        pw.print(timestampUTC);
        pw.print(' ');
        pw.println(retailIntensity);
        }
        // Attempt to ensure that the log file is readable by all.
        logFile.setReadable(true, false);
        return(logFile);
        }

    /**Base directory for embeddable intensity buttons/icons; not null.
     * Under 'out' directory of suitable vintage to get correct expiry.
     */
    private static final String DEFAULT_BUTTON_BASE_DIR = "../out/hourly/button/";

    /**Base directory for log of integer gCO2e/kWh intensity values; not null.
     * Under 'data' directory.
     * Intensity values are 'retail', ie as at a typical domestic consumer,
     * after transmission and distribution losses, based on non-embedded
     * generation seen on the GB national grid.
     *
     * The log is line-oriented with lines of the form (no leading spaces)
     * [ISO8601UTCSTAMPTOMIN] [gCO2e/kWh]
     * ie two space-separated columns, eg:
     * # Other comment and one-of-data here.
     * # Time gCO2e/kWh
     * 2019-11-17T16:02Z 352
     * 2019-11-17T16:12Z 351
     *
     * Initial lines may be headers, starting with # in column 1,
     * and may be ignored for data purposes.
     *
     * This may contain repeat records if data is sampled more often
     * than it is updated at the source.
     *
     * Records will not be generated when data is 'stale',
     * ie when fresh data is not available from the source.
     *
     * Log files will be named with the form YYYYMMDD.log
     * eg 20191117.log.
     */
    private static final String DEFAULT_INTENSITY_LOG_BASE_DIR = "../data/FUELINST/log/live/";

    /**Generate the text of the status Tweet.
     * Public to allow testing that returned Tweets are always valid.
     *
     * @param isDataStale true if we are working on historical/predicted (non-live) data
     * @param statusUncapped the uncapped current or predicted status; never null
     * @param retailIntensity intensity in gCO2/kWh as seen by retail customer, non-negative
     * @return human-readable valid Tweet message
     */
    public static String generateTweetMessage(
        final boolean isDataStale,
        final TrafficLight statusUncapped,
        final int retailIntensity) // TODO
        {
        if(null == statusUncapped) { throw new IllegalArgumentException(); }
        final String statusTemplate = MainProperties.getRawProperties().get((isDataStale ?
                TwitterUtils.PNAME_PREFIX_TWITTER_TRAFFICLIGHT_PREDICTION_MESSAGES :
                TwitterUtils.PNAME_PREFIX_TWITTER_TRAFFICLIGHT_STATUS_MESSAGES) + statusUncapped);
        final String tweetMessage = ((statusTemplate != null) && !statusTemplate.isEmpty()) ?
                String.format(statusTemplate, retailIntensity).trim() :
                ("Grid status " + statusUncapped);
        return(tweetMessage);
        }

    /**Extract (immutable) map of descriptive fuel names from configuration information; never null but may be empty.
     * @return map from fuel name to human-readable descriptive name; never null
     */
    public static Map<String, String> getConfiguredFuelNames()
        {
        final Map<String, String> result = new HashMap<String, String>();
        // Have to scan through all keys, which may be inefficient...
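        // Keys are FUELNAME_INTENSITY_MAIN_PROPNAME_PREFIX plus the fuel name; the trimmed value is its human-readable description,
        // and an invalid fuel name fails fast with an exception.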
final Map<String, String> rawProperties = MainProperties.getRawProperties(); for(final String key : rawProperties.keySet()) { if(!key.startsWith(FUELINST.FUELNAME_INTENSITY_MAIN_PROPNAME_PREFIX)) { continue; } final String fuelname = key.substring(FUELINST.FUELNAME_INTENSITY_MAIN_PROPNAME_PREFIX.length()); final String descriptiveName = rawProperties.get(key).trim(); if(!FUEL_NAME_REGEX.matcher(fuelname).matches()) { // Stop things dead if a name is used that may break things later. throw new IllegalArgumentException("Invalid 'fuel' name " + fuelname); } if(descriptiveName.isEmpty()) { continue; } result.put(fuelname, descriptiveName); } return(Collections.unmodifiableMap(result)); } /**Extract (immutable) map from fuel category to set of fuel names; never null but may be empty. * The result only contains keys with non-empty fuelname sets. */ public static Map<String, Set<String>> getFuelsByCategory() { final Map<String, Set<String>> result = new HashMap<String, Set<String>>(); // Have to scan through all keys, which may be inefficient... final Map<String, String> rawProperties = MainProperties.getRawProperties(); for(final String key : rawProperties.keySet()) { if(!key.startsWith(FUELINST.FUELINST_MAIN_PROPPREFIX_STORAGE_TYPES)) { continue; } final String category = key.substring(FUELINST.FUELINST_MAIN_PROPPREFIX_STORAGE_TYPES.length()); final String fuelnames = rawProperties.get(key).trim(); if(fuelnames.isEmpty()) { continue; } final HashSet<String> fuels = new HashSet<String>(Arrays.asList(fuelnames.trim().split(","))); result.put(category, Collections.unmodifiableSet(fuels)); } return(Collections.unmodifiableMap(result)); } /**Extract (immutable) intensity map from configuration information for a given year; never null but may be empty. * @param year if non-null preferred year for intensity and must be [2000,]; * this will use intensity values including the given year if possible, * else the default as for the no-argument call * * <p> * A default undated form such as <code>intensity.fuel.INTEW=0.45</code> is permitted, * in part for backward compatibility. * <p> * Other forms allowed have a suffix of: * <ul> * <li><code>.year</code> the given year, eg <code>intensity.fuel.INTEW.2021=0.45</code></li> * <li>[TODO] <code>.startYear/endYear</code> in given year range, inclusive</li> * <li>[TODO] <code>.startYear/</code> from given year, inclusive</li> * <li>[TODO] <code>./endYear</code> up to given year, inclusive</li> * </ul> * Dates specified must be unique and non-overlapping, * and startYear must not be after endYear. * <p> * This date format is potentially partly extensible to ISO8601 including ranges. * * TODO * * @return map from fuel name to kgCO2/kWh non-negative intensity; never null * */ public static Map<String, Float> getConfiguredIntensities(final Integer year) { final Map<String, Float> result = new HashMap<String, Float>(); // Have to scan through all keys, which may be inefficient... final Map<String, String> rawProperties = MainProperties.getRawProperties(); for(final String key : rawProperties.keySet()) { if(!key.startsWith(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_PREFIX)) { continue; } // Simple verification that fuel name may be valid, else reject. final String keytail = key.substring(FUELINST.FUEL_INTENSITY_MAIN_PROPNAME_PREFIX.length()); if(keytail.length() < 2) { System.err.println("Trivially invalid fuel name " + key); continue; } // Extract fuel name. final String fuel; // Is the whole keytail an unqualified fule name (no date range). 
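        // For example, a key tail may be "INTEW" (undated default) or "INTEW.2021" (single year);
        // slash-separated year ranges such as "INTEW.2019/2021" are also parsed below.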
final boolean isUnqualified = FUELINSTUtils.FUEL_NAME_REGEX.matcher(keytail).matches(); // For the case where year is null, the entire tail must be a valid fuel name. if(year == null) { if(!isUnqualified) { // Cannot use unqualified entry with null argument. continue; } fuel = keytail; } else if(isUnqualified) { // This is a default (no date-range) default value. // Usable with a non-null year iff no value already captured for this fuel. if(!result.containsKey(keytail)) { fuel = keytail; } else { continue; } } else // year != null and this is not an unqualified entry... { // Split key tail in two at '.'. final String parts[] = keytail.split("[.]"); if(2 != parts.length) { System.err.println("Invalid fuel intensity key " + key); continue; } fuel = parts[0]; if(!FUELINSTUtils.FUEL_NAME_REGEX.matcher(fuel).matches()) { System.err.println("Invalid fuel name " + key); continue; } final int y = year; if((y < 2000) || (y >= 3000)) { throw new IllegalArgumentException("bad year " + y); } // Deal with date range cases. final int slashPos = parts[1].indexOf('/'); if(-1 != slashPos) { // Note: // assertEquals(1, "2012/".split("/").length); // assertEquals("2012", "2012/".split("/")[0]); // assertEquals(2, "/2012".split("/").length); // assertEquals("", "/2012".split("/")[0]); // assertEquals("2012", "/2012".split("/")[1]); // assertEquals(2, "2011/2012".split("/").length); // assertEquals("2011", "2011/2012".split("/")[0]); // assertEquals("2012", "2011/2012".split("/")[1]); final String slashParts[] = parts[1].split("/"); if(slashParts.length > 2) { System.err.println("Unable to parse data range for intensity value for " + key); continue; } if(!"".equals(slashParts[0]) && !FUELINSTUtils.FUEL_INTENSITY_YEAR_REGEX.matcher(slashParts[0]).matches()) { System.err.println("Unable to parse data range start for intensity value for " + key); continue; } final short isYear = "".equals(slashParts[0]) ? 0 : Short.parseShort(slashParts[0]); if(isYear > y) { // Range start year is after current year, so does not apply. continue; } if(slashParts.length > 1) { if(!FUELINSTUtils.FUEL_INTENSITY_YEAR_REGEX.matcher(slashParts[1]).matches()) { System.err.println("Unable to parse data range end for intensity value for " + key); continue; } final short ieYear = Short.parseShort(slashParts[1]); if(ieYear < isYear) { System.err.println("Unable to parse data range (start>end) for intensity value for " + key); continue; } if(ieYear < y) { // Range end year is before current year, so does not apply. continue; } } } // Deal with simple fuelname.year case. else if(FUELINSTUtils.FUEL_INTENSITY_YEAR_REGEX.matcher(parts[1]).matches()) { final short iYear = Short.parseShort(parts[1]); if(iYear != y) { continue; } // Wrong year. } } // Reject non-parseable and illegal (eg -ve) values. final Float intensity; try { intensity = new Float(rawProperties.get(key)); } catch(final NumberFormatException e) { System.err.println("Unable to parse kgCO2/kWh intensity value for " + key); continue; } if(!(intensity >= 0) || Float.isInfinite(intensity) || Float.isNaN(intensity)) { System.err.println("Invalid (non-positive) kgCO2/kWh intensity value for " + key); continue; } result.put(fuel, intensity); } return(Collections.unmodifiableMap(result)); } /**Extract (immutable) intensity map from configuration information; never null but may be empty. * This will use the default (eg undated) intensity value for each fuel such as * <code>intensity.fuel.INTEW=0.45</code> * else the latest-dated value. 
* * @return map from each fuel name to kgCO2/kWh non-negative intensity; never null */ @Deprecated public static Map<String, Float> getConfiguredIntensities() { return(getConfiguredIntensities(null)); } /**Fall-back category to assign uncategorised fuels to; single token not null nor empty. */ public static final String UNCATEGORISED_FUELS = "uncategorised"; /**If true, show recent changes in intensity, though they can be very noisy. */ private static final boolean SHOW_INTENSITY_DELTA = false; /**Extract fuel use (in MW) by category from the current summary given the fuels-by-category table; never null but may be empty. * TODO: construct 'uncategorised' component automatically */ public static Map<String,Integer> getFuelMWByCategory(final Map<String,Integer> currentGenerationMWByFuel, final Map<String,Set<String>> fuelByCategory) { if(null == currentGenerationMWByFuel) { throw new IllegalArgumentException(); } if(null == fuelByCategory) { throw new IllegalArgumentException(); } final Map<String,Integer> result = new HashMap<String, Integer>((fuelByCategory.size()*2) + 3); // Construct each category's total generation.... for(final Map.Entry<String, Set<String>> c : fuelByCategory.entrySet()) { final String category = c.getKey(); final Set<String> fuels = c.getValue(); long total = 0; for(final String fuel : fuels) { final Integer q = currentGenerationMWByFuel.get(fuel); if(null == q) { System.err.println("no per-fuel MW value for "+fuel); continue; } if(q < 0) { throw new IllegalArgumentException("invalid negative per-fuel MW value"); } total += q; } // Check for overflow. if(total > Integer.MAX_VALUE) { throw new ArithmeticException("overflow"); } result.put(category, (int) total); } return(Collections.unmodifiableMap(result)); } /**Get a format for the BM timestamps in at least FUELINST data; never null. * A returned instance is not safe to share between threads. */ public static SimpleDateFormat getCSVTimestampParser() { final SimpleDateFormat sDF = new SimpleDateFormat(FUELINSTUtils.CSVTIMESTAMP_FORMAT); sDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All bmreports timestamps are GMT/UTC. return(sDF); } /**Get a format for the BM timestamps in at least FUELINST data; never null. * A returned instance is not safe to share between threads. */ public static SimpleDateFormat getTIBCOTimestampParser() { final SimpleDateFormat sDF = new SimpleDateFormat(FUELINSTUtils.TIBCOTIMESTAMP_FORMAT); sDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. return(sDF); } /**Get a format compact (HH:MM) timestamps; never null. * A returned instance is not safe to share between threads. */ public static SimpleDateFormat getHHMMTimestampParser() { final SimpleDateFormat sDF = new SimpleDateFormat(FUELINSTUtils.HHMMTIMESTAMP_FORMAT); sDF.setTimeZone(FUELINSTUtils.GMT_TIME_ZONE); // All timestamps should be GMT/UTC. return(sDF); } /**Update (atomically if possible) the HTML traffic-light page. */ public static void updateHTMLFile(final long startTime, final String outputHTMLFileName, final FUELINST.CurrentSummary summary24h, final FUELINST.CurrentSummary summary7d, final boolean isDataStale, final int hourOfDayHistorical, final TrafficLight status, final TwitterUtils.TwitterDetails td) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(16384); final PrintWriter w = new PrintWriter(baos); try { final Map<String, String> rawProperties = MainProperties.getRawProperties(); // Write the preamble with the status text dropped in. 
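        // The configured preamble's "<!-- STATUS -->" placeholder is replaced with a coloured status span (or "UNKNOWN"),
        // with a trailing '*' appended when the data is stale.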
final String statusColour = (status == null) ? null : status.toString().toLowerCase(); w.write(rawProperties.get("trafficLightPage.HTML.preamble"). replace("<!-- STATUS -->", (status == null) ? "UNKNOWN" : "<span style=\"color:"+statusColour+";background-color:black\">" + status + "</span>" + (isDataStale ? "*" : "") )); w.println(); if(isDataStale) { w.println("<p><em>*WARNING: cannot obtain current data so this is partly based on predictions from historical data (for "+hourOfDayHistorical+":XX GMT).</em></p>"); } // Write out crude 'lights' with only appropriate lamp lit // and some appropriate text. final int sidePixels = GCOMP_PX_MAX; // Edge length of each 'lamp'. final String open = "<tr><th style=\"border:3px solid;height:"+sidePixels+"px;width:"+((3*sidePixels)/2)+"px"; final String close = "</th></tr>"; w.write("<div><table style=\"margin-left:auto;margin-right:auto\">"); final String weaselWord = isDataStale ? "probably " : ""; w.write(open+((status == TrafficLight.RED) ? ";background-color:red\">Grid carbon intensity is "+weaselWord+"high; please do not run big appliances such as a dishwasher or washing machine now if you can postpone" : "\">&nbsp;")+close); w.write(open+((status == TrafficLight.YELLOW) ? ";background-color:yellow\">Grid is "+weaselWord+"OK; but you could still avoid CO2 emissions by postponing running big appliances such as dishwashers or washing machines" : ((status == null) ? "\">Status is unknown" : "\">&nbsp;"))+close); w.write(open+((status == TrafficLight.GREEN) ? ";background-color:green\">Grid is "+weaselWord+"good; you might run major loads such as your dishwasher and/or washing machine now to minimise CO2 emissions" : "\">&nbsp;")+close); w.write("</table></div>"); w.println(); // Note very gently when the 7d status view is different. if(summary24h.status != summary7d.status) { w.println("<p style=\"text-align:center\">(Over a longer period, the current status is "+summary7d.status+".)</p>"); } // Note carbon savings that were available. if(summary24h.histMinIntensity < summary24h.histMaxIntensity) { w.println("<p style=\"text-align:center\">You might have saved as much as <strong style=\"font-size:xx-large\">"+FUELINSTUtils.computeVariability(summary24h.histMinIntensity, summary24h.histMaxIntensity)+"%</strong> carbon emissions by choosing the best time to run your washing and other major loads.</p>"); } // Note any recent change/delta iff the data is not stale. if(SHOW_INTENSITY_DELTA) { if(!isDataStale) { if(summary24h.recentChange == TrafficLight.GREEN) { w.println("<p style=\"color:green\">Good: carbon intensity (CO2 per kWh) is currently dropping.</p>"); } else if(summary24h.recentChange == TrafficLight.RED) { w.println("<p style=\"color:red\">Bad: carbon intensity (CO2 per kWh) is currently rising.</p>"); } } } w.println("<p>Latest data is from <strong>"+(new Date(summary24h.timestamp))+"</strong>. This page should be updated every few minutes: use your browser's refresh/reload button if you need to check again.</p>"); // If we have a Twitter account set up then brag about it here, // but only if we believe that we actually have write access to be doing updates... if(td != null) { w.print("<p>Follow this grid status on Twitter <a href=\"http://twitter.com/"); w.print(td.username); w.print("\">@"); w.print(td.username); w.print("</a>"); w.println(".</p>"); } // A bit of explanation... w.println(rawProperties.get("trafficLightPage.HTML.midamble")); // ------------------------------------------------------ // Now for the numbers... 
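// Effective end-user intensity below = generation intensity * (1 + totalGridLosses);
// eg (illustrative numbers) 400gCO2/kWh of generation with 8% losses displays as round(400 * 1.08) = 432gCO2/kWh.
// When the data is stale the recent historical mean (histAveIntensity) is substituted for the live value.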
w.println("<h2>Technical Stuff</h2><p>You don't need to understand the numbers below, but some people like to see them!</p>"); // Replace estimate of end-user intensity with recent historical mean if the data is stale. w.write("<p>"); w.write(isDataStale ? "Recent effective carbon intensity for a domestic user at this time of day was " : "Effective grid carbon intensity for a domestic user is currently "); if(null != status) { w.write("<span style=\"font-size:xx-large;color:"+statusColour+";background-color:black\">"); } w.write(String.valueOf(Math.round((isDataStale ? summary24h.histAveIntensity : summary24h.currentIntensity) * (1 + summary24h.totalGridLosses)))); w.write("gCO2/kWh"); if(null != status) { w.write("</span>"); } w.write(" including transmission and distribution losses of "); w.write(String.valueOf(Math.round(100 * summary24h.totalGridLosses))); w.write("%.</p>"); w.println(); w.println("<p>Latest available grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) is approximately <strong>"+summary24h.currentIntensity+"gCO2/kWh</strong> at "+(new Date(summary24h.timestamp))+" over "+ summary24h.currentMW+"MW of generation, with a rolling average over "+((summary24h.histWindowSize+1800000) / 3600000)+"h of <strong>"+summary24h.histAveIntensity+"gCO2/kWh</strong>.</p>"); w.println("<p>Minimum grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) was approximately <strong>"+summary24h.histMinIntensity+"gCO2/kWh</strong> at "+(new Date(summary24h.minIntensityRecordTimestamp))+".</p>"); w.println("<p>Maximum grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) was approximately <strong>"+summary24h.histMaxIntensity+"gCO2/kWh</strong> at "+(new Date(summary24h.maxIntensityRecordTimestamp))+".</p>"); w.println("<p>Average/mean grid <strong>generation</strong> carbon intensity (ignoring transmission/distribution losses) was approximately <strong>"+summary24h.histAveIntensity+"gCO2/kWh</strong> over the sample data set, with an effective end-user intensity including transmission and distribution losses of <strong>"+(Math.round(summary24h.histAveIntensity * (1 + summary24h.totalGridLosses)))+"gCO2/kWh</strong>.</p>"); // Intensity (and generation) by hour of day. final int newSlot = FUELINST.CurrentSummary.getGMTHourOfDay(startTime); w.write("<div><table style=\"margin-left:auto;margin-right:auto\">"); w.write("<tr><th colspan=\"24\">"); w.write(isDataStale ? "Last available historical" : "Recent"); w.write(" mean GMT hourly generation intensity gCO2/kWh (average="+summary24h.histAveIntensity+"); *now (="+summary24h.currentIntensity+")</th></tr>"); w.write("<tr>"); // Always start at midnight GMT if the data is stale. final int startSlot = isDataStale ? 
0 : (1 + Math.max(0, newSlot)) % 24; for(int h = 0; h < 24; ++h) { final StringBuffer sbh = new StringBuffer(2); final int displayHourGMT = (h + startSlot) % 24; sbh.append(displayHourGMT); if(sbh.length() < 2) { sbh.insert(0, '0'); } if(hourOfDayHistorical == displayHourGMT) { sbh.append('*'); } w.write("<th style=\"border:1px solid\">"+sbh+"</th>"); } w.write("</tr>"); w.write("<tr>"); boolean usedLessGreen = false; final int maxHourlyIntensity = summary24h.histAveIntensityByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final int displayHourGMT = (h + startSlot) % 24; final Integer hIntensity = summary24h.histAveIntensityByHourOfDay.get(displayHourGMT); if((null == hIntensity) || (0 == hIntensity)) { w.write("<td></td>"); continue; /* Skip empty slot. */ } final TrafficLight rawHourStatus = summary24h.selectColour(hIntensity); // But if the colour is GREEN but we're using pumped storage // then switch to a paler shade instead (ie mainly green, but not fully)... final boolean lessGreen = ((TrafficLight.GREEN == rawHourStatus) && (summary24h.histAveStorageDrawdownByHourOfDay.get(displayHourGMT) > 0)); if(lessGreen) { usedLessGreen = true; } final String barColour = lessGreen ? FUELINSTUtils.LESS_GREEN_STORAGE_DRAWDOWN : rawHourStatus.toString().toLowerCase(); final int height = (GCOMP_PX_MAX*hIntensity) / Math.max(1, maxHourlyIntensity); w.write("<td style=\"width:30px\"><ul class=\"barGraph\">"); w.write("<li style=\"background-color:"+barColour+";height:"+height+"px;left:0\">"); w.write(String.valueOf(hIntensity)); w.write("</li>"); w.write("</ul></td>"); } w.write("</tr>"); w.write("<tr><th colspan=\"24\">Mean GMT hourly generation GW (<span style=\"color:gray\">all</span>, <span style=\"color:green\">zero-carbon</span>)</th></tr>"); w.write("<tr>"); // Compute the maximum generation in any of the hourly slots // to give us maximum scaling of the displayed bars. final int maxGenerationMW = summary24h.histAveGenerationByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final int displayHourGMT = (h + startSlot) % 24; final Integer hGeneration = summary24h.histAveGenerationByHourOfDay.get(displayHourGMT); if((null == hGeneration) || (0 == hGeneration)) { w.write("<td></td>"); continue; /* Skip empty slot. 
*/ } final int height = (GCOMP_PX_MAX*hGeneration) / Math.max(1, maxGenerationMW); final int scaledToGW = (hGeneration + 500) / 1000; w.write("<td style=\"width:30px\"><ul class=\"barGraph\">"); w.write("<li style=\"background-color:gray;height:"+height+"px;left:0\">"); w.write(String.valueOf(scaledToGW)); w.write("</li>"); final int hZCGeneration = summary24h.histAveZCGenerationByHourOfDay.get0(displayHourGMT); if(0 != hZCGeneration) { w.write("<li style=\"background-color:green;height:"+((GCOMP_PX_MAX*hZCGeneration) / Math.max(1, maxGenerationMW))+"px;left:0\">"); if(hZCGeneration >= (maxGenerationMW/8)) { w.write(String.valueOf((hZCGeneration + 500) / 1000)); } w.write("</li>"); } // final int hDrawdown = summary.histAveStorageDrawdownByHourOfDay.get0(displayHourGMT); // if(0 != hDrawdown) // { // w.write("<li style=\"background-color:yellow;height:"+((GCOMP_PX_MAX*hDrawdown) / Math.max(1, maxGenerationMW))+"px;left:0px;\">"); // if(hDrawdown >= maxGenerationMW/8) { w.write(String.valueOf((hDrawdown + 500) / 1000)); } // w.write("</li>"); // } w.write("</ul></td>"); } w.write("</tr>"); w.write("</table></div>"); w.println(); // Footnotes if(usedLessGreen) { w.println("<p>Hours that are basically <span style=\"color:green\">green</span>, but in which there is draw-down from grid-connected storage with its attendant energy losses and also suggesting that little or no excess non-dispatchable generation is available, ie that are marginally green, are shaded <span style=\"color:"+FUELINSTUtils.LESS_GREEN_STORAGE_DRAWDOWN+"\">"+FUELINSTUtils.LESS_GREEN_STORAGE_DRAWDOWN+"</span>.</p>"); } // TODO: Show cumulative MWh and tCO2. if(!isDataStale) { // Show some stats only relevant for live data... w.write("<p>Current/latest fuel mix at "); w.write(String.valueOf(new Date(summary24h.timestamp))); w.write(':'); final SortedMap<String,Integer> power = new TreeMap<String, Integer>(summary24h.currentGenerationMWByFuelMW); for(final String fuel : power.keySet()) { w.write(' '); w.write(fuel); w.write("@"+power.get(fuel)+"MW"); } w.write(".</p>"); w.println(); if(summary24h.currentStorageDrawdownMW > 0) { w.write("<p>Current draw-down from storage is "); w.write(Long.toString(summary24h.currentStorageDrawdownMW)); w.write("MW.</p>"); w.println(); } // Show fuels broken down by category, if categories are assigned. final Map<String, Set<String>> byCategory = getFuelsByCategory(); if(!byCategory.isEmpty()) { final Map<String,Integer> byCat = getFuelMWByCategory(summary24h.currentGenerationMWByFuelMW, byCategory); w.write("<p>Generation by fuel category (may overlap):</p><dl>"); final SortedMap<String,Integer> powerbyCat = new TreeMap<String, Integer>(byCat); for(final String category : powerbyCat.keySet()) { final Integer genMW = powerbyCat.get(category); final int percent = Math.round((100.0f * genMW) / Math.max(1, summary24h.currentMW)); w.write("<dt>"); w.write(category); w.write(" @ "); w.write(Integer.toString(percent)); w.write("%</dt>"); w.write("<dd>"); // Write MW under this category. w.write(String.valueOf(genMW)); w.write("MW"); // Write sorted fuel list... 
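// The TreeSet sorts the category's fuel codes alphabetically and ArrayList.toString() renders them
// as eg "[CCGT, COAL, OCGT]" (the fuel codes shown here are purely illustrative).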
w.write(" "); w.write((new ArrayList<String>(new TreeSet<String>(byCategory.get(category)))).toString()); w.write(""); w.write("</dd>"); } w.write("</dl>"); w.println(); } } final LocalDate todayUTC = LocalDate.now(ZoneOffset.UTC); final int intensityYear = todayUTC.getYear(); w.write("<p>Overall generation intensity (kgCO2/kWh) computed using the following fuel year-"+intensityYear+" intensities (other fuels/sources are ignored):"); final Map<String, Float> configuredIntensities = FUELINSTUtils.getConfiguredIntensities(intensityYear); final SortedMap<String,Float> intensities = new TreeMap<String, Float>(FUELINSTUtils.getConfiguredIntensities(intensityYear)); for(final String fuel : intensities.keySet()) { w.write(' '); w.write(fuel); w.write("="+intensities.get(fuel)); } w.write(".</p>"); w.println(); w.write("<p>Rolling correlation of fuel use against grid intensity (-ve implies that this fuel reduces grid intensity for non-callable sources):"); final SortedMap<String,Float> goodness = new TreeMap<String, Float>(summary24h.correlationIntensityToFuel); for(final String fuel : goodness.keySet()) { w.format(" %s=%.4f", fuel, goodness.get(fuel)); } w.write(".</p>"); w.println(); // Key for fuel names/codes if available. final SortedMap<String,String> fullFuelNames = new TreeMap<String,String>(FUELINSTUtils.getConfiguredFuelNames()); if(!fullFuelNames.isEmpty()) { w.write("<p>Key to fuel codes:</p><dl>"); for(final String fuel : fullFuelNames.keySet()) { w.write("<dt>"); w.write(fuel); w.write("</dt>"); w.write("<dd>"); w.write(fullFuelNames.get(fuel)); w.write("</dd>"); } w.write("</dl>"); w.println(); } // Some coverage information from the summaries. w.write("<p>(Histogram input windows: "); w.write(Long.toString((summary24h.histWindowSize + (1800*1000)) / (3600*1000))); w.write("h, "); w.write(Long.toString((summary7d.histWindowSize + (1800*1000)) / (3600*1000))); w.write("h"); w.write(".)</p>"); w.println(); w.println("<h3>Methodology</h3>"); w.println(rawProperties.get("methodology.HTML")); w.println("<p>This page updated at "+(new Date())+"; generation time "+(System.currentTimeMillis()-startTime)+"ms.</p>"); w.println(rawProperties.get("trafficLightPage.HTML.postamble")); w.flush(); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of HTML page... DataUtils.replacePublishedFile(outputHTMLFileName, baos.toByteArray()); } /**Update (atomically if possible) the plain-text bare gCO2e/kWh intensity value. * The file will be removed if the data is stale. * Predicted values are not published, only live fresh ones. */ static void updateTXTFile(final long startTime, final String outputTXTFileName, final CurrentSummary summary, final boolean isDataStale) throws IOException { // In case of stale/missing data remove any result file. if(isDataStale || (null == summary)) { (new File(outputTXTFileName)).delete(); return; } final ByteArrayOutputStream baos = new ByteArrayOutputStream(16384); final PrintWriter w = new PrintWriter(baos); try { w.write(String.valueOf(Math.round(summary.currentIntensity * (1 + summary.totalGridLosses)))); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of HTML page... DataUtils.replacePublishedFile(outputTXTFileName, baos.toByteArray()); } /**Update (atomically if possible) the mobile-friendly XHTML traffic-light page. * The generated page is designed to be very light-weight * and usable by a mobile phone (eg as if under the .mobi TLD). 
*/ static void updateXHTMLFile(final long startTime, final String outputXHTMLFileName, final FUELINST.CurrentSummary summary, final boolean isDataStale, final int hourOfDayHistorical, final TrafficLight status) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(8192); final PrintWriter w = new PrintWriter(baos); try { final Map<String, String> rawProperties = MainProperties.getRawProperties(); w.println(rawProperties.get("trafficLightPage.XHTML.preamble")); w.println("<div style=\"background:"+((status == null) ? "gray" : status.toString().toLowerCase())+"\">"); final String weaselWord = isDataStale ? "probably " : ""; if(status == TrafficLight.RED) { w.println("Status RED: grid carbon intensity is "+weaselWord+"high; please do not run big appliances such as a dishwasher or washing machine now if you can postpone."); } else if(status == TrafficLight.GREEN) { w.println("Status GREEN: grid is "+weaselWord+"good; run appliances now to minimise CO2 emissions."); } else if(status == TrafficLight.YELLOW) { w.println("Status YELLOW: grid is "+weaselWord+"OK; but you could still avoid CO2 emissions by postponing running big appliances such as dishwashers or washing machines."); } else { w.println("Grid status is UNKNOWN."); } w.println("</div>"); if(isDataStale) { w.println("<p><em>*WARNING: cannot obtain current data so this is partly based on predictions from historical data (for "+hourOfDayHistorical+":XX GMT).</em></p>"); } w.println("<p>This page updated at "+(new Date())+".</p>"); w.println(rawProperties.get("trafficLightPage.XHTML.postamble")); w.flush(); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of XHTML page... DataUtils.replacePublishedFile(outputXHTMLFileName, baos.toByteArray()); } /**Update (atomically if possible) the XML traffic-light data dump. * Dumps current-year (at time call is run) fuel intensities. */ public static void updateXMLFile(final long startTime, final String outputXMLFileName, final FUELINST.CurrentSummary summary, final boolean isDataStale, final int hourOfDayHistorical, final TrafficLight status) throws IOException { final ByteArrayOutputStream baos = new ByteArrayOutputStream(16384); final PrintWriter w = new PrintWriter(baos); try { // final Map<String, String> rawProperties = MainProperties.getRawProperties(); w.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>"); w.println("<results>"); if(isDataStale) { w.println("<warning>*WARNING: cannot obtain current data so this is partly based on predictions from historical data (for "+hourOfDayHistorical+":XX GMT).</warning>"); } w.println("<stale_data>"+isDataStale+"</stale_data>"); // if(status == TrafficLight.RED) // { w.println("<status>1</status>"); } // else if(status == TrafficLight.YELLOW) // { w.println("<status>0</status>"); } // else if(status == TrafficLight.GREEN) // { w.println("<status>-1</status>"); } w.print("<status>"); if(null != status) { w.print(status); } w.println("</status>"); if(summary.histMinIntensity < summary.histMaxIntensity) { w.println("<saving>"+FUELINSTUtils.computeVariability(summary.histMinIntensity, summary.histMaxIntensity)+"</saving>"); } // Note any recent change/delta if the data is not stale. 
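// GREEN here means that intensity has recently been falling, RED that it has been rising;
// the element is omitted when there is no clear recent change or when the data is stale.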
if(!isDataStale) { if(summary.recentChange == TrafficLight.GREEN) // { w.println("<carbon_intensity>-1</carbon_intensity>"); } { w.println("<carbon_intensity>GREEN</carbon_intensity>"); } else if(summary.recentChange == TrafficLight.RED) // { w.println("<carbon_intensity>1</carbon_intensity>"); } { w.println("<carbon_intensity>RED</carbon_intensity>"); } } w.println("<timestamp>"+summary.timestamp+"</timestamp>"); w.write("<grid_carbon_intensity>"); w.write(String.valueOf(Math.round((isDataStale ? summary.histAveIntensity : summary.currentIntensity) * (1 + summary.totalGridLosses)))); w.write("</grid_carbon_intensity>"); w.println(); w.write("<transmission_losses>"); w.write(String.valueOf(Math.round(100 * summary.totalGridLosses))); w.write("</transmission_losses>"); w.println(); w.println("<latest>"); w.println("<carbon_intensity>"+ summary.currentIntensity +"</carbon_intensity>"); w.println("<timestamp>"+ summary.timestamp +"</timestamp>"); w.println("<generation>"+ summary.currentMW+"</generation>"); w.println("<rolling_average_period>"+((summary.histWindowSize+1800000) / 3600000)+"</rolling_average_period>"); w.println("<rolling_average_carbon_intensity>"+ summary.histAveIntensity+"</rolling_average_carbon_intensity>"); w.println("</latest>"); w.println("<minimum>"); w.println("<carbon_intensity>"+ summary.histMinIntensity +"</carbon_intensity>"); w.println("<timestamp>"+ summary.minIntensityRecordTimestamp +"</timestamp>"); w.println("</minimum>"); w.println("<maximum>"); w.println("<carbon_intensity>"+ summary.histMaxIntensity +"</carbon_intensity>"); w.println("<timestamp>"+ summary.maxIntensityRecordTimestamp +"</timestamp>"); w.println("</maximum>"); // Intensity (and generation) by hour of day. final int newSlot = FUELINST.CurrentSummary.getGMTHourOfDay(startTime); w.println("<generation_intensity>"); w.println("<average>"+summary.histAveIntensity+"</average>"); w.println("<current>"+summary.currentIntensity+"</current>"); // Always start at midnight GMT if the data is stale. final int startSlot = isDataStale ? 0 : (1 + Math.max(0, newSlot)) % 24; // final int maxHourlyIntensity = summary.histAveIntensityByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final StringBuffer sbh = new StringBuffer(2); final int displayHourGMT = (h + startSlot) % 24; sbh.append(displayHourGMT); if(sbh.length() < 2) { sbh.insert(0, '0'); } final Integer hIntensity = summary.histAveIntensityByHourOfDay.get(displayHourGMT); w.println("<sample>"); w.println("<hour>"+sbh+"</hour>"); w.println("<carbon_intensity>"); if((null == hIntensity) || (0 == hIntensity)) { /* Empty slot. */ } else { w.println(String.valueOf(hIntensity)); } w.println("</carbon_intensity>"); w.println("</sample>"); } w.println("</generation_intensity>"); w.println("<generation>"); // Compute the maximum generation in any of the hourly slots // to give us maximum scaling of the displayed bars. final int maxGenerationMW = summary.histAveGenerationByHourOfDay.max0(); for(int h = 0; h < 24; ++h) { final int displayHourGMT = (h + startSlot) % 24; final StringBuffer sbh = new StringBuffer(2); sbh.append(displayHourGMT); if(sbh.length() < 2) { sbh.insert(0, '0'); } final Integer hGeneration = summary.histAveGenerationByHourOfDay.get(displayHourGMT); if((null == hGeneration) || (0 == hGeneration)) { continue; /* Skip empty slot. 
*/ } // final int height = (GCOMP_PX_MAX*hGeneration) / Math.max(1, maxGenerationMW); final int scaledToGW = (hGeneration + 500) / 1000; w.println("<sample>"); w.println("<hour>"+sbh+"</hour>"); w.println("<all>"+String.valueOf(scaledToGW)+"</all>"); final int hZCGeneration = summary.histAveZCGenerationByHourOfDay.get0(displayHourGMT); if(0 != hZCGeneration) { if(hZCGeneration >= (maxGenerationMW/8)) { w.println("<zero_carbon>"+String.valueOf((hZCGeneration + 500) / 1000)+"</zero_carbon>"); } } w.println("</sample>"); } w.println("</generation>"); // TODO: Show cumulative MWh and tCO2. // FIXME: DHD20090608: I suggest leaving the fuel names as-is (upper case) in the XML as those are the 'formal' Elexon names; convert for display if need be. // FIXME: DHD20090608: As fuel names may not always be XML-token-safe, maybe <fuel name="NNN">amount</fuel> would be better? if(!isDataStale) { w.println("<fuel_mix>"); w.println("<timestamp>"+summary.timestamp+"</timestamp>"); final SortedMap<String,Integer> power = new TreeMap<String, Integer>(summary.currentGenerationMWByFuelMW); for(final String fuel : power.keySet()) { w.println("<"+fuel.toLowerCase()+">"+power.get(fuel)+"</"+fuel.toLowerCase()+">"); } w.println("</fuel_mix>"); } w.println("<fuel_intensities>"); w.println("<timestamp>"+summary.timestamp+"</timestamp>"); // Note: current-year intensities are used. final LocalDate todayUTC = LocalDate.now(ZoneOffset.UTC); final int intensityYear = todayUTC.getYear(); final SortedMap<String,Float> intensities = new TreeMap<String, Float>(FUELINSTUtils.getConfiguredIntensities(intensityYear)); for(final String fuel : intensities.keySet()) { w.println("<"+fuel.toLowerCase()+">"+intensities.get(fuel)+"</"+fuel.toLowerCase()+">"); } w.println("</fuel_intensities>"); w.println("</results>"); w.flush(); } finally { w.close(); /* Ensure file is flushed/closed. Release resources. */ } // Attempt atomic replacement of XML page... DataUtils.replacePublishedFile(outputXMLFileName, baos.toByteArray()); } }
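The year-qualified key handling in getConfiguredIntensities is the most intricate piece of configuration parsing in this file, so a compact standalone sketch may help. It restates the selection idea only, under simplified rules: the last matching dated entry wins, and the original's regex validation and error reporting are omitted. The class name IntensityKeyDemo, the helper intensityFor and the COAL.* keys are invented for illustration and are not part of this codebase.

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.regex.Pattern;

/**Simplified, illustrative re-statement of the dated fuel-intensity key selection; not part of the original source. */
public final class IntensityKeyDemo
    {
    /**Matches a plain four-digit year. */
    private static final Pattern YEAR = Pattern.compile("[0-9]{4}");

    /**Return the intensity applying to the given fuel in the given year, or null if none. */
    static Float intensityFor(final Map<String, Float> raw, final String fuel, final int year)
        {
        Float dated = null;
        Float fallback = null;
        for(final Map.Entry<String, Float> e : raw.entrySet())
            {
            final String key = e.getKey();
            if(key.equals(fuel)) { fallback = e.getValue(); continue; } // Undated default.
            if(!key.startsWith(fuel + ".")) { continue; } // Some other fuel.
            final String spec = key.substring(fuel.length() + 1);
            if(YEAR.matcher(spec).matches())
                { if(Integer.parseInt(spec) == year) { dated = e.getValue(); } continue; } // Single year.
            final int slash = spec.indexOf('/');
            if(-1 == slash) { continue; } // Unparseable qualifier: ignore.
            final String start = spec.substring(0, slash);
            final String end = spec.substring(slash + 1);
            final int s = start.isEmpty() ? 0 : Integer.parseInt(start); // Open start allowed.
            final int f = end.isEmpty() ? 9999 : Integer.parseInt(end); // Open end allowed.
            if((year >= s) && (year <= f)) { dated = e.getValue(); } // Inclusive range.
            }
        return (dated != null) ? dated : fallback; // A dated match beats the undated default.
        }

    public static void main(final String[] args)
        {
        final Map<String, Float> raw = new LinkedHashMap<String, Float>();
        raw.put("COAL", 0.91f);        // Undated default.
        raw.put("COAL.2015", 0.90f);   // Applies to 2015 only.
        raw.put("COAL.2016/", 0.88f);  // Open-ended range from 2016 onwards.
        System.out.println(intensityFor(raw, "COAL", 2014)); // 0.91 (falls back to the default)
        System.out.println(intensityFor(raw, "COAL", 2015)); // 0.9
        System.out.println(intensityFor(raw, "COAL", 2020)); // 0.88
        }
    }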
More diagnostic logging.
javasrc/org/hd/d/edh/FUELINSTUtils.java
More diagnostic logging.
Java
apache-2.0
db01076d20206137b4f8cfbbf5af71ca6ecdca31
0
darranl/directory-shared
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.ldap.name; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import javax.naming.CompoundName; import javax.naming.InvalidNameException; import javax.naming.Name; import javax.naming.NamingException; import javax.naming.ldap.LdapName; import org.apache.directory.shared.ldap.name.LdapDN; import org.apache.directory.shared.ldap.name.LdapDnParser; import org.apache.directory.shared.ldap.name.Rdn; import org.apache.directory.shared.ldap.schema.normalizers.DeepTrimToLowerNormalizer; import org.apache.directory.shared.ldap.schema.normalizers.OidNormalizer; import org.apache.directory.shared.ldap.util.StringTools; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; import static org.junit.Assert.assertSame; /** * Test the class LdapDN * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> * @version $Rev$, $Date$, */ public class LdapDNTest { private Map<String, OidNormalizer> oids; private Map<String, OidNormalizer> oidOids; /** * Initialize OIDs maps for normalization */ @Before public void initMapOids() { oids = new HashMap<String, OidNormalizer>(); oids.put( "dc", new OidNormalizer( "dc", new DeepTrimToLowerNormalizer() ) ); oids.put( "domaincomponent", new OidNormalizer( "dc", new DeepTrimToLowerNormalizer() ) ); oids.put( "0.9.2342.19200300.100.1.25", new OidNormalizer( "dc", new DeepTrimToLowerNormalizer() ) ); oids.put( "ou", new OidNormalizer( "ou", new DeepTrimToLowerNormalizer() ) ); oids.put( "organizationalUnitName", new OidNormalizer( "ou", new DeepTrimToLowerNormalizer() ) ); oids.put( "2.5.4.11", new OidNormalizer( "ou", new DeepTrimToLowerNormalizer() ) ); // Another map where we store OIDs instead of names. 
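// eg normalizing "ou = foo" against this oidOids map yields "2.5.4.11=foo" (numeric OID as the
// attribute type), whereas the oids map above keeps the short name, yielding "ou=foo".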
oidOids = new HashMap<String, OidNormalizer>(); oidOids.put( "dc", new OidNormalizer( "0.9.2342.19200300.100.1.25", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "domaincomponent", new OidNormalizer( "0.9.2342.19200300.100.1.25", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "0.9.2342.19200300.100.1.25", new OidNormalizer( "0.9.2342.19200300.100.1.25", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "ou", new OidNormalizer( "2.5.4.11", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "organizationalUnitName", new OidNormalizer( "2.5.4.11", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "2.5.4.11", new OidNormalizer( "2.5.4.11", new DeepTrimToLowerNormalizer() ) ); } // ~ Methods // ------------------------------------------------------------------------------------ // CONSTRUCTOR functions -------------------------------------------------- /** * Test a null DN */ @Test public void testLdapDNNull() { LdapDN dn = new LdapDN(); assertEquals( "", dn.getUpName() ); assertTrue( dn.isEmpty() ); } /** * test an empty DN */ @Test public void testLdapDNEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "" ); assertEquals( "", dn.getUpName() ); assertTrue( dn.isEmpty() ); } /** * test a simple DN : a = b */ @Test public void testLdapDNSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); assertTrue( LdapDN.isValid( "a = b" ) ); assertEquals( "a = b", dn.getUpName() ); assertEquals( "a=b", dn.toString() ); } /** * test a simple DN with some spaces : "a = b " */ @Test public void testLdapDNSimpleWithSpaces() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b " ); assertTrue( LdapDN.isValid( "a = b " ) ); assertEquals( "a = b ", dn.getUpName() ); assertEquals( "a=b", dn.toString() ); } /** * test a composite DN : a = b, d = e */ @Test public void testLdapDNComposite() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b, c = d" ); assertTrue( LdapDN.isValid( "a = b, c = d" ) ); assertEquals( "a=b,c=d", dn.toString() ); assertEquals( "a = b, c = d", dn.getUpName() ); } /** * test a composite DN with spaces : a = b , d = e */ @Test public void testLdapDNCompositeWithSpaces() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b , c = d" ); assertTrue( LdapDN.isValid( "a = b , c = d" ) ); assertEquals( "a=b,c=d", dn.toString() ); assertEquals( "a = b , c = d", dn.getUpName() ); } /** * test a composite DN with or without spaces: a=b, a =b, a= b, a = b, a = b */ @Test public void testLdapDNCompositeWithSpace() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, a =b, a= b, a = b, a = b" ); assertTrue( LdapDN.isValid( "a=b, a =b, a= b, a = b, a = b" ) ); assertEquals( "a=b,a=b,a=b,a=b,a=b", dn.toString() ); assertEquals( "a=b, a =b, a= b, a = b, a = b", dn.getUpName() ); } /** * test a composite DN with differents separators : a=b;c=d,e=f It should * return a=b,c=d,e=f (the ';' is replaced by a ',') */ @Test public void testLdapDNCompositeSepators() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b;c=d,e=f" ); assertTrue( LdapDN.isValid( "a=b;c=d,e=f" ) ); assertEquals( "a=b,c=d,e=f", dn.toString() ); assertEquals( "a=b;c=d,e=f", dn.getUpName() ); } /** * test a simple DN with multiple NameComponents : a = b + c = d */ @Test public void testLdapDNSimpleMultivaluedAttribute() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b + c = d" ); assertTrue( LdapDN.isValid( "a = b + c = d" ) ); assertEquals( "a=b+c=d", dn.toString() ); assertEquals( "a = b + c = d", dn.getUpName() ); } /** * test a composite DN with 
multiple NC and separators : a=b+c=d, e=f + g=h + * i=j */ @Test public void testLdapDNCompositeMultivaluedAttribute() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b+c=d, e=f + g=h + i=j" ); assertTrue( LdapDN.isValid( "a=b+c=d, e=f + g=h + i=j" ) ); assertEquals( "a=b+c=d,e=f+g=h+i=j", dn.toString() ); assertEquals( "a=b+c=d, e=f + g=h + i=j", dn.getUpName() ); } /** * Test to see if a DN with multiRdn values is preserved after an addAll. */ @Test public void testAddAllWithMultivaluedAttribute() throws InvalidNameException { LdapDN dn = new LdapDN( "cn=Kate Bush+sn=Bush,ou=system" ); LdapDN target = new LdapDN(); assertTrue( LdapDN.isValid( "cn=Kate Bush+sn=Bush,ou=system" ) ); target.addAll( target.size(), dn ); assertEquals( "cn=Kate Bush+sn=Bush,ou=system", target.toString() ); assertEquals( "cn=Kate Bush+sn=Bush,ou=system", target.getUpName() ); } /** * test a simple DN with an oid prefix (uppercase) : OID.12.34.56 = azerty */ @Test public void testLdapDNOidUpper() throws InvalidNameException { LdapDN dn = new LdapDN( "OID.12.34.56 = azerty" ); assertTrue( LdapDN.isValid( "OID.12.34.56 = azerty" ) ); assertEquals( "oid.12.34.56=azerty", dn.toString() ); assertEquals( "OID.12.34.56 = azerty", dn.getUpName() ); } /** * test a simple DN with an oid prefix (lowercase) : oid.12.34.56 = azerty */ @Test public void testLdapDNOidLower() throws InvalidNameException { LdapDN dn = new LdapDN( "oid.12.34.56 = azerty" ); assertTrue( LdapDN.isValid( "oid.12.34.56 = azerty" ) ); assertEquals( "oid.12.34.56=azerty", dn.toString() ); assertEquals( "oid.12.34.56 = azerty", dn.getUpName() ); } /** * test a simple DN with an oid attribut without oid prefix : 12.34.56 = * azerty */ @Test public void testLdapDNOidWithoutPrefix() throws InvalidNameException { LdapDN dn = new LdapDN( "12.34.56 = azerty" ); assertTrue( LdapDN.isValid( "12.34.56 = azerty" ) ); assertEquals( "12.34.56=azerty", dn.toString() ); assertEquals( "12.34.56 = azerty", dn.getUpName() ); } /** * test a composite DN with an oid attribut wiithout oid prefix : 12.34.56 = * azerty; 7.8 = test */ @Test public void testLdapDNCompositeOidWithoutPrefix() throws InvalidNameException { LdapDN dn = new LdapDN( "12.34.56 = azerty; 7.8 = test" ); assertTrue( LdapDN.isValid( "12.34.56 = azerty; 7.8 = test" ) ); assertEquals( "12.34.56=azerty,7.8=test", dn.toString() ); assertEquals( "12.34.56 = azerty; 7.8 = test", dn.getUpName() ); } /** * test a simple DN with pair char attribute value : a = \,\=\+\<\>\#\;\\\"\C4\8D" */ @Test public void testLdapDNPairCharAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = \\,\\=\\+\\<\\>\\#\\;\\\\\\\"\\C4\\8D" ); assertTrue( LdapDN.isValid( "a = \\,\\=\\+\\<\\>\\#\\;\\\\\\\"\\C4\\8D" ) ); assertEquals( "a=\\,=\\+\\<\\>#\\;\\\\\\\"\u010D", dn.toString() ); assertEquals( "a = \\,\\=\\+\\<\\>\\#\\;\\\\\\\"\\C4\\8D", dn.getUpName() ); } /** * test a simple DN with pair char attribute value : "SN=Lu\C4\8Di\C4\87" */ @Test public void testLdapDNRFC253_Lucic() throws InvalidNameException { LdapDN dn = new LdapDN( "SN=Lu\\C4\\8Di\\C4\\87" ); assertTrue( LdapDN.isValid( "SN=Lu\\C4\\8Di\\C4\\87" ) ); assertEquals( "sn=Lu\u010Di\u0107", dn.toString() ); assertEquals( "SN=Lu\\C4\\8Di\\C4\\87", dn.getUpName() ); } /** * test a simple DN with hexString attribute value : a = #0010A0AAFF */ @Test public void testLdapDNHexStringAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = #0010A0AAFF" ); assertTrue( LdapDN.isValid( "a = #0010A0AAFF" ) ); assertEquals( 
"a=#0010A0AAFF", dn.toString() ); assertEquals( "a = #0010A0AAFF", dn.getUpName() ); } /** * test a simple DN with a # on first position */ @Test public void testLdapDNSharpFirst() throws InvalidNameException, NamingException { LdapDN dn = new LdapDN( "a = \\#this is a sharp" ); assertTrue( LdapDN.isValid( "a = \\#this is a sharp" ) ); assertEquals( "a=\\#this is a sharp", dn.toString() ); assertEquals( "a = \\#this is a sharp", dn.getUpName() ); Rdn rdn = dn.getRdn(); assertEquals( "a = \\#this is a sharp", rdn.getUpName() ); } /** * Normalize a simple DN with a # on first position */ @Test public void testNormalizeLdapDNSharpFirst() throws InvalidNameException, NamingException { LdapDN dn = new LdapDN( "ou = \\#this is a sharp" ); assertTrue( LdapDN.isValid( "ou = \\#this is a sharp" ) ); assertEquals( "ou=\\#this is a sharp", dn.toString() ); assertEquals( "ou = \\#this is a sharp", dn.getUpName() ); // Check the normalization now LdapDN ndn = dn.normalize( oidOids ); assertEquals( "ou = \\#this is a sharp", ndn.getUpName() ); assertEquals( "2.5.4.11=\\#this is a sharp", ndn.toString() ); } /** * Normalize a DN with sequence ESC ESC HEX HEX (\\DC). * This is a corner case for the parser and normalizer. */ @Test public void testNormalizeLdapDNEscEscHexHex() throws NamingException { LdapDN dn = new LdapDN( "ou = AC\\\\DC" ); assertTrue( LdapDN.isValid( "ou = AC\\\\DC" ) ); assertEquals( "ou=AC\\\\DC", dn.toString() ); assertEquals( "ou = AC\\\\DC", dn.getUpName() ); // Check the normalization now LdapDN ndn = dn.normalize( oidOids ); assertEquals( "ou = AC\\\\DC", ndn.getUpName() ); assertEquals( "2.5.4.11=ac\\\\dc", ndn.toString() ); } /** * test a simple DN with a wrong hexString attribute value : a = #0010Z0AAFF */ @Test public void testLdapDNWrongHexStringAttributeValue() { try { new LdapDN( "a = #0010Z0AAFF" ); fail(); } catch ( InvalidNameException ine ) { assertFalse( LdapDN.isValid( "a = #0010Z0AAFF" ) ); assertTrue( true ); } } /** * test a simple DN with a wrong hexString attribute value : a = #AABBCCDD3 */ @Test public void testLdapDNWrongHexStringAttributeValue2() { try { new LdapDN( "a = #AABBCCDD3" ); fail(); } catch ( InvalidNameException ine ) { assertFalse( LdapDN.isValid( "a = #AABBCCDD3" ) ); assertTrue( true ); } } /** * test a simple DN with a quote in attribute value : a = quoted \"value\" */ @Test public void testLdapDNQuoteInAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = quoted \\\"value\\\"" ); assertTrue( LdapDN.isValid( "a = quoted \\\"value\\\"" ) ); assertEquals( "a=quoted \\\"value\\\"", dn.toString() ); assertEquals( "a = quoted \\\"value\\\"", dn.getUpName() ); } /** * test a simple DN with quoted attribute value : a = \" quoted value \" */ @Test public void testLdapDNQuotedAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = \\\" quoted value \\\"" ); assertTrue( LdapDN.isValid( "a = \\\" quoted value \\\"" ) ); assertEquals( "a=\\\" quoted value \\\"", dn.toString() ); assertEquals( "a = \\\" quoted value \\\"", dn.getUpName() ); } /** * test a simple DN with a comma at the end */ @Test public void testLdapDNComaAtEnd() { assertFalse( LdapDN.isValid( "a = b," ) ); assertFalse( LdapDN.isValid( "a = b, " ) ); try { new LdapDN( "a = b," ); fail(); } catch ( InvalidNameException ine ) { assertTrue( true ); } } // REMOVE operation ------------------------------------------------------- /** * test a remove from position 0 */ @Test public void testLdapDNRemove0() throws InvalidNameException { LdapDN dn = new 
LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( "e=f", dn.remove( 0 ).toString() ); assertEquals( "a=b,c=d", dn.toString() ); assertEquals( "a=b, c=d", dn.getUpName() ); } /** * test a remove from position 1 */ @Test public void testLdapDNRemove1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( "c=d", dn.remove( 1 ).toString() ); assertEquals( "a=b, e=f", dn.getUpName() ); } /** * test a remove from position 2 */ @Test public void testLdapDNRemove2() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( "a=b", dn.remove( 2 ).toString() ); assertEquals( " c=d, e=f", dn.getUpName() ); } /** * test a remove from position 1 whith semi colon */ @Test public void testLdapDNRemove1WithSemiColon() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d; e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d; e=f" ) ); assertEquals( "c=d", dn.remove( 1 ).toString() ); assertEquals( "a=b, e=f", dn.getUpName() ); } /** * test a remove out of bound */ @Test public void testLdapDNRemoveOutOfBound() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d; e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d; e=f" ) ); try { dn.remove( 4 ); // We whould never reach this point fail(); } catch ( ArrayIndexOutOfBoundsException aoobe ) { assertTrue( true ); } } // SIZE operations /** * test a 0 size */ @Test public void testLdapDNSize0() { LdapDN dn = new LdapDN(); assertTrue( LdapDN.isValid( "" ) ); assertEquals( 0, dn.size() ); } /** * test a 1 size */ @Test public void testLdapDNSize1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); assertTrue( LdapDN.isValid( "a=b" ) ); assertEquals( 1, dn.size() ); } /** * test a 3 size */ @Test public void testLdapDNSize3() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( 3, dn.size() ); } /** * test a 3 size with NameComponents */ @Test public void testLdapDNSize3NC() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b+c=d, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b+c=d, c=d, e=f" ) ); assertEquals( 3, dn.size() ); } /** * test size after operations */ @Test public void testLdapResizing() throws InvalidNameException { LdapDN dn = new LdapDN(); assertEquals( 0, dn.size() ); dn.add( "e = f" ); assertEquals( 1, dn.size() ); dn.add( "c = d" ); assertEquals( 2, dn.size() ); dn.remove( 0 ); assertEquals( 1, dn.size() ); dn.remove( 0 ); assertEquals( 0, dn.size() ); } // ADD Operations /** * test Add on a new LdapDN */ @Test public void testLdapEmptyAdd() throws InvalidNameException { LdapDN dn = new LdapDN(); dn.add( "e = f" ); assertEquals( "e=f", dn.toString() ); assertEquals( "e = f", dn.getUpName() ); assertEquals( 1, dn.size() ); } /** * test Add to an existing LdapDN */ @Test public void testLdapDNAdd() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( "e = f" ); assertEquals( "e=f,a=b,c=d", dn.toString() ); assertEquals( "e = f,a=b, c=d", dn.getUpName() ); assertEquals( 3, dn.size() ); } /** * test Add a composite RDN to an existing LdapDN */ @Test public void testLdapDNAddComposite() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( "e = f + g = h" ); // Warning ! 
The order of AVAs has changed during the parsing // This has no impact on the correctness of the DN, but the // String used to do the comparizon should be inverted. assertEquals( "e=f+g=h,a=b,c=d", dn.toString() ); assertEquals( 3, dn.size() ); } /** * test Add at the end of an existing LdapDN */ @Test public void testLdapDNAddEnd() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( dn.size(), "e = f" ); assertEquals( "e = f,a=b, c=d", dn.getUpName() ); assertEquals( 3, dn.size() ); } /** * test Add at the start of an existing LdapDN */ @Test public void testLdapDNAddStart() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( 0, "e = f" ); assertEquals( "a=b, c=d,e = f", dn.getUpName() ); assertEquals( 3, dn.size() ); } /** * test Add at the middle of an existing LdapDN */ @Test public void testLdapDNAddMiddle() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( 1, "e = f" ); assertEquals( "a=b,e = f, c=d", dn.getUpName() ); assertEquals( 3, dn.size() ); } // ADD ALL Operations /** * Test AddAll * * @throws InvalidNameException */ @Test public void testLdapDNAddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN( "c = d" ); dn.addAll( dn2 ); assertEquals( "c = d,a = b", dn.getUpName() ); } /** * Test AddAll with an empty added name * * @throws InvalidNameException */ @Test public void testLdapDNAddAllAddedNameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN(); dn.addAll( dn2 ); assertEquals( "a=b", dn.toString() ); assertEquals( "a = b", dn.getUpName() ); } /** * Test AddAll to an empty name * * @throws InvalidNameException */ @Test public void testLdapDNAddAllNameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN(); LdapDN dn2 = new LdapDN( "a = b" ); dn.addAll( dn2 ); assertEquals( "a = b", dn.getUpName() ); } /** * Test AddAll at position 0 * * @throws InvalidNameException */ @Test public void testLdapDNAt0AddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN( "c = d" ); dn.addAll( 0, dn2 ); assertEquals( "a = b,c = d", dn.getUpName() ); } /** * Test AddAll at position 1 * * @throws InvalidNameException */ @Test public void testLdapDNAt1AddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN( "c = d" ); dn.addAll( 1, dn2 ); assertEquals( "c = d,a = b", dn.getUpName() ); } /** * Test AddAll at the middle * * @throws InvalidNameException */ @Test public void testLdapDNAtTheMiddleAddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b, c = d" ); LdapDN dn2 = new LdapDN( "e = f" ); dn.addAll( 1, dn2 ); assertEquals( "a = b,e = f, c = d", dn.getUpName() ); } /** * Test AddAll with an empty added name at position 0 * * @throws InvalidNameException */ @Test public void testLdapDNAddAllAt0AddedNameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN(); dn.addAll( 0, dn2 ); assertEquals( "a=b", dn.toString() ); assertEquals( "a = b", dn.getUpName() ); } /** * Test AddAll to an empty name at position 0 * * @throws InvalidNameException */ @Test public void testLdapDNAddAllAt0NameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN(); LdapDN dn2 = new LdapDN( "a = b" ); dn.addAll( 0, dn2 ); assertEquals( "a = b", dn.getUpName() ); } // GET PREFIX actions /** * Get the prefix at pos 0 */ @Test public void testLdapDNGetPrefixPos0() throws InvalidNameException { LdapDN dn = new 
LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 0 ) ); assertEquals( "", newDn.getUpName() ); } /** * Get the prefix at pos 1 */ @Test public void testLdapDNGetPrefixPos1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 1 ) ); assertEquals( "e = f", newDn.getUpName() ); } /** * Get the prefix at pos 2 */ @Test public void testLdapDNGetPrefixPos2() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 2 ) ); assertEquals( " c=d,e = f", newDn.getUpName() ); } /** * Get the prefix at pos 3 */ @Test public void testLdapDNGetPrefixPos3() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 3 ) ); assertEquals( "a=b, c=d,e = f", newDn.getUpName() ); } /** * Get the prefix out of bound */ @Test public void testLdapDNGetPrefixPos4() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); try { dn.getPrefix( 4 ); // We should not reach this point. fail(); } catch ( ArrayIndexOutOfBoundsException aoobe ) { assertTrue( true ); } } /** * Get the prefix of an empty LdapName */ @Test public void testLdapDNGetPrefixEmptyDN() { LdapDN dn = new LdapDN(); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 0 ) ); assertEquals( "", newDn.getUpName() ); } // GET SUFFIX operations /** * Get the suffix at pos 0 */ @Test public void testLdapDNGetSuffixPos0() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 0 ) ); assertEquals( "a=b, c=d,e = f", newDn.getUpName() ); } /** * Get the suffix at pos 1 */ @Test public void testLdapDNGetSuffixPos1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 1 ) ); assertEquals( "a=b, c=d", newDn.getUpName() ); } /** * Get the suffix at pos 2 */ @Test public void testLdapDNGetSuffixPos2() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 2 ) ); assertEquals( "a=b", newDn.getUpName() ); } /** * Get the suffix at pos 3 */ @Test public void testLdapDNGetSuffixPos3() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 3 ) ); assertEquals( "", newDn.getUpName() ); } /** * Get the suffix out of bound */ @Test public void testLdapDNGetSuffixPos4() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); try { dn.getSuffix( 4 ); // We should not reach this point. 
fail(); } catch ( ArrayIndexOutOfBoundsException aoobe ) { assertTrue( true ); } } /** * Get the suffix of an empty LdapName */ @Test public void testLdapDNGetSuffixEmptyDN() { LdapDN dn = new LdapDN(); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 0 ) ); assertEquals( "", newDn.getUpName() ); } // IS EMPTY operations /** * Test that a LdapDN is empty */ @Test public void testLdapDNIsEmpty() { LdapDN dn = new LdapDN(); assertEquals( true, dn.isEmpty() ); } /** * Test that a LdapDN is empty */ @Test public void testLdapDNNotEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); assertEquals( false, dn.isEmpty() ); } /** * Test that a LdapDN is empty */ @Test public void testLdapDNRemoveIsEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.remove( 0 ); dn.remove( 0 ); assertEquals( true, dn.isEmpty() ); } // STARTS WITH operations /** * Test a startsWith a null LdapDN */ @Test public void testLdapDNStartsWithNull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( null ) ); } /** * Test a startsWith an empty LdapDN */ @Test public void testLdapDNStartsWithEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN() ) ); } /** * Test a startsWith an simple LdapDN */ @Test public void testLdapDNStartsWithSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN( "e=f" ) ) ); } /** * Test a startsWith a complex LdapDN */ @Test public void testLdapDNStartsWithComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN( "c = d, e = f" ) ) ); } /** * Test a startsWith a complex LdapDN */ @Test public void testLdapDNStartsWithComplexMixedCase() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( false, dn.startsWith( new LdapDN( "c = D, E = f" ) ) ); } /** * Test a startsWith a full LdapDN */ @Test public void testLdapDNStartsWithFull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN( "a= b; c = d, e = f" ) ) ); } /** * Test a startsWith which returns false */ @Test public void testLdapDNStartsWithWrong() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( false, dn.startsWith( new LdapDN( "c = t, e = f" ) ) ); } // ENDS WITH operations /** * Test a endsWith a null LdapDN */ @Test public void testLdapDNEndsWithNull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( null ) ); } /** * Test a endsWith an empty LdapDN */ @Test public void testLdapDNEndsWithEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN() ) ); } /** * Test a endsWith an simple LdapDN */ @Test public void testLdapDNEndsWithSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN( "a=b" ) ) ); } /** * Test a endsWith a complex LdapDN */ @Test public void testLdapDNEndsWithComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN( "a = b, c = d" ) ) ); } /** * Test a endsWith a complex LdapDN */ @Test public void testLdapDNEndsWithComplexMixedCase() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); 
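// Unlike testLdapDNEndsWithComplex above, the candidate suffix here differs in value case ("B" vs "b");
// without schema normalization that mismatch means it is not recognised as a suffix, hence false below.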
assertEquals( false, dn.endsWith( new LdapDN( "a = B, C = d" ) ) ); } /** * Test a endsWith a full LdapDN */ @Test public void testLdapDNEndsWithFull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN( "a= b; c = d, e = f" ) ) ); } /** * Test a endsWith which returns false */ @Test public void testLdapDNEndsWithWrong() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( false, dn.endsWith( new LdapDN( "a = b, e = f" ) ) ); } // GET ALL operations /** * test a getAll operation on a null DN */ @Test public void testLdapDNGetAllNull() { LdapDN dn = new LdapDN(); Enumeration<String> nc = dn.getAll(); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on an empty DN */ @Test public void testLdapDNGetAllEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "" ); Enumeration<String> nc = dn.getAll(); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on a simple DN */ @Test public void testLdapDNGetAllSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); Enumeration<String> nc = dn.getAll(); assertEquals( true, nc.hasMoreElements() ); assertEquals( "a=b", nc.nextElement() ); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on a complex DN */ @Test public void testLdapDNGetAllComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "e=f+g=h,a=b,c=d" ); Enumeration<String> nc = dn.getAll(); assertEquals( true, nc.hasMoreElements() ); assertEquals( "c=d", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); assertEquals( "a=b", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); assertEquals( "e=f+g=h", nc.nextElement() ); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on a complex DN */ @Test public void testLdapDNGetAllComplexOrdered() throws InvalidNameException { LdapDN dn = new LdapDN( "g=h+e=f,a=b,c=d" ); Enumeration<String> nc = dn.getAll(); assertEquals( true, nc.hasMoreElements() ); assertEquals( "c=d", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); assertEquals( "a=b", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); // The lowest atav should be the first one assertEquals( "e=f+g=h", nc.nextElement() ); assertEquals( false, nc.hasMoreElements() ); } // CLONE Operation /** * test a clone operation on a empty DN */ @Test public void testLdapDNCloneEmpty() { LdapDN dn = new LdapDN(); LdapDN clone = ( LdapDN ) dn.clone(); assertEquals( "", clone.getUpName() ); } /** * test a clone operation on a simple DN */ @Test public void testLdapDNCloneSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); LdapDN clone = ( LdapDN ) dn.clone(); assertEquals( "a=b", clone.getUpName() ); dn.remove( 0 ); assertEquals( "a=b", clone.getUpName() ); } /** * test a clone operation on a complex DN */ @Test public void testLdapDNCloneComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "e=f+g=h,a=b,c=d" ); LdapDN clone = ( LdapDN ) dn.clone(); assertEquals( "e=f+g=h,a=b,c=d", clone.getUpName() ); dn.remove( 2 ); assertEquals( "e=f+g=h,a=b,c=d", clone.getUpName() ); } // GET operations /** * test a get in a null DN */ @Test public void testLdapDNGetNull() { LdapDN dn = new LdapDN(); assertEquals( "", dn.get( 0 ) ); } /** * test a get in an empty DN */ @Test public void testLdapDNGetEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "" ); assertEquals( "", dn.get( 0 ) ); } /** * 
test a get in a simple DN */ @Test public void testLdapDNGetSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); assertEquals( "a=b", dn.get( 0 ) ); } /** * test a get in a complex DN */ @Test public void testLdapDNGetComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b + c= d, e= f; g =h" ); assertEquals( "g=h", dn.get( 0 ) ); assertEquals( "e=f", dn.get( 1 ) ); assertEquals( "a=b+c=d", dn.get( 2 ) ); } /** * test a get out of bound */ @Test public void testLdapDNGetOutOfBound() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b + c= d, e= f; g =h" ); try { dn.get( 4 ); fail(); } catch ( IndexOutOfBoundsException aioob ) { assertTrue( true ); } } /** * Tests the examples from the JNDI tutorials to make sure LdapName behaves * appropriately. The example can be found online <a href="">here</a>. * * @throws Exception * if anything goes wrong */ @Test public void testJNDITutorialExample() throws Exception { // Parse the name Name name = new LdapDN( "cn=John,ou=People,ou=Marketing" ); // Remove the second component from the head: ou=People String out = name.remove( 1 ).toString(); assertEquals( "ou=People", out ); // Add to the head (first): cn=John,ou=Marketing,ou=East out = name.add( 0, "ou=East" ).toString(); assertEquals( "cn=John,ou=Marketing,ou=East", out ); // Add to the tail (last): cn=HomeDir,cn=John,ou=Marketing,ou=East out = name.add( "cn=HomeDir" ).toString(); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", out ); } @Test public void testAttributeEqualsIsCaseInSensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir" ); Name name2 = new LdapDN( "CN=HomeDir" ); assertTrue( name1.equals( name2 ) ); } @Test public void testAttributeTypeEqualsIsCaseInsensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=HomeDir+CN=WorkDir" ); assertTrue( name1.equals( name2 ) ); } @Test public void testNameEqualsIsInsensitiveToAttributesOrder() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=WorkDir+cn=HomeDir" ); assertTrue( name1.equals( name2 ) ); } @Test public void testAttributeComparisonIsCaseInSensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir" ); Name name2 = new LdapDN( "CN=HomeDir" ); assertEquals( 0, name1.compareTo( name2 ) ); } @Test public void testAttributeTypeComparisonIsCaseInsensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=HomeDir+CN=WorkDir" ); assertEquals( 0, name1.compareTo( name2 ) ); } @Test public void testNameComparisonIsInsensitiveToAttributesOrder() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=WorkDir+cn=HomeDir" ); assertEquals( 0, name1.compareTo( name2 ) ); } @Test public void testNameComparisonIsInsensitiveToAttributesOrderFailure() throws Exception { Name name1 = new LdapDN( "cn= HomeDir+cn=Workdir" ); Name name2 = new LdapDN( "cn = Work+cn=HomeDir" ); assertEquals( 1, name1.compareTo( name2 ) ); } /** * Test the encoding of a LdanDN */ @Test public void testNameToBytes() throws Exception { LdapDN dn = new LdapDN( "cn = John, ou = People, OU = Marketing" ); byte[] bytes = LdapDN.getBytes( dn ); assertEquals( 30, LdapDN.getNbBytes( dn ) ); assertEquals( "cn=John,ou=People,ou=Marketing", new String( bytes, "UTF-8" ) ); } @Test public void testStringParser() throws Exception { String dn = StringTools.utf8ToString( new byte[] { 'C', 'N', ' ', '=', ' ', 'E', 'm', 
'm', 'a', 'n', 'u', 'e', 'l', ' ', ' ', 'L', ( byte ) 0xc3, ( byte ) 0xa9, 'c', 'h', 'a', 'r', 'n', 'y' } ); Name name = LdapDnParser.getNameParser().parse( dn ); assertEquals( dn, ( ( LdapDN ) name ).getUpName() ); assertEquals( "cn=Emmanuel L\u00E9charny", name.toString() ); } /** * Class to test for void LdapName(String) * * @throws Exception * if anything goes wrong. */ @Test public void testLdapNameString() throws Exception { Name name = new LdapDN( "" ); Name name50 = new LdapDN(); assertEquals( name50, name ); Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name copy = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name4 = new LdapDN( "cn=Website,cn=John,ou=Marketing,ou=West" ); Name name5 = new LdapDN( "cn=Airline,cn=John,ou=Marketing,ou=West" ); assertTrue( name0.compareTo( copy ) == 0 ); assertTrue( name0.compareTo( name1 ) < 0 ); assertTrue( name0.compareTo( name2 ) < 0 ); assertTrue( name1.compareTo( name2 ) < 0 ); assertTrue( name2.compareTo( name1 ) > 0 ); assertTrue( name2.compareTo( name0 ) > 0 ); assertTrue( name2.compareTo( name3 ) < 0 ); assertTrue( name2.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name5 ) > 0 ); assertTrue( name4.compareTo( name5 ) > 0 ); assertTrue( name2.compareTo( name5 ) < 0 ); } /** * Class to test for void LdapName() */ @Test public void testLdapName() { Name name = new LdapDN(); assertTrue( name.toString().equals( "" ) ); } /** * Class to test for void LdapName(List) */ @Test public void testLdapNameList() throws InvalidNameException { List<String> list = new ArrayList<String>(); list.add( "ou=People" ); list.add( "dc=example" ); list.add( "dc=com" ); Name name = new LdapDN( list ); assertTrue( name.toString().equals( "ou=People,dc=example,dc=com" ) ); } /** * Class to test for void LdapName(Iterator) */ @Test public void testLdapNameIterator() throws InvalidNameException { List<String> list = new ArrayList<String>(); list.add( "ou=People" ); list.add( "dc=example" ); list.add( "dc=com" ); Name name = new LdapDN( list.iterator() ); assertTrue( name.toString().equals( "ou=People,dc=example,dc=com" ) ); } /** * Class to test for Object clone() * * @throws Exception * if anything goes wrong. */ @Test public void testClone() throws Exception { String strName = "cn=HomeDir,cn=John,ou=Marketing,ou=East"; Name name = new LdapDN( strName ); assertEquals( name, name.clone() ); } /** * Class to test for compareTo * * @throws Exception * if anything goes wrong. 
*/ @Test public void testCompareTo() throws Exception { Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name copy = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name4 = new LdapDN( "cn=Website,cn=John,ou=Marketing,ou=West" ); Name name5 = new LdapDN( "cn=Airline,cn=John,ou=Marketing,ou=West" ); assertTrue( name0.compareTo( copy ) == 0 ); assertTrue( name0.compareTo( name1 ) < 0 ); assertTrue( name0.compareTo( name2 ) < 0 ); assertTrue( name1.compareTo( name2 ) < 0 ); assertTrue( name2.compareTo( name1 ) > 0 ); assertTrue( name2.compareTo( name0 ) > 0 ); assertTrue( name2.compareTo( name3 ) < 0 ); assertTrue( name2.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name5 ) > 0 ); assertTrue( name4.compareTo( name5 ) > 0 ); assertTrue( name2.compareTo( name5 ) < 0 ); List<Name> list = new ArrayList<Name>(); Comparator<Name> comparator = new Comparator<Name>() { public int compare( Name obj1, Name obj2 ) { Name n1 = obj1; Name n2 = obj2; return n1.compareTo( n2 ); } public boolean equals( Object obj ) { return super.equals( obj ); } /** * Compute the instance's hash code * @return the instance's hash code */ public int hashCode() { return super.hashCode(); } }; list.add( name0 ); list.add( name1 ); list.add( name2 ); list.add( name3 ); list.add( name4 ); list.add( name5 ); Collections.sort( list, comparator ); assertEquals( name0, list.get( 0 ) ); assertEquals( name1, list.get( 1 ) ); assertEquals( name2, list.get( 2 ) ); assertEquals( name5, list.get( 3 ) ); assertEquals( name3, list.get( 4 ) ); assertEquals( name4, list.get( 5 ) ); } /** * Class to test for size * * @throws Exception * if anything goes wrong. */ @Test public void testSize() throws Exception { Name name0 = new LdapDN( "" ); Name name1 = new LdapDN( "ou=East" ); Name name2 = new LdapDN( "ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name4 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name5 = new LdapDN( "cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name6 = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); assertEquals( 0, name0.size() ); assertEquals( 1, name1.size() ); assertEquals( 2, name2.size() ); assertEquals( 3, name3.size() ); assertEquals( 4, name4.size() ); assertEquals( 5, name5.size() ); assertEquals( 6, name6.size() ); } /** * Class to test for isEmpty * * @throws Exception * if anything goes wrong. */ @Test public void testIsEmpty() throws Exception { Name name0 = new LdapDN( "" ); Name name1 = new LdapDN( "ou=East" ); Name name2 = new LdapDN( "ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name4 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name5 = new LdapDN( "cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name6 = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); assertEquals( true, name0.isEmpty() ); assertEquals( false, name1.isEmpty() ); assertEquals( false, name2.isEmpty() ); assertEquals( false, name3.isEmpty() ); assertEquals( false, name4.isEmpty() ); assertEquals( false, name5.isEmpty() ); assertEquals( false, name6.isEmpty() ); } /** * Class to test for getAll * * @throws Exception * if anything goes wrong. 
*/ @Test public void testGetAll() throws Exception { Name name0 = new LdapDN( "" ); Name name1 = new LdapDN( "ou=East" ); Name name2 = new LdapDN( "ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name4 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name5 = new LdapDN( "cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name6 = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Enumeration<String> enum0 = name0.getAll(); assertEquals( false, enum0.hasMoreElements() ); Enumeration<String> enum1 = name1.getAll(); assertEquals( true, enum1.hasMoreElements() ); for ( int i = 0; enum1.hasMoreElements(); i++ ) { String element = ( String ) enum1.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } } Enumeration<String> enum2 = name2.getAll(); assertEquals( true, enum2.hasMoreElements() ); for ( int i = 0; enum2.hasMoreElements(); i++ ) { String element = ( String ) enum2.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } } Enumeration<String> enum3 = name3.getAll(); assertEquals( true, enum3.hasMoreElements() ); for ( int i = 0; enum3.hasMoreElements(); i++ ) { String element = ( String ) enum3.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } } Enumeration<String> enum4 = name4.getAll(); assertEquals( true, enum4.hasMoreElements() ); for ( int i = 0; enum4.hasMoreElements(); i++ ) { String element = ( String ) enum4.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element ); } } Enumeration<String> enum5 = name5.getAll(); assertEquals( true, enum5.hasMoreElements() ); for ( int i = 0; enum5.hasMoreElements(); i++ ) { String element = ( String ) enum5.nextElement(); if ( i == 0 ) { assertEquals( "ou=West", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element ); } if ( i == 4 ) { assertEquals( "cn=Website", element ); } } Enumeration<String> enum6 = name6.getAll(); assertEquals( true, enum6.hasMoreElements() ); for ( int i = 0; enum6.hasMoreElements(); i++ ) { String element = ( String ) enum6.nextElement(); if ( i == 0 ) { assertEquals( "ou=West", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element ); } if ( i == 4 ) { assertEquals( "cn=Website", element ); } if ( i == 5 ) { assertEquals( "cn=Airline", element ); } } } /** * Class to test for getAllRdn * * @throws Exception * if anything goes wrong. 
*/ @Test public void testGetAllRdn() throws Exception { LdapDN name = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Enumeration<Rdn> rdns = name.getAllRdn(); assertEquals( true, rdns.hasMoreElements() ); for ( int i = 0; rdns.hasMoreElements(); i++ ) { Rdn element = ( Rdn ) rdns.nextElement(); if ( i == 0 ) { assertEquals( "ou=West", element.toString() ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element.toString() ); } if ( i == 2 ) { assertEquals( "cn=John", element.toString() ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element.toString() ); } if ( i == 4 ) { assertEquals( "cn=Website", element.toString() ); } if ( i == 5 ) { assertEquals( "cn=Airline", element.toString() ); } } } /** * Test the get( int ) method */ @Test public void testGet() throws Exception { Name name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "cn=HomeDir", name.get( 3 ) ); assertEquals( "cn=John", name.get( 2 ) ); assertEquals( "ou=Marketing", name.get( 1 ) ); assertEquals( "ou=East", name.get( 0 ) ); } /** * Test the getRdn( int ) method */ @Test public void testGetRdn() throws Exception { LdapDN name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "cn=HomeDir", name.getRdn( 3 ).getUpName() ); assertEquals( "cn=John", name.getRdn( 2 ).getUpName() ); assertEquals( "ou=Marketing", name.getRdn( 1 ).getUpName() ); assertEquals( "ou=East", name.getRdn( 0 ).getUpName() ); } /** * Class to test for getSuffix * * @throws Exception * anything goes wrong */ @Test public void testGetXSuffix() throws Exception { Name name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "", name.getSuffix( 4 ).toString() ); assertEquals( "cn=HomeDir", name.getSuffix( 3 ).toString() ); assertEquals( "cn=HomeDir,cn=John", name.getSuffix( 2 ).toString() ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing", name.getSuffix( 1 ).toString() ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", name.getSuffix( 0 ).toString() ); } /** * Class to test for getPrefix * * @throws Exception * anything goes wrong */ @Test public void testGetPrefix() throws Exception { Name name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", name.getPrefix( 4 ).toString() ); assertEquals( "cn=John,ou=Marketing,ou=East", name.getPrefix( 3 ).toString() ); assertEquals( "ou=Marketing,ou=East", name.getPrefix( 2 ).toString() ); assertEquals( "ou=East", name.getPrefix( 1 ).toString() ); assertEquals( "", name.getPrefix( 0 ).toString() ); } /** * Class to test for startsWith * * @throws Exception * anything goes wrong */ @Test public void testStartsWith() throws Exception { Name n0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name n1 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name n2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name n3 = new LdapDN( "ou=Marketing,ou=East" ); Name n4 = new LdapDN( "ou=East" ); Name n5 = new LdapDN( "" ); Name n6 = new LdapDN( "cn=HomeDir" ); Name n7 = new LdapDN( "cn=HomeDir,cn=John" ); Name n8 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing" ); // Check with LdapDN assertTrue( n0.startsWith( n1 ) ); assertTrue( n0.startsWith( n2 ) ); assertTrue( n0.startsWith( n3 ) ); assertTrue( n0.startsWith( n4 ) ); assertTrue( n0.startsWith( n5 ) ); assertTrue( !n0.startsWith( n6 ) ); assertTrue( !n0.startsWith( n7 ) ); assertTrue( !n0.startsWith( n8 ) ); Name nn0 = new LdapDN( "cn=zero" ); Name nn10 = new LdapDN( "cn=one,cn=zero" ); 
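        // Note: for LdapDN, startsWith() matches from the rightmost (least
        // specific) RDN, i.e. the argument is an ancestor: "...,ou=East" starts
        // with "ou=East" above, and "cn=one,cn=zero" starts with "cn=zero" below.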
Name nn210 = new LdapDN( "cn=two,cn=one,cn=zero" ); Name nn3210 = new LdapDN( "cn=three,cn=two,cn=one,cn=zero" ); assertTrue( nn0.startsWith( nn0 ) ); assertTrue( nn10.startsWith( nn0 ) ); assertTrue( nn210.startsWith( nn0 ) ); assertTrue( nn3210.startsWith( nn0 ) ); assertTrue( nn10.startsWith( nn10 ) ); assertTrue( nn210.startsWith( nn10 ) ); assertTrue( nn3210.startsWith( nn10 ) ); assertTrue( nn210.startsWith( nn210 ) ); assertTrue( nn3210.startsWith( nn210 ) ); assertTrue( nn3210.startsWith( nn3210 ) ); // Check with LdapName Name name0 = new LdapName( "cn=zero" ); Name name10 = new LdapName( "cn=one,cn=zero" ); Name name210 = new LdapName( "cn=two,cn=one,cn=zero" ); Name name3210 = new LdapName( "cn=three,cn=two,cn=one,cn=zero" ); // Check with Name assertTrue( nn0.startsWith( name0 ) ); assertTrue( nn10.startsWith( name0 ) ); assertTrue( nn210.startsWith( name0 ) ); assertTrue( nn3210.startsWith( name0 ) ); assertTrue( nn10.startsWith( name10 ) ); assertTrue( nn210.startsWith( name10 ) ); assertTrue( nn3210.startsWith( name10 ) ); assertTrue( nn210.startsWith( name210 ) ); assertTrue( nn3210.startsWith( name210 ) ); assertTrue( nn3210.startsWith( name3210 ) ); assertTrue( "Starting DN fails with ADS LdapDN", new LdapDN( "ou=foo,dc=apache,dc=org" ).startsWith( new LdapDN( "dc=apache,dc=org" ) ) ); assertTrue( "Starting DN fails with Java LdapName", new LdapDN( "ou=foo,dc=apache,dc=org" ).startsWith( new LdapName( "dc=apache,dc=org" ) ) ); assertTrue( "Starting DN fails with Java LdapName", new LdapDN( "dc=apache,dc=org" ).startsWith( new LdapName( "dc=apache,dc=org" ) ) ); } /** * Class to test for endsWith * * @throws Exception * anything goes wrong */ @Test public void testEndsWith() throws Exception { Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name3 = new LdapDN( "ou=Marketing,ou=East" ); Name name4 = new LdapDN( "ou=East" ); Name name5 = new LdapDN( "" ); Name name6 = new LdapDN( "cn=HomeDir" ); Name name7 = new LdapDN( "cn=HomeDir,cn=John" ); Name name8 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing" ); assertTrue( name0.endsWith( name1 ) ); assertTrue( !name0.endsWith( name2 ) ); assertTrue( !name0.endsWith( name3 ) ); assertTrue( !name0.endsWith( name4 ) ); assertTrue( name0.endsWith( name5 ) ); assertTrue( name0.endsWith( name6 ) ); assertTrue( name0.endsWith( name7 ) ); assertTrue( name0.endsWith( name8 ) ); } /** * Class to test for Name addAll(Name) * * @throws Exception * when anything goes wrong */ @Test public void testAddAllName0() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); } /** * Class to test for Name addAll(Name) * * @throws Exception * when anything goes wrong */ @Test public void testAddAllNameExisting0() throws Exception { Name name1 = new LdapDN( "ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John" ); Name nameAdded = new LdapDN( "cn=HomeDir,cn=John, ou=Marketing,ou=East" ); assertTrue( nameAdded.equals( name1.addAll( name2 ) ) ); } /** * Class to test for Name addAll(Name) * * @throws Exception * when anything goes wrong */ @Test public void testAddAllName1() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=HomeDir,cn=John" ); Name name2 = new LdapDN( 
"cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); assertTrue( name2.equals( name.addAll( name1 ) ) ); } /** * Class to test for Name addAll(int, Name) * * @throws Exception * when something goes wrong */ @Test public void testAddAllintName0() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=HomeDir,cn=John" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); assertTrue( name2.equals( name.addAll( 2, name1 ) ) ); } /** * Class to test for Name addAll(int, Name) * * @throws Exception * when something goes wrong */ @Test public void testAddAllintName1() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "cn=HomeDir,ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=John" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); assertTrue( name2.equals( name.addAll( 2, name1 ) ) ); Name name3 = new LdapDN( "cn=Airport" ); Name name4 = new LdapDN( "cn=Airport,cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name4.equals( name.addAll( 4, name3 ) ) ); Name name5 = new LdapDN( "cn=ABC123" ); Name name6 = new LdapDN( "cn=Airport,cn=HomeDir,cn=ABC123,cn=John,ou=Marketing,ou=East" ); assertTrue( name6.equals( name.addAll( 3, name5 ) ) ); } /** * Class to test for Name add(String) * * @throws Exception * when something goes wrong */ @Test public void testAddString() throws Exception { Name name = new LdapDN(); assertEquals( name, new LdapDN( "" ) ); Name name4 = new LdapDN( "ou=East" ); name.add( "ou=East" ); assertEquals( name4, name ); Name name3 = new LdapDN( "ou=Marketing,ou=East" ); name.add( "ou=Marketing" ); assertEquals( name3, name ); Name name2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); name.add( "cn=John" ); assertEquals( name2, name ); Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); name.add( "cn=HomeDir" ); assertEquals( name0, name ); } /** * Class to test for Name add(int, String) * * @throws Exception * if anything goes wrong */ @Test public void testAddintString() throws Exception { Name name = new LdapDN(); assertEquals( name, new LdapDN( "" ) ); Name name4 = new LdapDN( "ou=East" ); name.add( "ou=East" ); assertEquals( name4, name ); Name name3 = new LdapDN( "ou=Marketing,ou=East" ); name.add( 1, "ou=Marketing" ); assertEquals( name3, name ); Name name2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); name.add( 2, "cn=John" ); assertEquals( name2, name ); Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); name.add( 3, "cn=HomeDir" ); assertEquals( name0, name ); Name name5 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East,o=LL " + "Bean Inc." ); name.add( 0, "o=LL Bean Inc." ); assertEquals( name5, name ); Name name6 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East,c=US,o=LL " + "Bean Inc." ); name.add( 1, "c=US" ); assertEquals( name6, name ); Name name7 = new LdapDN( "cn=HomeDir,cn=John,ou=Advertising,ou=Marketing," + "ou=East,c=US,o=LL " + "Bean Inc." 
); name.add( 4, "ou=Advertising" ); assertEquals( name7, name ); } /** * Class to test for remove * * @throws Exception * if anything goes wrong */ @Test public void testRemove() throws Exception { Name name = new LdapDN(); assertEquals( new LdapDN( "" ), name ); Name name3 = new LdapDN( "ou=Marketing" ); name.add( "ou=East" ); name.add( 1, "ou=Marketing" ); name.remove( 0 ); assertEquals( name3, name ); Name name2 = new LdapDN( "cn=HomeDir,ou=Marketing,ou=East" ); name.add( 0, "ou=East" ); name.add( 2, "cn=John" ); name.add( "cn=HomeDir" ); name.remove( 2 ); assertEquals( name2, name ); name.remove( 1 ); Name name1 = new LdapDN( "cn=HomeDir,ou=East" ); assertEquals( name1, name ); name.remove( 1 ); Name name0 = new LdapDN( "ou=East" ); assertEquals( name0, name ); name.remove( 0 ); assertEquals( new LdapDN( "" ), name ); } /** * Class to test for String toString() * * @throws Exception * if anything goes wrong */ @Test public void testToString() throws Exception { Name name = new LdapDN(); assertEquals( "", name.toString() ); name.add( "ou=East" ); assertEquals( "ou=East", name.toString() ); name.add( 1, "ou=Marketing" ); assertEquals( "ou=Marketing,ou=East", name.toString() ); name.add( "cn=John" ); assertEquals( "cn=John,ou=Marketing,ou=East", name.toString() ); name.add( "cn=HomeDir" ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", name.toString() ); } /** * Class to test for boolean equals(Object) * * @throws Exception * if anything goes wrong */ @Test public void testEqualsObject() throws Exception { assertTrue( new LdapDN( "ou=People" ).equals( new LdapDN( "ou=People" ) ) ); assertTrue( !new LdapDN( "ou=People,dc=example,dc=com" ).equals( new LdapDN( "ou=People" ) ) ); assertTrue( !new LdapDN( "ou=people" ).equals( new LdapDN( "ou=People" ) ) ); assertTrue( !new LdapDN( "ou=Groups" ).equals( new LdapDN( "ou=People" ) ) ); } @Test public void testNameFrenchChars() throws Exception { String cn = new String( new byte[] { 'c', 'n', '=', 0x4A, ( byte ) 0xC3, ( byte ) 0xA9, 0x72, ( byte ) 0xC3, ( byte ) 0xB4, 0x6D, 0x65 }, "UTF-8" ); Name name = new LdapDN( cn ); assertEquals( "cn=J\u00e9r\u00f4me", name.toString() ); } @Test public void testNameGermanChars() throws Exception { String cn = new String( new byte[] { 'c', 'n', '=', ( byte ) 0xC3, ( byte ) 0x84, ( byte ) 0xC3, ( byte ) 0x96, ( byte ) 0xC3, ( byte ) 0x9C, ( byte ) 0xC3, ( byte ) 0x9F, ( byte ) 0xC3, ( byte ) 0xA4, ( byte ) 0xC3, ( byte ) 0xB6, ( byte ) 0xC3, ( byte ) 0xBC }, "UTF-8" ); Name name = new LdapDN( cn ); assertEquals( "cn=\u00C4\u00D6\u00DC\u00DF\u00E4\u00F6\u00FC", name.toString() ); } @Test public void testNameTurkishChars() throws Exception { String cn = new String( new byte[] { 'c', 'n', '=', ( byte ) 0xC4, ( byte ) 0xB0, ( byte ) 0xC4, ( byte ) 0xB1, ( byte ) 0xC5, ( byte ) 0x9E, ( byte ) 0xC5, ( byte ) 0x9F, ( byte ) 0xC3, ( byte ) 0x96, ( byte ) 0xC3, ( byte ) 0xB6, ( byte ) 0xC3, ( byte ) 0x9C, ( byte ) 0xC3, ( byte ) 0xBC, ( byte ) 0xC4, ( byte ) 0x9E, ( byte ) 0xC4, ( byte ) 0x9F }, "UTF-8" ); Name name = new LdapDN( cn ); assertEquals( "cn=\u0130\u0131\u015E\u015F\u00D6\u00F6\u00DC\u00FC\u011E\u011F", name.toString() ); } /** * Class to test for toOid( Name, Map) */ @Test public void testLdapNameToName() throws Exception { List<String> list = new ArrayList<String>(); list.add( "ou= Some People " ); list.add( "dc = eXample" ); list.add( "dc= cOm" ); LdapDN name = new LdapDN( list.iterator() ); assertTrue( name.getUpName().equals( "ou= Some People ,dc = eXample,dc= cOm" ) ); Name result = 
LdapDN.normalize( name, oids ); assertTrue( result.toString().equals( "ou=some people,dc=example,dc=com" ) ); } @Test public void testRdnGetTypeUpName() throws Exception { List<String> list = new ArrayList<String>(); list.add( "ou= Some People " ); list.add( "dc = eXample" ); list.add( "dc= cOm" ); LdapDN name = new LdapDN( list.iterator() ); assertTrue( name.getUpName().equals( "ou= Some People ,dc = eXample,dc= cOm" ) ); Rdn rdn = name.getRdn(); assertEquals( "ou= Some People ", rdn.getUpName() ); assertEquals( "ou", rdn.getNormType() ); assertEquals( "ou", rdn.getUpType() ); LdapDN result = LdapDN.normalize( name, oidOids ); assertTrue( result.getNormName().equals( "2.5.4.11=some people,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ) ); assertTrue( name.getUpName().equals( "ou= Some People ,dc = eXample,dc= cOm" ) ); Rdn rdn2 = result.getRdn(); assertEquals( "ou= Some People ", rdn2.getUpName() ); assertEquals( "2.5.4.11", rdn2.getNormType() ); assertEquals( "ou", rdn2.getUpType() ); } /** * Class to test for toOid( Name, Map) with a NULL dn */ @Test public void testLdapNameToNameEmpty() throws Exception { LdapDN name = new LdapDN(); Name result = LdapDN.normalize( name, oids ); assertTrue( result.toString().equals( "" ) ); } /** * Class to test for toOid( Name, Map) with a multiple NameComponent */ @Test public void testLdapNameToNameMultiNC() throws Exception { LdapDN name = new LdapDN( "2.5.4.11= Some People + 0.9.2342.19200300.100.1.25= And Some anImAls,0.9.2342.19200300.100.1.25 = eXample,dc= cOm" ); Name result = LdapDN.normalize( name, oidOids ); assertEquals( result.toString(), "0.9.2342.19200300.100.1.25=and some animals+2.5.4.11=some people,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ); assertTrue( ( ( LdapDN ) result ) .getUpName() .equals( "2.5.4.11= Some People + 0.9.2342.19200300.100.1.25= And Some anImAls,0.9.2342.19200300.100.1.25 = eXample,dc= cOm" ) ); } /** * Class to test for toOid( Name, Map) with a multiple NameComponent */ @Test public void testLdapNameToNameAliasMultiNC() throws Exception { LdapDN name = new LdapDN( "2.5.4.11= Some People + domainComponent= And Some anImAls,DomainComponent = eXample,0.9.2342.19200300.100.1.25= cOm" ); LdapDN result = LdapDN.normalize( name, oidOids ); assertTrue( result .toString() .equals( "0.9.2342.19200300.100.1.25=and some animals+2.5.4.11=some people,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ) ); assertTrue( result .getUpName() .equals( "2.5.4.11= Some People + domainComponent= And Some anImAls,DomainComponent = eXample,0.9.2342.19200300.100.1.25= cOm" ) ); } /** * Class to test for hashCode(). 
*/ @Test public void testLdapNameHashCode() throws Exception { Name name1 = LdapDN .normalize( "2.5.4.11= Some People + domainComponent= And Some anImAls,DomainComponent = eXample,0.9.2342.19200300.100.1.25= cOm", oids ); Name name2 = LdapDN .normalize( "2.5.4.11=some people+domainComponent=and some animals,DomainComponent=example,0.9.2342.19200300.100.1.25=com", oids ); assertEquals( name1.hashCode(), name2.hashCode() ); } /** * Test for DIRSERVER-191 */ @Test public void testName() throws NamingException { Name jName = new javax.naming.ldap.LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.toString(), "cn=four,cn=three,cn=two,cn=one" ); assertEquals( aName.toString(), "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testGetPrefixName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.getPrefix( 0 ).toString(), aName.getPrefix( 0 ).toString() ); assertEquals( jName.getPrefix( 1 ).toString(), aName.getPrefix( 1 ).toString() ); assertEquals( jName.getPrefix( 2 ).toString(), aName.getPrefix( 2 ).toString() ); assertEquals( jName.getPrefix( 3 ).toString(), aName.getPrefix( 3 ).toString() ); assertEquals( jName.getPrefix( 4 ).toString(), aName.getPrefix( 4 ).toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testGetSuffix() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.getSuffix( 0 ).toString(), aName.getSuffix( 0 ).toString() ); assertEquals( jName.getSuffix( 1 ).toString(), aName.getSuffix( 1 ).toString() ); assertEquals( jName.getSuffix( 2 ).toString(), aName.getSuffix( 2 ).toString() ); assertEquals( jName.getSuffix( 3 ).toString(), aName.getSuffix( 3 ).toString() ); assertEquals( jName.getSuffix( 4 ).toString(), aName.getSuffix( 4 ).toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddStringName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.add( "cn=five" ) ); assertSame( aName, aName.add( "cn=five" ) ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddIntString() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.add( 0, "cn=zero" ) ); assertSame( aName, aName.add( 0, "cn=zero" ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.add( 2, "cn=one.5" ) ); assertSame( aName, aName.add( 2, "cn=one.5" ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.add( jName.size(), "cn=five" ) ); assertSame( aName, aName.add( aName.size(), "cn=five" ) ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddAllName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.addAll( new LdapName( "cn=seven,cn=six" ) ) ); assertSame( aName, aName.addAll( new LdapDN( "cn=seven,cn=six" ) ) ); assertEquals( jName.toString(), 
aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddAllIntName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.addAll( 0, new LdapName( "cn=zero,cn=zero.5" ) ) ); assertSame( aName, aName.addAll( 0, new LdapDN( "cn=zero,cn=zero.5" ) ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.addAll( 2, new LdapName( "cn=zero,cn=zero.5" ) ) ); assertSame( aName, aName.addAll( 2, new LdapDN( "cn=zero,cn=zero.5" ) ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.addAll( jName.size(), new LdapName( "cn=zero,cn=zero.5" ) ) ); assertSame( aName, aName.addAll( aName.size(), new LdapDN( "cn=zero,cn=zero.5" ) ) ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testStartsWithName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.startsWith( new LdapName( "cn=seven,cn=six,cn=five" ) ), aName.startsWith( new LdapDN( "cn=seven,cn=six,cn=five" ) ) ); assertEquals( jName.startsWith( new LdapName( "cn=three,cn=two,cn=one" ) ), aName.startsWith( new LdapDN( "cn=three,cn=two,cn=one" ) ) ); } /** * Test for DIRSERVER-191 */ @Test public void testEndsWithName() throws NamingException { Name name0 = new LdapName( "cn=zero" ); Name name10 = new LdapName( "cn=one,cn=zero" ); Name name210 = new LdapName( "cn=two,cn=one,cn=zero" ); Name name3210 = new LdapName( "cn=three,cn=two,cn=one,cn=zero" ); Name name321 = new LdapName( "cn=three,cn=two,cn=one" ); Name name32 = new LdapName( "cn=three,cn=two" ); Name name3 = new LdapName( "cn=three" ); Name name21 = new LdapName( "cn=two,cn=one" ); Name name2 = new LdapName( "cn=two" ); Name name1 = new LdapName( "cn=one" ); // Check with Name assertTrue( name0.startsWith( name0 ) ); assertTrue( name10.startsWith( name0 ) ); assertTrue( name210.startsWith( name0 ) ); assertTrue( name3210.startsWith( name0 ) ); assertTrue( name10.startsWith( name10 ) ); assertTrue( name210.startsWith( name10 ) ); assertTrue( name3210.startsWith( name10 ) ); assertTrue( name210.startsWith( name210 ) ); assertTrue( name3210.startsWith( name210 ) ); assertTrue( name3210.startsWith( name3210 ) ); assertTrue( name3210.endsWith( name3 ) ); assertTrue( name3210.endsWith( name32 ) ); assertTrue( name3210.endsWith( name321 ) ); assertTrue( name3210.endsWith( name3210 ) ); assertTrue( name210.endsWith( name2 ) ); assertTrue( name210.endsWith( name21 ) ); assertTrue( name210.endsWith( name210 ) ); assertTrue( name10.endsWith( name1 ) ); assertTrue( name10.endsWith( name10 ) ); assertTrue( name0.endsWith( name0 ) ); // Check with DN Name n0 = new LdapDN( "cn=zero" ); Name n10 = new LdapDN( "cn=one,cn=zero" ); Name n210 = new LdapDN( "cn=two,cn=one,cn=zero" ); Name n3210 = new LdapDN( "cn=three,cn=two,cn=one,cn=zero" ); Name n321 = new LdapDN( "cn=three,cn=two,cn=one" ); Name n32 = new LdapDN( "cn=three,cn=two" ); Name n3 = new LdapDN( "cn=three" ); Name n21 = new LdapDN( "cn=two,cn=one" ); Name n2 = new LdapDN( "cn=two" ); Name n1 = new LdapDN( "cn=one" ); assertTrue( n3210.endsWith( n3 ) ); assertTrue( n3210.endsWith( n32 ) ); assertTrue( n3210.endsWith( n321 ) ); assertTrue( n3210.endsWith( n3210 ) ); assertTrue( n210.endsWith( n2 ) ); assertTrue( n210.endsWith( n21 ) ); assertTrue( n210.endsWith( n210 ) ); assertTrue( 
n10.endsWith( n1 ) ); assertTrue( n10.endsWith( n10 ) ); assertTrue( n0.endsWith( n0 ) ); // Check with DN/Name now assertTrue( n3210.endsWith( name3 ) ); assertTrue( n3210.endsWith( name32 ) ); assertTrue( n3210.endsWith( name321 ) ); assertTrue( n3210.endsWith( name3210 ) ); assertTrue( n210.endsWith( name2 ) ); assertTrue( n210.endsWith( name21 ) ); assertTrue( n210.endsWith( name210 ) ); assertTrue( n10.endsWith( name1 ) ); assertTrue( n10.endsWith( name10 ) ); assertTrue( n0.endsWith( name0 ) ); Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.endsWith( new LdapName( "cn=seven,cn=six,cn=five" ) ), aName.endsWith( new LdapDN( "cn=seven,cn=six,cn=five" ) ) ); assertEquals( jName.endsWith( new LdapName( "cn=three,cn=two,cn=one" ) ), aName.endsWith( new LdapDN( "cn=three,cn=two,cn=one" ) ) ); assertEquals( jName.endsWith( new LdapName( "cn=two,cn=one" ) ), aName.endsWith( new LdapDN( "cn=three,cn=two,cn=one" ) ) ); assertTrue( aName.endsWith( new LdapName( "cn=four,cn=three" ) ) ); } /** * Test for DIRSERVER-191 */ @Test public void testRemoveName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.remove( 0 ).toString(), aName.remove( 0 ).toString() ); assertEquals( jName.toString(), aName.toString() ); assertEquals( jName.remove( jName.size() - 1 ).toString(), aName.remove( aName.size() - 1 ).toString() ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testGetAllName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); Enumeration<String> j = jName.getAll(); Enumeration<String> a = aName.getAll(); while ( j.hasMoreElements() ) { assertTrue( j.hasMoreElements() ); assertEquals( j.nextElement(), a.nextElement() ); } } /** * Test for DIRSERVER-642 * @throws NamingException */ @Test public void testDoubleQuoteInNameDIRSERVER_642() throws NamingException { Name name1 = new LdapDN( "cn=\"Kylie Minogue\",dc=example,dc=com" ); Name name2 = new LdapName( "cn=\"Kylie Minogue\",dc=example,dc=com" ); Enumeration<String> j = name1.getAll(); Enumeration<String> a = name2.getAll(); while ( j.hasMoreElements() ) { assertTrue( j.hasMoreElements() ); assertEquals( j.nextElement(), a.nextElement() ); } } /** * Test for DIRSERVER-642 * @throws NamingException */ @Test public void testDoubleQuoteInNameDIRSERVER_642_1() throws NamingException { LdapDN dn = new LdapDN( "cn=\" Kylie Minogue \",dc=example,dc=com" ); assertEquals( "cn=\" Kylie Minogue \",dc=example,dc=com", dn.getUpName() ); assertEquals( "cn=\\ Kylie Minogue\\ ,dc=example,dc=com", dn.toString() ); } /** * Test for DIRSTUDIO-250 * @throws NamingException */ @Test public void testDoubleQuoteWithSpecialCharsInNameDIRSERVER_250() throws NamingException { LdapDN dn = new LdapDN( "a=\"b,c\"" ); assertEquals( "a=\"b,c\"", dn.getUpName() ); assertEquals( "a=b\\,c", dn.toString() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testLeadingAndTrailingSpacesDIRSERVER_184() throws NamingException { LdapDN name = new LdapDN( "dn= \\ four spaces leading and 3 trailing \\ " ); assertEquals( "dn=\\ four spaces leading and 3 trailing \\ ", name.toString() ); assertEquals( "dn= \\ four spaces leading and 3 trailing \\ ", name.getUpName() ); } /** * Test for 
DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_1() { try { new LdapDN( "dn=middle\\ spaces" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_2() { try { new LdapDN( "dn=# a leading pound" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_3() throws NamingException { LdapDN name = new LdapDN( "dn=\\# a leading pound" ); assertEquals( "dn=\\# a leading pound", name.toString() ); assertEquals( "dn=\\# a leading pound", name.getUpName() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_4() throws NamingException { LdapDN name = new LdapDN( "dn=a middle \\# pound" ); assertEquals( "dn=a middle # pound", name.toString() ); assertEquals( "dn=a middle \\# pound", name.getUpName() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_5() throws NamingException { LdapDN name = new LdapDN( "dn=a trailing pound \\#" ); assertEquals( "dn=a trailing pound #", name.toString() ); assertEquals( "dn=a trailing pound \\#", name.getUpName() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_6() { try { new LdapDN( "dn=a middle # pound" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_7() { try { new LdapDN( "dn=a trailing pound #" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } @Test public void testDIRSERVER_631_1() throws NamingException { LdapDN name = new LdapDN( "cn=Bush\\, Kate,dc=example,dc=com" ); assertEquals( "cn=Bush\\, Kate,dc=example,dc=com", name.toString() ); assertEquals( "cn=Bush\\, Kate,dc=example,dc=com", name.getUpName() ); } /** * Added a test to check the parsing of a DN with more than one RDN * which are OIDs, and with one RDN which has more than one atav. 
* @throws NamingException */ @Test public void testDNWithMultiOidsRDN() throws NamingException { LdapDN name = new LdapDN( "0.9.2342.19200300.100.1.1=00123456789+2.5.4.3=pablo picasso,2.5.4.11=search,2.5.4.10=imc,2.5.4.6=us" ); assertEquals( "0.9.2342.19200300.100.1.1=00123456789+2.5.4.3=pablo picasso,2.5.4.11=search,2.5.4.10=imc,2.5.4.6=us", name .toString() ); assertEquals( "0.9.2342.19200300.100.1.1=00123456789+2.5.4.3=pablo picasso,2.5.4.11=search,2.5.4.10=imc,2.5.4.6=us", name .getUpName() ); } @Test public void testNameAddAll() throws NamingException { Properties props = new Properties(); props.setProperty( "jndi.syntax.direction", "right_to_left" ); props.setProperty( "jndi.syntax.separator", "," ); props.setProperty( "jndi.syntax.ignorecase", "true" ); props.setProperty( "jndi.syntax.trimblanks", "true" ); Name dn = new CompoundName( "cn=blah,dc=example,dc=com", props ); LdapDN ldapDn = new LdapDN(); ldapDn.addAll( 0, dn ); assertEquals( "cn=blah,dc=example,dc=com", ldapDn.toString() ); dn = new CompoundName( "cn=blah,dc=example,dc=com", props ); ldapDn = new LdapDN( "cn=xyz" ); ldapDn.addAll( 0, dn ); assertEquals( "cn=xyz,cn=blah,dc=example,dc=com", ldapDn.toString() ); } @Test public void testDNEquals() throws NamingException { LdapDN dn1 = new LdapDN( "a=b,c=d,e=f" ); LdapDN dn2 = new LdapDN( "a=b\\,c\\=d,e=f" ); assertFalse( dn1.toString().equals( dn2.toString() ) ); } @Test public void testDNAddEmptyString() throws NamingException { LdapDN dn = new LdapDN(); assertTrue( dn.size() == 0 ); assertTrue( dn.add( "" ).size() == 0 ); } /** * This leads to the bug in DIRSERVER-832. */ @Test public void testPreserveAttributeIdCase() throws NamingException { LdapDN dn = new LdapDN( "uID=kevin" ); assertEquals( "uID", dn.getRdn().getUpType() ); } /** * Tests the LdapDN.isValid() method. 
*/ @Test public void testIsValid() { assertTrue( LdapDN.isValid( "" ) ); assertFalse( LdapDN.isValid( "a" ) ); assertFalse( LdapDN.isValid( "a " ) ); assertTrue( LdapDN.isValid( "a=" ) ); assertTrue( LdapDN.isValid( "a= " ) ); assertFalse( LdapDN.isValid( "=" ) ); assertFalse( LdapDN.isValid( " = " ) ); assertFalse( LdapDN.isValid( " = a" ) ); } private ByteArrayOutputStream serializeDN( LdapDN dn ) throws IOException { ObjectOutputStream oOut = null; ByteArrayOutputStream out = new ByteArrayOutputStream(); try { oOut = new ObjectOutputStream( out ); oOut.writeObject( dn ); } catch ( IOException ioe ) { throw ioe; } finally { try { if ( oOut != null ) { oOut.flush(); oOut.close(); } } catch ( IOException ioe ) { throw ioe; } } return out; } private LdapDN deserializeDN( ByteArrayOutputStream out ) throws IOException, ClassNotFoundException { ObjectInputStream oIn = null; ByteArrayInputStream in = new ByteArrayInputStream( out.toByteArray() ); try { oIn = new ObjectInputStream( in ); LdapDN dn = ( LdapDN ) oIn.readObject(); return dn; } catch ( IOException ioe ) { throw ioe; } finally { try { if ( oIn != null ) { oIn.close(); } } catch ( IOException ioe ) { throw ioe; } } } /** * Test the serialization of a DN * * @throws Exception */ @Test public void testNameSerialization() throws Exception { LdapDN dn = new LdapDN( "ou= Some People + dc= And Some anImAls,dc = eXample,dc= cOm" ); dn.normalize( oids ); assertEquals( dn, deserializeDN( serializeDN( dn ) ) ); } @Test public void testSerializeEmptyDN() throws Exception { LdapDN dn = LdapDN.EMPTY_LDAPDN; assertEquals( dn, deserializeDN( serializeDN( dn ) ) ); } /** * Test the serialization of a DN * * @throws Exception */ @Test public void testNameStaticSerialization() throws Exception { LdapDN dn = new LdapDN( "ou= Some People + dc= And Some anImAls,dc = eXample,dc= cOm" ); dn.normalize( oids ); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream( baos ); LdapDNSerializer.serialize( dn, out ); out.flush(); byte[] data = baos.toByteArray(); ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); assertEquals( dn, LdapDNSerializer.deserialize( in ) ); } /* @Test public void testSerializationPerfs() throws Exception { LdapDN dn = new LdapDN( "ou= Some People + dc= And Some anImAls,dc = eXample,dc= cOm" ); dn.normalize( oids ); long t0 = System.currentTimeMillis(); for ( int i = 0; i < 1000; i++ ) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream( baos ); DnSerializer.serialize( dn, out ); byte[] data = baos.toByteArray(); ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); LdapDN dn1 = DnSerializer.deserialize( in ); } long t1 = System.currentTimeMillis(); System.out.println( "delta :" + ( t1 - t0) ); long t2 = System.currentTimeMillis(); for ( int i = 0; i < 1000000; i++ ) { //ByteArrayOutputStream baos = new ByteArrayOutputStream(); //ObjectOutputStream out = new ObjectOutputStream( baos ); //DnSerializer.serializeString( dn, out ); //byte[] data = baos.toByteArray(); //ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); //LdapDN dn1 = DnSerializer.deserializeString( in, oids ); dn.normalize( oids ); } long t3 = System.currentTimeMillis(); System.out.println( "delta :" + ( t3 - t2) ); //assertEquals( dn, DnSerializer.deserialize( in ) ); } */ @Test public void testStaticSerializeEmptyDN() throws Exception { LdapDN dn = LdapDN.EMPTY_LDAPDN; 
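        // The shared EMPTY_LDAPDN constant must survive both round trips checked
        // below: LdapDNSerializer.serialize()/deserialize() and the plain Java
        // serialization helpers serializeDN()/deserializeDN() defined above.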
ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream( baos ); LdapDNSerializer.serialize( dn, out ); out.flush(); byte[] data = baos.toByteArray(); ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); assertEquals( dn, LdapDNSerializer.deserialize( in ) ); assertEquals( dn, deserializeDN( serializeDN( dn ) ) ); } @Test public void testCompositeRDN() throws InvalidNameException { assertTrue( LdapDN.isValid( "a=b+c=d+e=f,g=h" ) ); LdapDN dn = new LdapDN( "a=b+c=d+e=f,g=h" ); assertEquals( "a=b+c=d+e=f,g=h", dn.toString() ); } @Test public void testCompositeRDNOids() throws InvalidNameException { assertTrue( LdapDN.isValid( "1.2.3.4.5=0+1.2.3.4.6=0+1.2.3.4.7=omnischmomni,2.5.4.3=subtree,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ) ); LdapDN dn = new LdapDN( "1.2.3.4.5=0+1.2.3.4.6=0+1.2.3.4.7=omnischmomni,2.5.4.3=subtree,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ); assertEquals( "1.2.3.4.5=0+1.2.3.4.6=0+1.2.3.4.7=omnischmomni,2.5.4.3=subtree,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com", dn.toString() ); } /** * Tests that AttributeTypeAndValues are correctly trimmed. */ @Test public void testTrimAtavs() throws InvalidNameException { // antlr parser: string value with trailing spaces LdapDN dn1 = new LdapDN( " cn = Amos\\,Tori , ou=system " ); assertEquals( " cn = Amos\\,Tori ", dn1.getRdn().getUpName() ); AttributeTypeAndValue atav1 = dn1.getRdn().getAtav(); assertEquals( "cn", atav1.getUpType() ); assertEquals( "Amos\\,Tori", atav1.getUpValue().getString() ); // antlr parser: hexstring with trailing spaces LdapDN dn3 = new LdapDN( " cn = #414243 , ou=system " ); assertEquals( " cn = #414243 ", dn3.getRdn().getUpName() ); AttributeTypeAndValue atav3 = dn3.getRdn().getAtav(); assertEquals( "cn", atav3.getUpType() ); assertEquals( "#414243", atav3.getUpValue().getString() ); assertTrue( Arrays.equals( StringTools.getBytesUtf8( "ABC" ),atav3.getNormValue().getBytes() ) ); // antlr parser: LdapDN dn4 = new LdapDN( " cn = \\41\\42\\43 , ou=system " ); assertEquals( " cn = \\41\\42\\43 ", dn4.getRdn().getUpName() ); AttributeTypeAndValue atav4 = dn4.getRdn().getAtav(); assertEquals( "cn", atav4.getUpType() ); assertEquals( "\\41\\42\\43", atav4.getUpValue().getString() ); assertEquals( "ABC", atav4.getNormValue().getString() ); // antlr parser: quotestring with trailing spaces LdapDN dn5 = new LdapDN( " cn = \"ABC\" , ou=system " ); assertEquals( " cn = \"ABC\" ", dn5.getRdn().getUpName() ); AttributeTypeAndValue atav5 = dn5.getRdn().getAtav(); assertEquals( "cn", atav5.getUpType() ); assertEquals( "\"ABC\"", atav5.getUpValue().getString() ); assertEquals( "ABC", atav5.getNormValue().getString() ); // fast parser: string value with trailing spaces LdapDN dn2 = new LdapDN( " cn = Amos Tori , ou=system " ); assertEquals( " cn = Amos Tori ", dn2.getRdn().getUpName() ); AttributeTypeAndValue atav2 = dn2.getRdn().getAtav(); assertEquals( "cn", atav2.getUpType() ); assertEquals( "Amos Tori", atav2.getUpValue().getString() ); } }
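
/*
 * Illustrative sketch, not part of the original test suite: a compact summary of
 * the LdapDN behaviour exercised by the tests above (syntax checking with
 * isValid(), and user-provided vs. trimmed form via getUpName()/toString()).
 * It reuses inputs taken verbatim from testLdapDNComposite and
 * testLdapDNComaAtEnd and relies on the imports already present in this file;
 * the class and method names below are made up for illustration only.
 */
class LdapDnUsageSketch
{
    static void demo() throws InvalidNameException
    {
        // Syntax check without constructing a DN.
        assert LdapDN.isValid( "a = b, c = d" );
        assert !LdapDN.isValid( "a = b," );

        // getUpName() keeps the user-provided form; toString() returns the
        // trimmed internal form, as asserted in testLdapDNComposite.
        LdapDN dn = new LdapDN( "a = b, c = d" );
        System.out.println( dn.getUpName() ); // "a = b, c = d"
        System.out.println( dn.toString() );  // "a=b,c=d"
    }
}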
ldap/src/test/java/org/apache/directory/shared/ldap/name/LdapDNTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * */ package org.apache.directory.shared.ldap.name; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import javax.naming.CompoundName; import javax.naming.InvalidNameException; import javax.naming.Name; import javax.naming.NamingException; import javax.naming.ldap.LdapName; import org.apache.directory.shared.ldap.name.LdapDN; import org.apache.directory.shared.ldap.name.LdapDnParser; import org.apache.directory.shared.ldap.name.Rdn; import org.apache.directory.shared.ldap.schema.normalizers.DeepTrimToLowerNormalizer; import org.apache.directory.shared.ldap.schema.normalizers.OidNormalizer; import org.apache.directory.shared.ldap.util.StringTools; import org.junit.Before; import org.junit.Test; import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.fail; import static org.junit.Assert.assertSame; /** * Test the class LdapDN * * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a> * @version $Rev$, $Date$, */ public class LdapDNTest { private Map<String, OidNormalizer> oids; private Map<String, OidNormalizer> oidOids; /** * Initialize OIDs maps for normalization */ @Before public void initMapOids() { oids = new HashMap<String, OidNormalizer>(); oids.put( "dc", new OidNormalizer( "dc", new DeepTrimToLowerNormalizer() ) ); oids.put( "domaincomponent", new OidNormalizer( "dc", new DeepTrimToLowerNormalizer() ) ); oids.put( "0.9.2342.19200300.100.1.25", new OidNormalizer( "dc", new DeepTrimToLowerNormalizer() ) ); oids.put( "ou", new OidNormalizer( "ou", new DeepTrimToLowerNormalizer() ) ); oids.put( "organizationalUnitName", new OidNormalizer( "ou", new DeepTrimToLowerNormalizer() ) ); oids.put( "2.5.4.11", new OidNormalizer( "ou", new DeepTrimToLowerNormalizer() ) ); // Another map where we store OIDs instead of names. 
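        // 'oids' (above) normalizes attribute type aliases to their short names
        // ("dc", "ou"), whereas 'oidOids' (below) maps the same aliases to the
        // numeric OIDs 0.9.2342.19200300.100.1.25 and 2.5.4.11. Both use
        // DeepTrimToLowerNormalizer for the values.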
oidOids = new HashMap<String, OidNormalizer>(); oidOids.put( "dc", new OidNormalizer( "0.9.2342.19200300.100.1.25", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "domaincomponent", new OidNormalizer( "0.9.2342.19200300.100.1.25", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "0.9.2342.19200300.100.1.25", new OidNormalizer( "0.9.2342.19200300.100.1.25", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "ou", new OidNormalizer( "2.5.4.11", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "organizationalUnitName", new OidNormalizer( "2.5.4.11", new DeepTrimToLowerNormalizer() ) ); oidOids.put( "2.5.4.11", new OidNormalizer( "2.5.4.11", new DeepTrimToLowerNormalizer() ) ); } // ~ Methods // ------------------------------------------------------------------------------------ // CONSTRUCTOR functions -------------------------------------------------- /** * Test a null DN */ @Test public void testLdapDNNull() { LdapDN dn = new LdapDN(); assertEquals( "", dn.getUpName() ); assertTrue( dn.isEmpty() ); } /** * test an empty DN */ @Test public void testLdapDNEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "" ); assertEquals( "", dn.getUpName() ); assertTrue( dn.isEmpty() ); } /** * test a simple DN : a = b */ @Test public void testLdapDNSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); assertTrue( LdapDN.isValid( "a = b" ) ); assertEquals( "a = b", dn.getUpName() ); assertEquals( "a=b", dn.toString() ); } /** * test a simple DN with some spaces : "a = b " */ @Test public void testLdapDNSimpleWithSpaces() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b " ); assertTrue( LdapDN.isValid( "a = b " ) ); assertEquals( "a = b ", dn.getUpName() ); assertEquals( "a=b", dn.toString() ); } /** * test a composite DN : a = b, d = e */ @Test public void testLdapDNComposite() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b, c = d" ); assertTrue( LdapDN.isValid( "a = b, c = d" ) ); assertEquals( "a=b,c=d", dn.toString() ); assertEquals( "a = b, c = d", dn.getUpName() ); } /** * test a composite DN with spaces : a = b , d = e */ @Test public void testLdapDNCompositeWithSpaces() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b , c = d" ); assertTrue( LdapDN.isValid( "a = b , c = d" ) ); assertEquals( "a=b,c=d", dn.toString() ); assertEquals( "a = b , c = d", dn.getUpName() ); } /** * test a composite DN with or without spaces: a=b, a =b, a= b, a = b, a = b */ @Test public void testLdapDNCompositeWithSpace() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, a =b, a= b, a = b, a = b" ); assertTrue( LdapDN.isValid( "a=b, a =b, a= b, a = b, a = b" ) ); assertEquals( "a=b,a=b,a=b,a=b,a=b", dn.toString() ); assertEquals( "a=b, a =b, a= b, a = b, a = b", dn.getUpName() ); } /** * test a composite DN with differents separators : a=b;c=d,e=f It should * return a=b,c=d,e=f (the ';' is replaced by a ',') */ @Test public void testLdapDNCompositeSepators() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b;c=d,e=f" ); assertTrue( LdapDN.isValid( "a=b;c=d,e=f" ) ); assertEquals( "a=b,c=d,e=f", dn.toString() ); assertEquals( "a=b;c=d,e=f", dn.getUpName() ); } /** * test a simple DN with multiple NameComponents : a = b + c = d */ @Test public void testLdapDNSimpleMultivaluedAttribute() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b + c = d" ); assertTrue( LdapDN.isValid( "a = b + c = d" ) ); assertEquals( "a=b+c=d", dn.toString() ); assertEquals( "a = b + c = d", dn.getUpName() ); } /** * test a composite DN with 
multiple NC and separators : a=b+c=d, e=f + g=h + * i=j */ @Test public void testLdapDNCompositeMultivaluedAttribute() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b+c=d, e=f + g=h + i=j" ); assertTrue( LdapDN.isValid( "a=b+c=d, e=f + g=h + i=j" ) ); assertEquals( "a=b+c=d,e=f+g=h+i=j", dn.toString() ); assertEquals( "a=b+c=d, e=f + g=h + i=j", dn.getUpName() ); } /** * Test to see if a DN with multiRdn values is preserved after an addAll. */ @Test public void testAddAllWithMultivaluedAttribute() throws InvalidNameException { LdapDN dn = new LdapDN( "cn=Kate Bush+sn=Bush,ou=system" ); LdapDN target = new LdapDN(); assertTrue( LdapDN.isValid( "cn=Kate Bush+sn=Bush,ou=system" ) ); target.addAll( target.size(), dn ); assertEquals( "cn=Kate Bush+sn=Bush,ou=system", target.toString() ); assertEquals( "cn=Kate Bush+sn=Bush,ou=system", target.getUpName() ); } /** * test a simple DN with an oid prefix (uppercase) : OID.12.34.56 = azerty */ @Test public void testLdapDNOidUpper() throws InvalidNameException { LdapDN dn = new LdapDN( "OID.12.34.56 = azerty" ); assertTrue( LdapDN.isValid( "OID.12.34.56 = azerty" ) ); assertEquals( "oid.12.34.56=azerty", dn.toString() ); assertEquals( "OID.12.34.56 = azerty", dn.getUpName() ); } /** * test a simple DN with an oid prefix (lowercase) : oid.12.34.56 = azerty */ @Test public void testLdapDNOidLower() throws InvalidNameException { LdapDN dn = new LdapDN( "oid.12.34.56 = azerty" ); assertTrue( LdapDN.isValid( "oid.12.34.56 = azerty" ) ); assertEquals( "oid.12.34.56=azerty", dn.toString() ); assertEquals( "oid.12.34.56 = azerty", dn.getUpName() ); } /** * test a simple DN with an oid attribut without oid prefix : 12.34.56 = * azerty */ @Test public void testLdapDNOidWithoutPrefix() throws InvalidNameException { LdapDN dn = new LdapDN( "12.34.56 = azerty" ); assertTrue( LdapDN.isValid( "12.34.56 = azerty" ) ); assertEquals( "12.34.56=azerty", dn.toString() ); assertEquals( "12.34.56 = azerty", dn.getUpName() ); } /** * test a composite DN with an oid attribut wiithout oid prefix : 12.34.56 = * azerty; 7.8 = test */ @Test public void testLdapDNCompositeOidWithoutPrefix() throws InvalidNameException { LdapDN dn = new LdapDN( "12.34.56 = azerty; 7.8 = test" ); assertTrue( LdapDN.isValid( "12.34.56 = azerty; 7.8 = test" ) ); assertEquals( "12.34.56=azerty,7.8=test", dn.toString() ); assertEquals( "12.34.56 = azerty; 7.8 = test", dn.getUpName() ); } /** * test a simple DN with pair char attribute value : a = \,\=\+\<\>\#\;\\\"\C4\8D" */ @Test public void testLdapDNPairCharAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = \\,\\=\\+\\<\\>\\#\\;\\\\\\\"\\C4\\8D" ); assertTrue( LdapDN.isValid( "a = \\,\\=\\+\\<\\>\\#\\;\\\\\\\"\\C4\\8D" ) ); assertEquals( "a=\\,=\\+\\<\\>#\\;\\\\\\\"\u010D", dn.toString() ); assertEquals( "a = \\,\\=\\+\\<\\>\\#\\;\\\\\\\"\\C4\\8D", dn.getUpName() ); } /** * test a simple DN with pair char attribute value : "SN=Lu\C4\8Di\C4\87" */ @Test public void testLdapDNRFC253_Lucic() throws InvalidNameException { LdapDN dn = new LdapDN( "SN=Lu\\C4\\8Di\\C4\\87" ); assertTrue( LdapDN.isValid( "SN=Lu\\C4\\8Di\\C4\\87" ) ); assertEquals( "sn=Lu\u010Di\u0107", dn.toString() ); assertEquals( "SN=Lu\\C4\\8Di\\C4\\87", dn.getUpName() ); } /** * test a simple DN with hexString attribute value : a = #0010A0AAFF */ @Test public void testLdapDNHexStringAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = #0010A0AAFF" ); assertTrue( LdapDN.isValid( "a = #0010A0AAFF" ) ); assertEquals( 
"a=#0010A0AAFF", dn.toString() ); assertEquals( "a = #0010A0AAFF", dn.getUpName() ); } /** * test a simple DN with a # on first position */ @Test public void testLdapDNSharpFirst() throws InvalidNameException, NamingException { LdapDN dn = new LdapDN( "a = \\#this is a sharp" ); assertTrue( LdapDN.isValid( "a = \\#this is a sharp" ) ); assertEquals( "a=\\#this is a sharp", dn.toString() ); assertEquals( "a = \\#this is a sharp", dn.getUpName() ); Rdn rdn = dn.getRdn(); assertEquals( "a = \\#this is a sharp", rdn.getUpName() ); } /** * Normalize a simple DN with a # on first position */ @Test public void testNormalizeLdapDNSharpFirst() throws InvalidNameException, NamingException { LdapDN dn = new LdapDN( "ou = \\#this is a sharp" ); assertTrue( LdapDN.isValid( "ou = \\#this is a sharp" ) ); assertEquals( "ou=\\#this is a sharp", dn.toString() ); assertEquals( "ou = \\#this is a sharp", dn.getUpName() ); // Check the normalization now LdapDN ndn = dn.normalize( oidOids ); assertEquals( "ou = \\#this is a sharp", ndn.getUpName() ); assertEquals( "2.5.4.11=\\#this is a sharp", ndn.toString() ); } /** * Normalize a DN with sequence ESC ESC HEX HEX (\\DC). * This is a corner case for the parser and normalizer. */ @Test public void testNormalizeLdapDNEscEscHexHex() throws NamingException { LdapDN dn = new LdapDN( "ou = AC\\\\DC" ); assertTrue( LdapDN.isValid( "ou = AC\\\\DC" ) ); assertEquals( "ou=AC\\\\DC", dn.toString() ); assertEquals( "ou = AC\\\\DC", dn.getUpName() ); // Check the normalization now LdapDN ndn = dn.normalize( oidOids ); assertEquals( "ou = AC\\\\DC", ndn.getUpName() ); assertEquals( "2.5.4.11=ac\\\\dc", ndn.toString() ); } /** * test a simple DN with a wrong hexString attribute value : a = #0010Z0AAFF */ @Test public void testLdapDNWrongHexStringAttributeValue() { try { new LdapDN( "a = #0010Z0AAFF" ); fail(); } catch ( InvalidNameException ine ) { assertFalse( LdapDN.isValid( "a = #0010Z0AAFF" ) ); assertTrue( true ); } } /** * test a simple DN with a wrong hexString attribute value : a = #AABBCCDD3 */ @Test public void testLdapDNWrongHexStringAttributeValue2() { try { new LdapDN( "a = #AABBCCDD3" ); fail(); } catch ( InvalidNameException ine ) { assertFalse( LdapDN.isValid( "a = #AABBCCDD3" ) ); assertTrue( true ); } } /** * test a simple DN with a quote in attribute value : a = quoted \"value\" */ @Test public void testLdapDNQuoteInAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = quoted \\\"value\\\"" ); assertTrue( LdapDN.isValid( "a = quoted \\\"value\\\"" ) ); assertEquals( "a=quoted \\\"value\\\"", dn.toString() ); assertEquals( "a = quoted \\\"value\\\"", dn.getUpName() ); } /** * test a simple DN with quoted attribute value : a = \" quoted value \" */ @Test public void testLdapDNQuotedAttributeValue() throws InvalidNameException { LdapDN dn = new LdapDN( "a = \\\" quoted value \\\"" ); assertTrue( LdapDN.isValid( "a = \\\" quoted value \\\"" ) ); assertEquals( "a=\\\" quoted value \\\"", dn.toString() ); assertEquals( "a = \\\" quoted value \\\"", dn.getUpName() ); } /** * test a simple DN with a comma at the end */ @Test public void testLdapDNComaAtEnd() { assertFalse( LdapDN.isValid( "a = b," ) ); assertFalse( LdapDN.isValid( "a = b, " ) ); try { new LdapDN( "a = b," ); fail(); } catch ( InvalidNameException ine ) { assertTrue( true ); } } // REMOVE operation ------------------------------------------------------- /** * test a remove from position 0 */ @Test public void testLdapDNRemove0() throws InvalidNameException { LdapDN dn = new 
LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( "e=f", dn.remove( 0 ).toString() ); assertEquals( "a=b,c=d", dn.toString() ); assertEquals( "a=b, c=d", dn.getUpName() ); } /** * test a remove from position 1 */ @Test public void testLdapDNRemove1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( "c=d", dn.remove( 1 ).toString() ); assertEquals( "a=b, e=f", dn.getUpName() ); } /** * test a remove from position 2 */ @Test public void testLdapDNRemove2() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( "a=b", dn.remove( 2 ).toString() ); assertEquals( " c=d, e=f", dn.getUpName() ); } /** * test a remove from position 1 whith semi colon */ @Test public void testLdapDNRemove1WithSemiColon() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d; e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d; e=f" ) ); assertEquals( "c=d", dn.remove( 1 ).toString() ); assertEquals( "a=b, e=f", dn.getUpName() ); } /** * test a remove out of bound */ @Test public void testLdapDNRemoveOutOfBound() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d; e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d; e=f" ) ); try { dn.remove( 4 ); // We whould never reach this point fail(); } catch ( ArrayIndexOutOfBoundsException aoobe ) { assertTrue( true ); } } // SIZE operations /** * test a 0 size */ @Test public void testLdapDNSize0() { LdapDN dn = new LdapDN(); assertTrue( LdapDN.isValid( "" ) ); assertEquals( 0, dn.size() ); } /** * test a 1 size */ @Test public void testLdapDNSize1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); assertTrue( LdapDN.isValid( "a=b" ) ); assertEquals( 1, dn.size() ); } /** * test a 3 size */ @Test public void testLdapDNSize3() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b, c=d, e=f" ) ); assertEquals( 3, dn.size() ); } /** * test a 3 size with NameComponents */ @Test public void testLdapDNSize3NC() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b+c=d, c=d, e=f" ); assertTrue( LdapDN.isValid( "a=b+c=d, c=d, e=f" ) ); assertEquals( 3, dn.size() ); } /** * test size after operations */ @Test public void testLdapResizing() throws InvalidNameException { LdapDN dn = new LdapDN(); assertEquals( 0, dn.size() ); dn.add( "e = f" ); assertEquals( 1, dn.size() ); dn.add( "c = d" ); assertEquals( 2, dn.size() ); dn.remove( 0 ); assertEquals( 1, dn.size() ); dn.remove( 0 ); assertEquals( 0, dn.size() ); } // ADD Operations /** * test Add on a new LdapDN */ @Test public void testLdapEmptyAdd() throws InvalidNameException { LdapDN dn = new LdapDN(); dn.add( "e = f" ); assertEquals( "e=f", dn.toString() ); assertEquals( "e = f", dn.getUpName() ); assertEquals( 1, dn.size() ); } /** * test Add to an existing LdapDN */ @Test public void testLdapDNAdd() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( "e = f" ); assertEquals( "e=f,a=b,c=d", dn.toString() ); assertEquals( "e = f,a=b, c=d", dn.getUpName() ); assertEquals( 3, dn.size() ); } /** * test Add a composite RDN to an existing LdapDN */ @Test public void testLdapDNAddComposite() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( "e = f + g = h" ); // Warning ! 
The order of AVAs has changed during the parsing // This has no impact on the correctness of the DN, but the // String used to do the comparizon should be inverted. assertEquals( "e=f+g=h,a=b,c=d", dn.toString() ); assertEquals( 3, dn.size() ); } /** * test Add at the end of an existing LdapDN */ @Test public void testLdapDNAddEnd() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( dn.size(), "e = f" ); assertEquals( "e = f,a=b, c=d", dn.getUpName() ); assertEquals( 3, dn.size() ); } /** * test Add at the start of an existing LdapDN */ @Test public void testLdapDNAddStart() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( 0, "e = f" ); assertEquals( "a=b, c=d,e = f", dn.getUpName() ); assertEquals( 3, dn.size() ); } /** * test Add at the middle of an existing LdapDN */ @Test public void testLdapDNAddMiddle() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.add( 1, "e = f" ); assertEquals( "a=b,e = f, c=d", dn.getUpName() ); assertEquals( 3, dn.size() ); } // ADD ALL Operations /** * Test AddAll * * @throws InvalidNameException */ @Test public void testLdapDNAddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN( "c = d" ); dn.addAll( dn2 ); assertEquals( "c = d,a = b", dn.getUpName() ); } /** * Test AddAll with an empty added name * * @throws InvalidNameException */ @Test public void testLdapDNAddAllAddedNameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN(); dn.addAll( dn2 ); assertEquals( "a=b", dn.toString() ); assertEquals( "a = b", dn.getUpName() ); } /** * Test AddAll to an empty name * * @throws InvalidNameException */ @Test public void testLdapDNAddAllNameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN(); LdapDN dn2 = new LdapDN( "a = b" ); dn.addAll( dn2 ); assertEquals( "a = b", dn.getUpName() ); } /** * Test AddAll at position 0 * * @throws InvalidNameException */ @Test public void testLdapDNAt0AddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN( "c = d" ); dn.addAll( 0, dn2 ); assertEquals( "a = b,c = d", dn.getUpName() ); } /** * Test AddAll at position 1 * * @throws InvalidNameException */ @Test public void testLdapDNAt1AddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN( "c = d" ); dn.addAll( 1, dn2 ); assertEquals( "c = d,a = b", dn.getUpName() ); } /** * Test AddAll at the middle * * @throws InvalidNameException */ @Test public void testLdapDNAtTheMiddleAddAll() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b, c = d" ); LdapDN dn2 = new LdapDN( "e = f" ); dn.addAll( 1, dn2 ); assertEquals( "a = b,e = f, c = d", dn.getUpName() ); } /** * Test AddAll with an empty added name at position 0 * * @throws InvalidNameException */ @Test public void testLdapDNAddAllAt0AddedNameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); LdapDN dn2 = new LdapDN(); dn.addAll( 0, dn2 ); assertEquals( "a=b", dn.toString() ); assertEquals( "a = b", dn.getUpName() ); } /** * Test AddAll to an empty name at position 0 * * @throws InvalidNameException */ @Test public void testLdapDNAddAllAt0NameEmpty() throws InvalidNameException { LdapDN dn = new LdapDN(); LdapDN dn2 = new LdapDN( "a = b" ); dn.addAll( 0, dn2 ); assertEquals( "a = b", dn.getUpName() ); } // GET PREFIX actions /** * Get the prefix at pos 0 */ @Test public void testLdapDNGetPrefixPos0() throws InvalidNameException { LdapDN dn = new 
LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 0 ) ); assertEquals( "", newDn.getUpName() ); } /** * Get the prefix at pos 1 */ @Test public void testLdapDNGetPrefixPos1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 1 ) ); assertEquals( "e = f", newDn.getUpName() ); } /** * Get the prefix at pos 2 */ @Test public void testLdapDNGetPrefixPos2() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 2 ) ); assertEquals( " c=d,e = f", newDn.getUpName() ); } /** * Get the prefix at pos 3 */ @Test public void testLdapDNGetPrefixPos3() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 3 ) ); assertEquals( "a=b, c=d,e = f", newDn.getUpName() ); } /** * Get the prefix out of bound */ @Test public void testLdapDNGetPrefixPos4() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); try { dn.getPrefix( 4 ); // We should not reach this point. fail(); } catch ( ArrayIndexOutOfBoundsException aoobe ) { assertTrue( true ); } } /** * Get the prefix of an empty LdapName */ @Test public void testLdapDNGetPrefixEmptyDN() { LdapDN dn = new LdapDN(); LdapDN newDn = ( ( LdapDN ) dn.getPrefix( 0 ) ); assertEquals( "", newDn.getUpName() ); } // GET SUFFIX operations /** * Get the suffix at pos 0 */ @Test public void testLdapDNGetSuffixPos0() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 0 ) ); assertEquals( "a=b, c=d,e = f", newDn.getUpName() ); } /** * Get the suffix at pos 1 */ @Test public void testLdapDNGetSuffixPos1() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 1 ) ); assertEquals( "a=b, c=d", newDn.getUpName() ); } /** * Get the suffix at pos 2 */ @Test public void testLdapDNGetSuffixPos2() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 2 ) ); assertEquals( "a=b", newDn.getUpName() ); } /** * Get the suffix at pos 3 */ @Test public void testLdapDNGetSuffixPos3() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 3 ) ); assertEquals( "", newDn.getUpName() ); } /** * Get the suffix out of bound */ @Test public void testLdapDNGetSuffixPos4() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); try { dn.getSuffix( 4 ); // We should not reach this point. 
fail(); } catch ( ArrayIndexOutOfBoundsException aoobe ) { assertTrue( true ); } } /** * Get the suffix of an empty LdapName */ @Test public void testLdapDNGetSuffixEmptyDN() { LdapDN dn = new LdapDN(); LdapDN newDn = ( ( LdapDN ) dn.getSuffix( 0 ) ); assertEquals( "", newDn.getUpName() ); } // IS EMPTY operations /** * Test that a LdapDN is empty */ @Test public void testLdapDNIsEmpty() { LdapDN dn = new LdapDN(); assertEquals( true, dn.isEmpty() ); } /** * Test that a LdapDN is empty */ @Test public void testLdapDNNotEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); assertEquals( false, dn.isEmpty() ); } /** * Test that a LdapDN is empty */ @Test public void testLdapDNRemoveIsEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d" ); dn.remove( 0 ); dn.remove( 0 ); assertEquals( true, dn.isEmpty() ); } // STARTS WITH operations /** * Test a startsWith a null LdapDN */ @Test public void testLdapDNStartsWithNull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( null ) ); } /** * Test a startsWith an empty LdapDN */ @Test public void testLdapDNStartsWithEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN() ) ); } /** * Test a startsWith an simple LdapDN */ @Test public void testLdapDNStartsWithSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN( "e=f" ) ) ); } /** * Test a startsWith a complex LdapDN */ @Test public void testLdapDNStartsWithComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN( "c = d, e = f" ) ) ); } /** * Test a startsWith a complex LdapDN */ @Test public void testLdapDNStartsWithComplexMixedCase() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( false, dn.startsWith( new LdapDN( "c = D, E = f" ) ) ); } /** * Test a startsWith a full LdapDN */ @Test public void testLdapDNStartsWithFull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.startsWith( new LdapDN( "a= b; c = d, e = f" ) ) ); } /** * Test a startsWith which returns false */ @Test public void testLdapDNStartsWithWrong() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( false, dn.startsWith( new LdapDN( "c = t, e = f" ) ) ); } // ENDS WITH operations /** * Test a endsWith a null LdapDN */ @Test public void testLdapDNEndsWithNull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( null ) ); } /** * Test a endsWith an empty LdapDN */ @Test public void testLdapDNEndsWithEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN() ) ); } /** * Test a endsWith an simple LdapDN */ @Test public void testLdapDNEndsWithSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN( "a=b" ) ) ); } /** * Test a endsWith a complex LdapDN */ @Test public void testLdapDNEndsWithComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN( "a = b, c = d" ) ) ); } /** * Test a endsWith a complex LdapDN */ @Test public void testLdapDNEndsWithComplexMixedCase() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); 
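        // The assertion below expects false: the mixed-case suffix "a = B, C = d" does not match
        // "a=b, c=d,e = f", presumably because attribute values are compared here without schema
        // normalization, so their case is preserved.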
assertEquals( false, dn.endsWith( new LdapDN( "a = B, C = d" ) ) ); } /** * Test a endsWith a full LdapDN */ @Test public void testLdapDNEndsWithFull() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( true, dn.endsWith( new LdapDN( "a= b; c = d, e = f" ) ) ); } /** * Test a endsWith which returns false */ @Test public void testLdapDNEndsWithWrong() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b, c=d,e = f" ); assertEquals( false, dn.endsWith( new LdapDN( "a = b, e = f" ) ) ); } // GET ALL operations /** * test a getAll operation on a null DN */ @Test public void testLdapDNGetAllNull() { LdapDN dn = new LdapDN(); Enumeration<String> nc = dn.getAll(); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on an empty DN */ @Test public void testLdapDNGetAllEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "" ); Enumeration<String> nc = dn.getAll(); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on a simple DN */ @Test public void testLdapDNGetAllSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); Enumeration<String> nc = dn.getAll(); assertEquals( true, nc.hasMoreElements() ); assertEquals( "a=b", nc.nextElement() ); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on a complex DN */ @Test public void testLdapDNGetAllComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "e=f+g=h,a=b,c=d" ); Enumeration<String> nc = dn.getAll(); assertEquals( true, nc.hasMoreElements() ); assertEquals( "c=d", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); assertEquals( "a=b", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); assertEquals( "e=f+g=h", nc.nextElement() ); assertEquals( false, nc.hasMoreElements() ); } /** * test a getAll operation on a complex DN */ @Test public void testLdapDNGetAllComplexOrdered() throws InvalidNameException { LdapDN dn = new LdapDN( "g=h+e=f,a=b,c=d" ); Enumeration<String> nc = dn.getAll(); assertEquals( true, nc.hasMoreElements() ); assertEquals( "c=d", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); assertEquals( "a=b", nc.nextElement() ); assertEquals( true, nc.hasMoreElements() ); // The lowest atav should be the first one assertEquals( "e=f+g=h", nc.nextElement() ); assertEquals( false, nc.hasMoreElements() ); } // CLONE Operation /** * test a clone operation on a empty DN */ @Test public void testLdapDNCloneEmpty() { LdapDN dn = new LdapDN(); LdapDN clone = ( LdapDN ) dn.clone(); assertEquals( "", clone.getUpName() ); } /** * test a clone operation on a simple DN */ @Test public void testLdapDNCloneSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a=b" ); LdapDN clone = ( LdapDN ) dn.clone(); assertEquals( "a=b", clone.getUpName() ); dn.remove( 0 ); assertEquals( "a=b", clone.getUpName() ); } /** * test a clone operation on a complex DN */ @Test public void testLdapDNCloneComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "e=f+g=h,a=b,c=d" ); LdapDN clone = ( LdapDN ) dn.clone(); assertEquals( "e=f+g=h,a=b,c=d", clone.getUpName() ); dn.remove( 2 ); assertEquals( "e=f+g=h,a=b,c=d", clone.getUpName() ); } // GET operations /** * test a get in a null DN */ @Test public void testLdapDNGetNull() { LdapDN dn = new LdapDN(); assertEquals( "", dn.get( 0 ) ); } /** * test a get in an empty DN */ @Test public void testLdapDNGetEmpty() throws InvalidNameException { LdapDN dn = new LdapDN( "" ); assertEquals( "", dn.get( 0 ) ); } /** * 
test a get in a simple DN */ @Test public void testLdapDNGetSimple() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b" ); assertEquals( "a=b", dn.get( 0 ) ); } /** * test a get in a complex DN */ @Test public void testLdapDNGetComplex() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b + c= d, e= f; g =h" ); assertEquals( "g=h", dn.get( 0 ) ); assertEquals( "e=f", dn.get( 1 ) ); assertEquals( "a=b+c=d", dn.get( 2 ) ); } /** * test a get out of bound */ @Test public void testLdapDNGetOutOfBound() throws InvalidNameException { LdapDN dn = new LdapDN( "a = b + c= d, e= f; g =h" ); try { dn.get( 4 ); fail(); } catch ( IndexOutOfBoundsException aioob ) { assertTrue( true ); } } /** * Tests the examples from the JNDI tutorials to make sure LdapName behaves * appropriately. The example can be found online <a href="">here</a>. * * @throws Exception * if anything goes wrong */ @Test public void testJNDITutorialExample() throws Exception { // Parse the name Name name = new LdapDN( "cn=John,ou=People,ou=Marketing" ); // Remove the second component from the head: ou=People String out = name.remove( 1 ).toString(); assertEquals( "ou=People", out ); // Add to the head (first): cn=John,ou=Marketing,ou=East out = name.add( 0, "ou=East" ).toString(); assertEquals( "cn=John,ou=Marketing,ou=East", out ); // Add to the tail (last): cn=HomeDir,cn=John,ou=Marketing,ou=East out = name.add( "cn=HomeDir" ).toString(); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", out ); } @Test public void testAttributeEqualsIsCaseInSensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir" ); Name name2 = new LdapDN( "CN=HomeDir" ); assertTrue( name1.equals( name2 ) ); } @Test public void testAttributeTypeEqualsIsCaseInsensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=HomeDir+CN=WorkDir" ); assertTrue( name1.equals( name2 ) ); } @Test public void testNameEqualsIsInsensitiveToAttributesOrder() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=WorkDir+cn=HomeDir" ); assertTrue( name1.equals( name2 ) ); } @Test public void testAttributeComparisonIsCaseInSensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir" ); Name name2 = new LdapDN( "CN=HomeDir" ); assertEquals( 0, name1.compareTo( name2 ) ); } @Test public void testAttributeTypeComparisonIsCaseInsensitive() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=HomeDir+CN=WorkDir" ); assertEquals( 0, name1.compareTo( name2 ) ); } @Test public void testNameComparisonIsInsensitiveToAttributesOrder() throws Exception { Name name1 = new LdapDN( "cn=HomeDir+cn=WorkDir" ); Name name2 = new LdapDN( "cn=WorkDir+cn=HomeDir" ); assertEquals( 0, name1.compareTo( name2 ) ); } @Test public void testNameComparisonIsInsensitiveToAttributesOrderFailure() throws Exception { Name name1 = new LdapDN( "cn= HomeDir+cn=Workdir" ); Name name2 = new LdapDN( "cn = Work+cn=HomeDir" ); assertEquals( 1, name1.compareTo( name2 ) ); } /** * Test the encoding of a LdanDN */ @Test public void testNameToBytes() throws Exception { LdapDN dn = new LdapDN( "cn = John, ou = People, OU = Marketing" ); byte[] bytes = LdapDN.getBytes( dn ); assertEquals( 30, LdapDN.getNbBytes( dn ) ); assertEquals( "cn=John,ou=People,ou=Marketing", new String( bytes, "UTF-8" ) ); } @Test public void testStringParser() throws Exception { String dn = StringTools.utf8ToString( new byte[] { 'C', 'N', ' ', '=', ' ', 'E', 'm', 
'm', 'a', 'n', 'u', 'e', 'l', ' ', ' ', 'L', ( byte ) 0xc3, ( byte ) 0xa9, 'c', 'h', 'a', 'r', 'n', 'y' } ); Name name = LdapDnParser.getNameParser().parse( dn ); assertEquals( dn, ( ( LdapDN ) name ).getUpName() ); assertEquals( "cn=Emmanuel L\u00E9charny", name.toString() ); } /** * Class to test for void LdapName(String) * * @throws Exception * if anything goes wrong. */ @Test public void testLdapNameString() throws Exception { Name name = new LdapDN( "" ); Name name50 = new LdapDN(); assertEquals( name50, name ); Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name copy = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name4 = new LdapDN( "cn=Website,cn=John,ou=Marketing,ou=West" ); Name name5 = new LdapDN( "cn=Airline,cn=John,ou=Marketing,ou=West" ); assertTrue( name0.compareTo( copy ) == 0 ); assertTrue( name0.compareTo( name1 ) < 0 ); assertTrue( name0.compareTo( name2 ) < 0 ); assertTrue( name1.compareTo( name2 ) < 0 ); assertTrue( name2.compareTo( name1 ) > 0 ); assertTrue( name2.compareTo( name0 ) > 0 ); assertTrue( name2.compareTo( name3 ) < 0 ); assertTrue( name2.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name5 ) > 0 ); assertTrue( name4.compareTo( name5 ) > 0 ); assertTrue( name2.compareTo( name5 ) < 0 ); } /** * Class to test for void LdapName() */ @Test public void testLdapName() { Name name = new LdapDN(); assertTrue( name.toString().equals( "" ) ); } /** * Class to test for void LdapName(List) */ @Test public void testLdapNameList() throws InvalidNameException { List<String> list = new ArrayList<String>(); list.add( "ou=People" ); list.add( "dc=example" ); list.add( "dc=com" ); Name name = new LdapDN( list ); assertTrue( name.toString().equals( "ou=People,dc=example,dc=com" ) ); } /** * Class to test for void LdapName(Iterator) */ @Test public void testLdapNameIterator() throws InvalidNameException { List<String> list = new ArrayList<String>(); list.add( "ou=People" ); list.add( "dc=example" ); list.add( "dc=com" ); Name name = new LdapDN( list.iterator() ); assertTrue( name.toString().equals( "ou=People,dc=example,dc=com" ) ); } /** * Class to test for Object clone() * * @throws Exception * if anything goes wrong. */ @Test public void testClone() throws Exception { String strName = "cn=HomeDir,cn=John,ou=Marketing,ou=East"; Name name = new LdapDN( strName ); assertEquals( name, name.clone() ); } /** * Class to test for compareTo * * @throws Exception * if anything goes wrong. 
*/ @Test public void testCompareTo() throws Exception { Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name copy = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name4 = new LdapDN( "cn=Website,cn=John,ou=Marketing,ou=West" ); Name name5 = new LdapDN( "cn=Airline,cn=John,ou=Marketing,ou=West" ); assertTrue( name0.compareTo( copy ) == 0 ); assertTrue( name0.compareTo( name1 ) < 0 ); assertTrue( name0.compareTo( name2 ) < 0 ); assertTrue( name1.compareTo( name2 ) < 0 ); assertTrue( name2.compareTo( name1 ) > 0 ); assertTrue( name2.compareTo( name0 ) > 0 ); assertTrue( name2.compareTo( name3 ) < 0 ); assertTrue( name2.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name4 ) < 0 ); assertTrue( name3.compareTo( name5 ) > 0 ); assertTrue( name4.compareTo( name5 ) > 0 ); assertTrue( name2.compareTo( name5 ) < 0 ); List<Name> list = new ArrayList<Name>(); Comparator<Name> comparator = new Comparator<Name>() { public int compare( Name obj1, Name obj2 ) { Name n1 = obj1; Name n2 = obj2; return n1.compareTo( n2 ); } public boolean equals( Object obj ) { return super.equals( obj ); } /** * Compute the instance's hash code * @return the instance's hash code */ public int hashCode() { return super.hashCode(); } }; list.add( name0 ); list.add( name1 ); list.add( name2 ); list.add( name3 ); list.add( name4 ); list.add( name5 ); Collections.sort( list, comparator ); assertEquals( name0, list.get( 0 ) ); assertEquals( name1, list.get( 1 ) ); assertEquals( name2, list.get( 2 ) ); assertEquals( name5, list.get( 3 ) ); assertEquals( name3, list.get( 4 ) ); assertEquals( name4, list.get( 5 ) ); } /** * Class to test for size * * @throws Exception * if anything goes wrong. */ @Test public void testSize() throws Exception { Name name0 = new LdapDN( "" ); Name name1 = new LdapDN( "ou=East" ); Name name2 = new LdapDN( "ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name4 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name5 = new LdapDN( "cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name6 = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); assertEquals( 0, name0.size() ); assertEquals( 1, name1.size() ); assertEquals( 2, name2.size() ); assertEquals( 3, name3.size() ); assertEquals( 4, name4.size() ); assertEquals( 5, name5.size() ); assertEquals( 6, name6.size() ); } /** * Class to test for isEmpty * * @throws Exception * if anything goes wrong. */ @Test public void testIsEmpty() throws Exception { Name name0 = new LdapDN( "" ); Name name1 = new LdapDN( "ou=East" ); Name name2 = new LdapDN( "ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name4 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name5 = new LdapDN( "cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name6 = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); assertEquals( true, name0.isEmpty() ); assertEquals( false, name1.isEmpty() ); assertEquals( false, name2.isEmpty() ); assertEquals( false, name3.isEmpty() ); assertEquals( false, name4.isEmpty() ); assertEquals( false, name5.isEmpty() ); assertEquals( false, name6.isEmpty() ); } /** * Class to test for getAll * * @throws Exception * if anything goes wrong. 
*/ @Test public void testGetAll() throws Exception { Name name0 = new LdapDN( "" ); Name name1 = new LdapDN( "ou=East" ); Name name2 = new LdapDN( "ou=Marketing,ou=East" ); Name name3 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name4 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name5 = new LdapDN( "cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Name name6 = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Enumeration<String> enum0 = name0.getAll(); assertEquals( false, enum0.hasMoreElements() ); Enumeration<String> enum1 = name1.getAll(); assertEquals( true, enum1.hasMoreElements() ); for ( int i = 0; enum1.hasMoreElements(); i++ ) { String element = ( String ) enum1.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } } Enumeration<String> enum2 = name2.getAll(); assertEquals( true, enum2.hasMoreElements() ); for ( int i = 0; enum2.hasMoreElements(); i++ ) { String element = ( String ) enum2.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } } Enumeration<String> enum3 = name3.getAll(); assertEquals( true, enum3.hasMoreElements() ); for ( int i = 0; enum3.hasMoreElements(); i++ ) { String element = ( String ) enum3.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } } Enumeration<String> enum4 = name4.getAll(); assertEquals( true, enum4.hasMoreElements() ); for ( int i = 0; enum4.hasMoreElements(); i++ ) { String element = ( String ) enum4.nextElement(); if ( i == 0 ) { assertEquals( "ou=East", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element ); } } Enumeration<String> enum5 = name5.getAll(); assertEquals( true, enum5.hasMoreElements() ); for ( int i = 0; enum5.hasMoreElements(); i++ ) { String element = ( String ) enum5.nextElement(); if ( i == 0 ) { assertEquals( "ou=West", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element ); } if ( i == 4 ) { assertEquals( "cn=Website", element ); } } Enumeration<String> enum6 = name6.getAll(); assertEquals( true, enum6.hasMoreElements() ); for ( int i = 0; enum6.hasMoreElements(); i++ ) { String element = ( String ) enum6.nextElement(); if ( i == 0 ) { assertEquals( "ou=West", element ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element ); } if ( i == 2 ) { assertEquals( "cn=John", element ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element ); } if ( i == 4 ) { assertEquals( "cn=Website", element ); } if ( i == 5 ) { assertEquals( "cn=Airline", element ); } } } /** * Class to test for getAllRdn * * @throws Exception * if anything goes wrong. 
*/ @Test public void testGetAllRdn() throws Exception { LdapDN name = new LdapDN( "cn=Airline,cn=Website,cn=HomeDir,cn=John,ou=Marketing,ou=West" ); Enumeration<Rdn> rdns = name.getAllRdn(); assertEquals( true, rdns.hasMoreElements() ); for ( int i = 0; rdns.hasMoreElements(); i++ ) { Rdn element = ( Rdn ) rdns.nextElement(); if ( i == 0 ) { assertEquals( "ou=West", element.toString() ); } if ( i == 1 ) { assertEquals( "ou=Marketing", element.toString() ); } if ( i == 2 ) { assertEquals( "cn=John", element.toString() ); } if ( i == 3 ) { assertEquals( "cn=HomeDir", element.toString() ); } if ( i == 4 ) { assertEquals( "cn=Website", element.toString() ); } if ( i == 5 ) { assertEquals( "cn=Airline", element.toString() ); } } } /** * Class to test for get * * @throws Exception * anything goes wrong */ @Test public void testGet() throws Exception { Name name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "cn=HomeDir", name.get( 3 ) ); assertEquals( "cn=John", name.get( 2 ) ); assertEquals( "ou=Marketing", name.get( 1 ) ); assertEquals( "ou=East", name.get( 0 ) ); } /** * Class to test for getSuffix * * @throws Exception * anything goes wrong */ @Test public void testGetXSuffix() throws Exception { Name name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "", name.getSuffix( 4 ).toString() ); assertEquals( "cn=HomeDir", name.getSuffix( 3 ).toString() ); assertEquals( "cn=HomeDir,cn=John", name.getSuffix( 2 ).toString() ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing", name.getSuffix( 1 ).toString() ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", name.getSuffix( 0 ).toString() ); } /** * Class to test for getPrefix * * @throws Exception * anything goes wrong */ @Test public void testGetPrefix() throws Exception { Name name = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", name.getPrefix( 4 ).toString() ); assertEquals( "cn=John,ou=Marketing,ou=East", name.getPrefix( 3 ).toString() ); assertEquals( "ou=Marketing,ou=East", name.getPrefix( 2 ).toString() ); assertEquals( "ou=East", name.getPrefix( 1 ).toString() ); assertEquals( "", name.getPrefix( 0 ).toString() ); } /** * Class to test for startsWith * * @throws Exception * anything goes wrong */ @Test public void testStartsWith() throws Exception { Name n0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name n1 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name n2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name n3 = new LdapDN( "ou=Marketing,ou=East" ); Name n4 = new LdapDN( "ou=East" ); Name n5 = new LdapDN( "" ); Name n6 = new LdapDN( "cn=HomeDir" ); Name n7 = new LdapDN( "cn=HomeDir,cn=John" ); Name n8 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing" ); // Check with LdapDN assertTrue( n0.startsWith( n1 ) ); assertTrue( n0.startsWith( n2 ) ); assertTrue( n0.startsWith( n3 ) ); assertTrue( n0.startsWith( n4 ) ); assertTrue( n0.startsWith( n5 ) ); assertTrue( !n0.startsWith( n6 ) ); assertTrue( !n0.startsWith( n7 ) ); assertTrue( !n0.startsWith( n8 ) ); Name nn0 = new LdapDN( "cn=zero" ); Name nn10 = new LdapDN( "cn=one,cn=zero" ); Name nn210 = new LdapDN( "cn=two,cn=one,cn=zero" ); Name nn3210 = new LdapDN( "cn=three,cn=two,cn=one,cn=zero" ); assertTrue( nn0.startsWith( nn0 ) ); assertTrue( nn10.startsWith( nn0 ) ); assertTrue( nn210.startsWith( nn0 ) ); assertTrue( nn3210.startsWith( nn0 ) ); assertTrue( nn10.startsWith( nn10 ) ); assertTrue( nn210.startsWith( nn10 ) ); assertTrue( 
nn3210.startsWith( nn10 ) ); assertTrue( nn210.startsWith( nn210 ) ); assertTrue( nn3210.startsWith( nn210 ) ); assertTrue( nn3210.startsWith( nn3210 ) ); // Check with LdapName Name name0 = new LdapName( "cn=zero" ); Name name10 = new LdapName( "cn=one,cn=zero" ); Name name210 = new LdapName( "cn=two,cn=one,cn=zero" ); Name name3210 = new LdapName( "cn=three,cn=two,cn=one,cn=zero" ); // Check with Name assertTrue( nn0.startsWith( name0 ) ); assertTrue( nn10.startsWith( name0 ) ); assertTrue( nn210.startsWith( name0 ) ); assertTrue( nn3210.startsWith( name0 ) ); assertTrue( nn10.startsWith( name10 ) ); assertTrue( nn210.startsWith( name10 ) ); assertTrue( nn3210.startsWith( name10 ) ); assertTrue( nn210.startsWith( name210 ) ); assertTrue( nn3210.startsWith( name210 ) ); assertTrue( nn3210.startsWith( name3210 ) ); assertTrue( "Starting DN fails with ADS LdapDN", new LdapDN( "ou=foo,dc=apache,dc=org" ).startsWith( new LdapDN( "dc=apache,dc=org" ) ) ); assertTrue( "Starting DN fails with Java LdapName", new LdapDN( "ou=foo,dc=apache,dc=org" ).startsWith( new LdapName( "dc=apache,dc=org" ) ) ); assertTrue( "Starting DN fails with Java LdapName", new LdapDN( "dc=apache,dc=org" ).startsWith( new LdapName( "dc=apache,dc=org" ) ) ); } /** * Class to test for endsWith * * @throws Exception * anything goes wrong */ @Test public void testEndsWith() throws Exception { Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); Name name3 = new LdapDN( "ou=Marketing,ou=East" ); Name name4 = new LdapDN( "ou=East" ); Name name5 = new LdapDN( "" ); Name name6 = new LdapDN( "cn=HomeDir" ); Name name7 = new LdapDN( "cn=HomeDir,cn=John" ); Name name8 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing" ); assertTrue( name0.endsWith( name1 ) ); assertTrue( !name0.endsWith( name2 ) ); assertTrue( !name0.endsWith( name3 ) ); assertTrue( !name0.endsWith( name4 ) ); assertTrue( name0.endsWith( name5 ) ); assertTrue( name0.endsWith( name6 ) ); assertTrue( name0.endsWith( name7 ) ); assertTrue( name0.endsWith( name8 ) ); } /** * Class to test for Name addAll(Name) * * @throws Exception * when anything goes wrong */ @Test public void testAddAllName0() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); } /** * Class to test for Name addAll(Name) * * @throws Exception * when anything goes wrong */ @Test public void testAddAllNameExisting0() throws Exception { Name name1 = new LdapDN( "ou=Marketing,ou=East" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John" ); Name nameAdded = new LdapDN( "cn=HomeDir,cn=John, ou=Marketing,ou=East" ); assertTrue( nameAdded.equals( name1.addAll( name2 ) ) ); } /** * Class to test for Name addAll(Name) * * @throws Exception * when anything goes wrong */ @Test public void testAddAllName1() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=HomeDir,cn=John" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); assertTrue( name2.equals( name.addAll( name1 ) ) ); } /** * Class to test for Name addAll(int, Name) * * @throws Exception * when something goes wrong */ @Test public void testAddAllintName0() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "ou=Marketing,ou=East" ); Name 
name1 = new LdapDN( "cn=HomeDir,cn=John" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); assertTrue( name2.equals( name.addAll( 2, name1 ) ) ); } /** * Class to test for Name addAll(int, Name) * * @throws Exception * when something goes wrong */ @Test public void testAddAllintName1() throws Exception { Name name = new LdapDN(); Name name0 = new LdapDN( "cn=HomeDir,ou=Marketing,ou=East" ); Name name1 = new LdapDN( "cn=John" ); Name name2 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name0.equals( name.addAll( name0 ) ) ); assertTrue( name2.equals( name.addAll( 2, name1 ) ) ); Name name3 = new LdapDN( "cn=Airport" ); Name name4 = new LdapDN( "cn=Airport,cn=HomeDir,cn=John,ou=Marketing,ou=East" ); assertTrue( name4.equals( name.addAll( 4, name3 ) ) ); Name name5 = new LdapDN( "cn=ABC123" ); Name name6 = new LdapDN( "cn=Airport,cn=HomeDir,cn=ABC123,cn=John,ou=Marketing,ou=East" ); assertTrue( name6.equals( name.addAll( 3, name5 ) ) ); } /** * Class to test for Name add(String) * * @throws Exception * when something goes wrong */ @Test public void testAddString() throws Exception { Name name = new LdapDN(); assertEquals( name, new LdapDN( "" ) ); Name name4 = new LdapDN( "ou=East" ); name.add( "ou=East" ); assertEquals( name4, name ); Name name3 = new LdapDN( "ou=Marketing,ou=East" ); name.add( "ou=Marketing" ); assertEquals( name3, name ); Name name2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); name.add( "cn=John" ); assertEquals( name2, name ); Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); name.add( "cn=HomeDir" ); assertEquals( name0, name ); } /** * Class to test for Name add(int, String) * * @throws Exception * if anything goes wrong */ @Test public void testAddintString() throws Exception { Name name = new LdapDN(); assertEquals( name, new LdapDN( "" ) ); Name name4 = new LdapDN( "ou=East" ); name.add( "ou=East" ); assertEquals( name4, name ); Name name3 = new LdapDN( "ou=Marketing,ou=East" ); name.add( 1, "ou=Marketing" ); assertEquals( name3, name ); Name name2 = new LdapDN( "cn=John,ou=Marketing,ou=East" ); name.add( 2, "cn=John" ); assertEquals( name2, name ); Name name0 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East" ); name.add( 3, "cn=HomeDir" ); assertEquals( name0, name ); Name name5 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East,o=LL " + "Bean Inc." ); name.add( 0, "o=LL Bean Inc." ); assertEquals( name5, name ); Name name6 = new LdapDN( "cn=HomeDir,cn=John,ou=Marketing,ou=East,c=US,o=LL " + "Bean Inc." ); name.add( 1, "c=US" ); assertEquals( name6, name ); Name name7 = new LdapDN( "cn=HomeDir,cn=John,ou=Advertising,ou=Marketing," + "ou=East,c=US,o=LL " + "Bean Inc." 
); name.add( 4, "ou=Advertising" ); assertEquals( name7, name ); } /** * Class to test for remove * * @throws Exception * if anything goes wrong */ @Test public void testRemove() throws Exception { Name name = new LdapDN(); assertEquals( new LdapDN( "" ), name ); Name name3 = new LdapDN( "ou=Marketing" ); name.add( "ou=East" ); name.add( 1, "ou=Marketing" ); name.remove( 0 ); assertEquals( name3, name ); Name name2 = new LdapDN( "cn=HomeDir,ou=Marketing,ou=East" ); name.add( 0, "ou=East" ); name.add( 2, "cn=John" ); name.add( "cn=HomeDir" ); name.remove( 2 ); assertEquals( name2, name ); name.remove( 1 ); Name name1 = new LdapDN( "cn=HomeDir,ou=East" ); assertEquals( name1, name ); name.remove( 1 ); Name name0 = new LdapDN( "ou=East" ); assertEquals( name0, name ); name.remove( 0 ); assertEquals( new LdapDN( "" ), name ); } /** * Class to test for String toString() * * @throws Exception * if anything goes wrong */ @Test public void testToString() throws Exception { Name name = new LdapDN(); assertEquals( "", name.toString() ); name.add( "ou=East" ); assertEquals( "ou=East", name.toString() ); name.add( 1, "ou=Marketing" ); assertEquals( "ou=Marketing,ou=East", name.toString() ); name.add( "cn=John" ); assertEquals( "cn=John,ou=Marketing,ou=East", name.toString() ); name.add( "cn=HomeDir" ); assertEquals( "cn=HomeDir,cn=John,ou=Marketing,ou=East", name.toString() ); } /** * Class to test for boolean equals(Object) * * @throws Exception * if anything goes wrong */ @Test public void testEqualsObject() throws Exception { assertTrue( new LdapDN( "ou=People" ).equals( new LdapDN( "ou=People" ) ) ); assertTrue( !new LdapDN( "ou=People,dc=example,dc=com" ).equals( new LdapDN( "ou=People" ) ) ); assertTrue( !new LdapDN( "ou=people" ).equals( new LdapDN( "ou=People" ) ) ); assertTrue( !new LdapDN( "ou=Groups" ).equals( new LdapDN( "ou=People" ) ) ); } @Test public void testNameFrenchChars() throws Exception { String cn = new String( new byte[] { 'c', 'n', '=', 0x4A, ( byte ) 0xC3, ( byte ) 0xA9, 0x72, ( byte ) 0xC3, ( byte ) 0xB4, 0x6D, 0x65 }, "UTF-8" ); Name name = new LdapDN( cn ); assertEquals( "cn=J\u00e9r\u00f4me", name.toString() ); } @Test public void testNameGermanChars() throws Exception { String cn = new String( new byte[] { 'c', 'n', '=', ( byte ) 0xC3, ( byte ) 0x84, ( byte ) 0xC3, ( byte ) 0x96, ( byte ) 0xC3, ( byte ) 0x9C, ( byte ) 0xC3, ( byte ) 0x9F, ( byte ) 0xC3, ( byte ) 0xA4, ( byte ) 0xC3, ( byte ) 0xB6, ( byte ) 0xC3, ( byte ) 0xBC }, "UTF-8" ); Name name = new LdapDN( cn ); assertEquals( "cn=\u00C4\u00D6\u00DC\u00DF\u00E4\u00F6\u00FC", name.toString() ); } @Test public void testNameTurkishChars() throws Exception { String cn = new String( new byte[] { 'c', 'n', '=', ( byte ) 0xC4, ( byte ) 0xB0, ( byte ) 0xC4, ( byte ) 0xB1, ( byte ) 0xC5, ( byte ) 0x9E, ( byte ) 0xC5, ( byte ) 0x9F, ( byte ) 0xC3, ( byte ) 0x96, ( byte ) 0xC3, ( byte ) 0xB6, ( byte ) 0xC3, ( byte ) 0x9C, ( byte ) 0xC3, ( byte ) 0xBC, ( byte ) 0xC4, ( byte ) 0x9E, ( byte ) 0xC4, ( byte ) 0x9F }, "UTF-8" ); Name name = new LdapDN( cn ); assertEquals( "cn=\u0130\u0131\u015E\u015F\u00D6\u00F6\u00DC\u00FC\u011E\u011F", name.toString() ); } /** * Class to test for toOid( Name, Map) */ @Test public void testLdapNameToName() throws Exception { List<String> list = new ArrayList<String>(); list.add( "ou= Some People " ); list.add( "dc = eXample" ); list.add( "dc= cOm" ); LdapDN name = new LdapDN( list.iterator() ); assertTrue( name.getUpName().equals( "ou= Some People ,dc = eXample,dc= cOm" ) ); Name result = 
LdapDN.normalize( name, oids ); assertTrue( result.toString().equals( "ou=some people,dc=example,dc=com" ) ); } @Test public void testRdnGetTypeUpName() throws Exception { List<String> list = new ArrayList<String>(); list.add( "ou= Some People " ); list.add( "dc = eXample" ); list.add( "dc= cOm" ); LdapDN name = new LdapDN( list.iterator() ); assertTrue( name.getUpName().equals( "ou= Some People ,dc = eXample,dc= cOm" ) ); Rdn rdn = name.getRdn(); assertEquals( "ou= Some People ", rdn.getUpName() ); assertEquals( "ou", rdn.getNormType() ); assertEquals( "ou", rdn.getUpType() ); LdapDN result = LdapDN.normalize( name, oidOids ); assertTrue( result.getNormName().equals( "2.5.4.11=some people,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ) ); assertTrue( name.getUpName().equals( "ou= Some People ,dc = eXample,dc= cOm" ) ); Rdn rdn2 = result.getRdn(); assertEquals( "ou= Some People ", rdn2.getUpName() ); assertEquals( "2.5.4.11", rdn2.getNormType() ); assertEquals( "ou", rdn2.getUpType() ); } /** * Class to test for toOid( Name, Map) with a NULL dn */ @Test public void testLdapNameToNameEmpty() throws Exception { LdapDN name = new LdapDN(); Name result = LdapDN.normalize( name, oids ); assertTrue( result.toString().equals( "" ) ); } /** * Class to test for toOid( Name, Map) with a multiple NameComponent */ @Test public void testLdapNameToNameMultiNC() throws Exception { LdapDN name = new LdapDN( "2.5.4.11= Some People + 0.9.2342.19200300.100.1.25= And Some anImAls,0.9.2342.19200300.100.1.25 = eXample,dc= cOm" ); Name result = LdapDN.normalize( name, oidOids ); assertEquals( result.toString(), "0.9.2342.19200300.100.1.25=and some animals+2.5.4.11=some people,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ); assertTrue( ( ( LdapDN ) result ) .getUpName() .equals( "2.5.4.11= Some People + 0.9.2342.19200300.100.1.25= And Some anImAls,0.9.2342.19200300.100.1.25 = eXample,dc= cOm" ) ); } /** * Class to test for toOid( Name, Map) with a multiple NameComponent */ @Test public void testLdapNameToNameAliasMultiNC() throws Exception { LdapDN name = new LdapDN( "2.5.4.11= Some People + domainComponent= And Some anImAls,DomainComponent = eXample,0.9.2342.19200300.100.1.25= cOm" ); LdapDN result = LdapDN.normalize( name, oidOids ); assertTrue( result .toString() .equals( "0.9.2342.19200300.100.1.25=and some animals+2.5.4.11=some people,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ) ); assertTrue( result .getUpName() .equals( "2.5.4.11= Some People + domainComponent= And Some anImAls,DomainComponent = eXample,0.9.2342.19200300.100.1.25= cOm" ) ); } /** * Class to test for hashCode(). 
*/ @Test public void testLdapNameHashCode() throws Exception { Name name1 = LdapDN .normalize( "2.5.4.11= Some People + domainComponent= And Some anImAls,DomainComponent = eXample,0.9.2342.19200300.100.1.25= cOm", oids ); Name name2 = LdapDN .normalize( "2.5.4.11=some people+domainComponent=and some animals,DomainComponent=example,0.9.2342.19200300.100.1.25=com", oids ); assertEquals( name1.hashCode(), name2.hashCode() ); } /** * Test for DIRSERVER-191 */ @Test public void testName() throws NamingException { Name jName = new javax.naming.ldap.LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.toString(), "cn=four,cn=three,cn=two,cn=one" ); assertEquals( aName.toString(), "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testGetPrefixName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.getPrefix( 0 ).toString(), aName.getPrefix( 0 ).toString() ); assertEquals( jName.getPrefix( 1 ).toString(), aName.getPrefix( 1 ).toString() ); assertEquals( jName.getPrefix( 2 ).toString(), aName.getPrefix( 2 ).toString() ); assertEquals( jName.getPrefix( 3 ).toString(), aName.getPrefix( 3 ).toString() ); assertEquals( jName.getPrefix( 4 ).toString(), aName.getPrefix( 4 ).toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testGetSuffix() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.getSuffix( 0 ).toString(), aName.getSuffix( 0 ).toString() ); assertEquals( jName.getSuffix( 1 ).toString(), aName.getSuffix( 1 ).toString() ); assertEquals( jName.getSuffix( 2 ).toString(), aName.getSuffix( 2 ).toString() ); assertEquals( jName.getSuffix( 3 ).toString(), aName.getSuffix( 3 ).toString() ); assertEquals( jName.getSuffix( 4 ).toString(), aName.getSuffix( 4 ).toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddStringName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.add( "cn=five" ) ); assertSame( aName, aName.add( "cn=five" ) ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddIntString() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.add( 0, "cn=zero" ) ); assertSame( aName, aName.add( 0, "cn=zero" ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.add( 2, "cn=one.5" ) ); assertSame( aName, aName.add( 2, "cn=one.5" ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.add( jName.size(), "cn=five" ) ); assertSame( aName, aName.add( aName.size(), "cn=five" ) ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddAllName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.addAll( new LdapName( "cn=seven,cn=six" ) ) ); assertSame( aName, aName.addAll( new LdapDN( "cn=seven,cn=six" ) ) ); assertEquals( jName.toString(), 
aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testAddAllIntName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertSame( jName, jName.addAll( 0, new LdapName( "cn=zero,cn=zero.5" ) ) ); assertSame( aName, aName.addAll( 0, new LdapDN( "cn=zero,cn=zero.5" ) ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.addAll( 2, new LdapName( "cn=zero,cn=zero.5" ) ) ); assertSame( aName, aName.addAll( 2, new LdapDN( "cn=zero,cn=zero.5" ) ) ); assertEquals( jName.toString(), aName.toString() ); assertSame( jName, jName.addAll( jName.size(), new LdapName( "cn=zero,cn=zero.5" ) ) ); assertSame( aName, aName.addAll( aName.size(), new LdapDN( "cn=zero,cn=zero.5" ) ) ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testStartsWithName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.startsWith( new LdapName( "cn=seven,cn=six,cn=five" ) ), aName.startsWith( new LdapDN( "cn=seven,cn=six,cn=five" ) ) ); assertEquals( jName.startsWith( new LdapName( "cn=three,cn=two,cn=one" ) ), aName.startsWith( new LdapDN( "cn=three,cn=two,cn=one" ) ) ); } /** * Test for DIRSERVER-191 */ @Test public void testEndsWithName() throws NamingException { Name name0 = new LdapName( "cn=zero" ); Name name10 = new LdapName( "cn=one,cn=zero" ); Name name210 = new LdapName( "cn=two,cn=one,cn=zero" ); Name name3210 = new LdapName( "cn=three,cn=two,cn=one,cn=zero" ); Name name321 = new LdapName( "cn=three,cn=two,cn=one" ); Name name32 = new LdapName( "cn=three,cn=two" ); Name name3 = new LdapName( "cn=three" ); Name name21 = new LdapName( "cn=two,cn=one" ); Name name2 = new LdapName( "cn=two" ); Name name1 = new LdapName( "cn=one" ); // Check with Name assertTrue( name0.startsWith( name0 ) ); assertTrue( name10.startsWith( name0 ) ); assertTrue( name210.startsWith( name0 ) ); assertTrue( name3210.startsWith( name0 ) ); assertTrue( name10.startsWith( name10 ) ); assertTrue( name210.startsWith( name10 ) ); assertTrue( name3210.startsWith( name10 ) ); assertTrue( name210.startsWith( name210 ) ); assertTrue( name3210.startsWith( name210 ) ); assertTrue( name3210.startsWith( name3210 ) ); assertTrue( name3210.endsWith( name3 ) ); assertTrue( name3210.endsWith( name32 ) ); assertTrue( name3210.endsWith( name321 ) ); assertTrue( name3210.endsWith( name3210 ) ); assertTrue( name210.endsWith( name2 ) ); assertTrue( name210.endsWith( name21 ) ); assertTrue( name210.endsWith( name210 ) ); assertTrue( name10.endsWith( name1 ) ); assertTrue( name10.endsWith( name10 ) ); assertTrue( name0.endsWith( name0 ) ); // Check with DN Name n0 = new LdapDN( "cn=zero" ); Name n10 = new LdapDN( "cn=one,cn=zero" ); Name n210 = new LdapDN( "cn=two,cn=one,cn=zero" ); Name n3210 = new LdapDN( "cn=three,cn=two,cn=one,cn=zero" ); Name n321 = new LdapDN( "cn=three,cn=two,cn=one" ); Name n32 = new LdapDN( "cn=three,cn=two" ); Name n3 = new LdapDN( "cn=three" ); Name n21 = new LdapDN( "cn=two,cn=one" ); Name n2 = new LdapDN( "cn=two" ); Name n1 = new LdapDN( "cn=one" ); assertTrue( n3210.endsWith( n3 ) ); assertTrue( n3210.endsWith( n32 ) ); assertTrue( n3210.endsWith( n321 ) ); assertTrue( n3210.endsWith( n3210 ) ); assertTrue( n210.endsWith( n2 ) ); assertTrue( n210.endsWith( n21 ) ); assertTrue( n210.endsWith( n210 ) ); assertTrue( 
n10.endsWith( n1 ) ); assertTrue( n10.endsWith( n10 ) ); assertTrue( n0.endsWith( n0 ) ); // Check with DN/Name now assertTrue( n3210.endsWith( name3 ) ); assertTrue( n3210.endsWith( name32 ) ); assertTrue( n3210.endsWith( name321 ) ); assertTrue( n3210.endsWith( name3210 ) ); assertTrue( n210.endsWith( name2 ) ); assertTrue( n210.endsWith( name21 ) ); assertTrue( n210.endsWith( name210 ) ); assertTrue( n10.endsWith( name1 ) ); assertTrue( n10.endsWith( name10 ) ); assertTrue( n0.endsWith( name0 ) ); Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.endsWith( new LdapName( "cn=seven,cn=six,cn=five" ) ), aName.endsWith( new LdapDN( "cn=seven,cn=six,cn=five" ) ) ); assertEquals( jName.endsWith( new LdapName( "cn=three,cn=two,cn=one" ) ), aName.endsWith( new LdapDN( "cn=three,cn=two,cn=one" ) ) ); assertEquals( jName.endsWith( new LdapName( "cn=two,cn=one" ) ), aName.endsWith( new LdapDN( "cn=three,cn=two,cn=one" ) ) ); assertTrue( aName.endsWith( new LdapName( "cn=four,cn=three" ) ) ); } /** * Test for DIRSERVER-191 */ @Test public void testRemoveName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); assertEquals( jName.remove( 0 ).toString(), aName.remove( 0 ).toString() ); assertEquals( jName.toString(), aName.toString() ); assertEquals( jName.remove( jName.size() - 1 ).toString(), aName.remove( aName.size() - 1 ).toString() ); assertEquals( jName.toString(), aName.toString() ); } /** * Test for DIRSERVER-191 */ @Test public void testGetAllName() throws NamingException { Name jName = new LdapName( "cn=four,cn=three,cn=two,cn=one" ); Name aName = new LdapDN( "cn=four,cn=three,cn=two,cn=one" ); Enumeration<String> j = jName.getAll(); Enumeration<String> a = aName.getAll(); while ( j.hasMoreElements() ) { assertTrue( j.hasMoreElements() ); assertEquals( j.nextElement(), a.nextElement() ); } } /** * Test for DIRSERVER-642 * @throws NamingException */ @Test public void testDoubleQuoteInNameDIRSERVER_642() throws NamingException { Name name1 = new LdapDN( "cn=\"Kylie Minogue\",dc=example,dc=com" ); Name name2 = new LdapName( "cn=\"Kylie Minogue\",dc=example,dc=com" ); Enumeration<String> j = name1.getAll(); Enumeration<String> a = name2.getAll(); while ( j.hasMoreElements() ) { assertTrue( j.hasMoreElements() ); assertEquals( j.nextElement(), a.nextElement() ); } } /** * Test for DIRSERVER-642 * @throws NamingException */ @Test public void testDoubleQuoteInNameDIRSERVER_642_1() throws NamingException { LdapDN dn = new LdapDN( "cn=\" Kylie Minogue \",dc=example,dc=com" ); assertEquals( "cn=\" Kylie Minogue \",dc=example,dc=com", dn.getUpName() ); assertEquals( "cn=\\ Kylie Minogue\\ ,dc=example,dc=com", dn.toString() ); } /** * Test for DIRSTUDIO-250 * @throws NamingException */ @Test public void testDoubleQuoteWithSpecialCharsInNameDIRSERVER_250() throws NamingException { LdapDN dn = new LdapDN( "a=\"b,c\"" ); assertEquals( "a=\"b,c\"", dn.getUpName() ); assertEquals( "a=b\\,c", dn.toString() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testLeadingAndTrailingSpacesDIRSERVER_184() throws NamingException { LdapDN name = new LdapDN( "dn= \\ four spaces leading and 3 trailing \\ " ); assertEquals( "dn=\\ four spaces leading and 3 trailing \\ ", name.toString() ); assertEquals( "dn= \\ four spaces leading and 3 trailing \\ ", name.getUpName() ); } /** * Test for 
DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_1() { try { new LdapDN( "dn=middle\\ spaces" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_2() { try { new LdapDN( "dn=# a leading pound" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_3() throws NamingException { LdapDN name = new LdapDN( "dn=\\# a leading pound" ); assertEquals( "dn=\\# a leading pound", name.toString() ); assertEquals( "dn=\\# a leading pound", name.getUpName() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_4() throws NamingException { LdapDN name = new LdapDN( "dn=a middle \\# pound" ); assertEquals( "dn=a middle # pound", name.toString() ); assertEquals( "dn=a middle \\# pound", name.getUpName() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_5() throws NamingException { LdapDN name = new LdapDN( "dn=a trailing pound \\#" ); assertEquals( "dn=a trailing pound #", name.toString() ); assertEquals( "dn=a trailing pound \\#", name.getUpName() ); } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_6() { try { new LdapDN( "dn=a middle # pound" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } /** * Test for DIRSERVER-184 * @throws NamingException */ @Test public void testDIRSERVER_184_7() { try { new LdapDN( "dn=a trailing pound #" ); } catch ( InvalidNameException ine ) { assertTrue( true ); } } @Test public void testDIRSERVER_631_1() throws NamingException { LdapDN name = new LdapDN( "cn=Bush\\, Kate,dc=example,dc=com" ); assertEquals( "cn=Bush\\, Kate,dc=example,dc=com", name.toString() ); assertEquals( "cn=Bush\\, Kate,dc=example,dc=com", name.getUpName() ); } /** * Added a test to check the parsing of a DN with more than one RDN * which are OIDs, and with one RDN which has more than one atav. 
* @throws NamingException */ @Test public void testDNWithMultiOidsRDN() throws NamingException { LdapDN name = new LdapDN( "0.9.2342.19200300.100.1.1=00123456789+2.5.4.3=pablo picasso,2.5.4.11=search,2.5.4.10=imc,2.5.4.6=us" ); assertEquals( "0.9.2342.19200300.100.1.1=00123456789+2.5.4.3=pablo picasso,2.5.4.11=search,2.5.4.10=imc,2.5.4.6=us", name .toString() ); assertEquals( "0.9.2342.19200300.100.1.1=00123456789+2.5.4.3=pablo picasso,2.5.4.11=search,2.5.4.10=imc,2.5.4.6=us", name .getUpName() ); } @Test public void testNameAddAll() throws NamingException { Properties props = new Properties(); props.setProperty( "jndi.syntax.direction", "right_to_left" ); props.setProperty( "jndi.syntax.separator", "," ); props.setProperty( "jndi.syntax.ignorecase", "true" ); props.setProperty( "jndi.syntax.trimblanks", "true" ); Name dn = new CompoundName( "cn=blah,dc=example,dc=com", props ); LdapDN ldapDn = new LdapDN(); ldapDn.addAll( 0, dn ); assertEquals( "cn=blah,dc=example,dc=com", ldapDn.toString() ); dn = new CompoundName( "cn=blah,dc=example,dc=com", props ); ldapDn = new LdapDN( "cn=xyz" ); ldapDn.addAll( 0, dn ); assertEquals( "cn=xyz,cn=blah,dc=example,dc=com", ldapDn.toString() ); } @Test public void testDNEquals() throws NamingException { LdapDN dn1 = new LdapDN( "a=b,c=d,e=f" ); LdapDN dn2 = new LdapDN( "a=b\\,c\\=d,e=f" ); assertFalse( dn1.toString().equals( dn2.toString() ) ); } @Test public void testDNAddEmptyString() throws NamingException { LdapDN dn = new LdapDN(); assertTrue( dn.size() == 0 ); assertTrue( dn.add( "" ).size() == 0 ); } /** * This leads to the bug in DIRSERVER-832. */ @Test public void testPreserveAttributeIdCase() throws NamingException { LdapDN dn = new LdapDN( "uID=kevin" ); assertEquals( "uID", dn.getRdn().getUpType() ); } /** * Tests the LdapDN.isValid() method. 
*/ @Test public void testIsValid() { assertTrue( LdapDN.isValid( "" ) ); assertFalse( LdapDN.isValid( "a" ) ); assertFalse( LdapDN.isValid( "a " ) ); assertTrue( LdapDN.isValid( "a=" ) ); assertTrue( LdapDN.isValid( "a= " ) ); assertFalse( LdapDN.isValid( "=" ) ); assertFalse( LdapDN.isValid( " = " ) ); assertFalse( LdapDN.isValid( " = a" ) ); } private ByteArrayOutputStream serializeDN( LdapDN dn ) throws IOException { ObjectOutputStream oOut = null; ByteArrayOutputStream out = new ByteArrayOutputStream(); try { oOut = new ObjectOutputStream( out ); oOut.writeObject( dn ); } catch ( IOException ioe ) { throw ioe; } finally { try { if ( oOut != null ) { oOut.flush(); oOut.close(); } } catch ( IOException ioe ) { throw ioe; } } return out; } private LdapDN deserializeDN( ByteArrayOutputStream out ) throws IOException, ClassNotFoundException { ObjectInputStream oIn = null; ByteArrayInputStream in = new ByteArrayInputStream( out.toByteArray() ); try { oIn = new ObjectInputStream( in ); LdapDN dn = ( LdapDN ) oIn.readObject(); return dn; } catch ( IOException ioe ) { throw ioe; } finally { try { if ( oIn != null ) { oIn.close(); } } catch ( IOException ioe ) { throw ioe; } } } /** * Test the serialization of a DN * * @throws Exception */ @Test public void testNameSerialization() throws Exception { LdapDN dn = new LdapDN( "ou= Some People + dc= And Some anImAls,dc = eXample,dc= cOm" ); dn.normalize( oids ); assertEquals( dn, deserializeDN( serializeDN( dn ) ) ); } @Test public void testSerializeEmptyDN() throws Exception { LdapDN dn = LdapDN.EMPTY_LDAPDN; assertEquals( dn, deserializeDN( serializeDN( dn ) ) ); } /** * Test the serialization of a DN * * @throws Exception */ @Test public void testNameStaticSerialization() throws Exception { LdapDN dn = new LdapDN( "ou= Some People + dc= And Some anImAls,dc = eXample,dc= cOm" ); dn.normalize( oids ); ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream( baos ); LdapDNSerializer.serialize( dn, out ); out.flush(); byte[] data = baos.toByteArray(); ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); assertEquals( dn, LdapDNSerializer.deserialize( in ) ); } /* @Test public void testSerializationPerfs() throws Exception { LdapDN dn = new LdapDN( "ou= Some People + dc= And Some anImAls,dc = eXample,dc= cOm" ); dn.normalize( oids ); long t0 = System.currentTimeMillis(); for ( int i = 0; i < 1000; i++ ) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream( baos ); DnSerializer.serialize( dn, out ); byte[] data = baos.toByteArray(); ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); LdapDN dn1 = DnSerializer.deserialize( in ); } long t1 = System.currentTimeMillis(); System.out.println( "delta :" + ( t1 - t0) ); long t2 = System.currentTimeMillis(); for ( int i = 0; i < 1000000; i++ ) { //ByteArrayOutputStream baos = new ByteArrayOutputStream(); //ObjectOutputStream out = new ObjectOutputStream( baos ); //DnSerializer.serializeString( dn, out ); //byte[] data = baos.toByteArray(); //ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); //LdapDN dn1 = DnSerializer.deserializeString( in, oids ); dn.normalize( oids ); } long t3 = System.currentTimeMillis(); System.out.println( "delta :" + ( t3 - t2) ); //assertEquals( dn, DnSerializer.deserialize( in ) ); } */ @Test public void testStaticSerializeEmptyDN() throws Exception { LdapDN dn = LdapDN.EMPTY_LDAPDN; 
ByteArrayOutputStream baos = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream( baos ); LdapDNSerializer.serialize( dn, out ); out.flush(); byte[] data = baos.toByteArray(); ObjectInputStream in = new ObjectInputStream( new ByteArrayInputStream( data ) ); assertEquals( dn, LdapDNSerializer.deserialize( in ) ); assertEquals( dn, deserializeDN( serializeDN( dn ) ) ); } @Test public void testCompositeRDN() throws InvalidNameException { assertTrue( LdapDN.isValid( "a=b+c=d+e=f,g=h" ) ); LdapDN dn = new LdapDN( "a=b+c=d+e=f,g=h" ); assertEquals( "a=b+c=d+e=f,g=h", dn.toString() ); } @Test public void testCompositeRDNOids() throws InvalidNameException { assertTrue( LdapDN.isValid( "1.2.3.4.5=0+1.2.3.4.6=0+1.2.3.4.7=omnischmomni,2.5.4.3=subtree,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ) ); LdapDN dn = new LdapDN( "1.2.3.4.5=0+1.2.3.4.6=0+1.2.3.4.7=omnischmomni,2.5.4.3=subtree,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com" ); assertEquals( "1.2.3.4.5=0+1.2.3.4.6=0+1.2.3.4.7=omnischmomni,2.5.4.3=subtree,0.9.2342.19200300.100.1.25=example,0.9.2342.19200300.100.1.25=com", dn.toString() ); } /** * Tests that AttributeTypeAndValues are correctly trimmed. */ @Test public void testTrimAtavs() throws InvalidNameException { // antlr parser: string value with trailing spaces LdapDN dn1 = new LdapDN( " cn = Amos\\,Tori , ou=system " ); assertEquals( " cn = Amos\\,Tori ", dn1.getRdn().getUpName() ); AttributeTypeAndValue atav1 = dn1.getRdn().getAtav(); assertEquals( "cn", atav1.getUpType() ); assertEquals( "Amos\\,Tori", atav1.getUpValue().getString() ); // antlr parser: hexstring with trailing spaces LdapDN dn3 = new LdapDN( " cn = #414243 , ou=system " ); assertEquals( " cn = #414243 ", dn3.getRdn().getUpName() ); AttributeTypeAndValue atav3 = dn3.getRdn().getAtav(); assertEquals( "cn", atav3.getUpType() ); assertEquals( "#414243", atav3.getUpValue().getString() ); assertTrue( Arrays.equals( StringTools.getBytesUtf8( "ABC" ),atav3.getNormValue().getBytes() ) ); // antlr parser: LdapDN dn4 = new LdapDN( " cn = \\41\\42\\43 , ou=system " ); assertEquals( " cn = \\41\\42\\43 ", dn4.getRdn().getUpName() ); AttributeTypeAndValue atav4 = dn4.getRdn().getAtav(); assertEquals( "cn", atav4.getUpType() ); assertEquals( "\\41\\42\\43", atav4.getUpValue().getString() ); assertEquals( "ABC", atav4.getNormValue().getString() ); // antlr parser: quotestring with trailing spaces LdapDN dn5 = new LdapDN( " cn = \"ABC\" , ou=system " ); assertEquals( " cn = \"ABC\" ", dn5.getRdn().getUpName() ); AttributeTypeAndValue atav5 = dn5.getRdn().getAtav(); assertEquals( "cn", atav5.getUpType() ); assertEquals( "\"ABC\"", atav5.getUpValue().getString() ); assertEquals( "ABC", atav5.getNormValue().getString() ); // fast parser: string value with trailing spaces LdapDN dn2 = new LdapDN( " cn = Amos Tori , ou=system " ); assertEquals( " cn = Amos Tori ", dn2.getRdn().getUpName() ); AttributeTypeAndValue atav2 = dn2.getRdn().getAtav(); assertEquals( "cn", atav2.getUpType() ); assertEquals( "Amos Tori", atav2.getUpValue().getString() ); } }
Added a test for getRdn( int ) git-svn-id: a98780f44e7643575d86f056c30a4189ca15db44@803800 13f79535-47bb-0310-9956-ffa450edef68
ldap/src/test/java/org/apache/directory/shared/ldap/name/LdapDNTest.java
Added a test for getRdn( int )
Java
apache-2.0
a726ee5d3b3779d6fe4801c7e251c0a51f1dc1da
0
iLib-js/iLib,iLib-js/iLib,iLib-js/iLib,iLib-js/iLib,iLib-js/iLib
/* * JSFile.java - * * Copyright © 2012-2015, JEDLSoft * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package com.ilib.tools.jsa; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStreamReader; import java.io.Reader; import java.io.Writer; import java.util.ArrayList; import java.util.HashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.log4j.Logger; import org.json.JSONArray; import org.json.JSONObject; import org.json.JSONTokener; import com.ilib.IlibLocale; /** * JSFile * * @author edwin */ public class JSFile extends AssemblyFile { protected Logger logger = Logger.getLogger(this.getClass()); protected ArrayList<Pattern> dependsPatterns = new ArrayList<Pattern>(); protected ArrayList<Pattern> dataPatterns = new ArrayList<Pattern>(); protected ArrayList<Pattern> macroPatterns = new ArrayList<Pattern>(); protected JSONObject zonetab = null; protected ArrayList<Pattern> deletePatterns = new ArrayList<Pattern>(); public JSFile(File file) { super(file); dependsPatterns.add(Pattern.compile("/\\*\\s*!depends\\s*([^\\*]+)\\*/")); dependsPatterns.add(Pattern.compile("\\/\\/\\s*!depends\\s*([^\\n]+)")); dataPatterns.add(Pattern.compile("/\\*\\s*!data\\s*([^\\*]+)\\*/")); dataPatterns.add(Pattern.compile("\\/\\/\\s*!data\\s*([^\\n]+)")); macroPatterns.add(Pattern.compile("/\\*\\s*!macro\\s*([^\\*]+)\\*/")); macroPatterns.add(Pattern.compile("\\/\\/\\s*!macro\\s*(\\S*)")); deletePatterns.add(Pattern.compile("var ilib = ilib \\|\\| \\{\\};\\n")); deletePatterns.add(Pattern.compile("module.exports = function(?s).*\\n};$")); } /** * Find a javascript file somewhere on the include path. * * @param includePath list of directories to search * @param fileName relative path to the javascript file * @param allFiles cache of all files already processed * @return a JSFile instance * @throws Exception if the file cannot be found */ protected AssemblyFile find(ArrayList<File> includePath, String fileName, HashMap<String, AssemblyFile> allFiles) throws Exception { int i = 0; File newFile = new File(includePath.get(i), fileName); JSFile jsf; while ( !newFile.canRead() && i < includePath.size() ) { newFile = new File(includePath.get(i++), fileName); } if ( !newFile.canRead() ) { throw new Exception("Cannot find file " + fileName + " which " + file.getPath() + " depends upon."); } if ( allFiles.containsKey(newFile.getPath()) ) { return allFiles.get(newFile.getPath()); } jsf = new JSFile(newFile); allFiles.put(newFile.getPath(), jsf); return jsf; } /** * Locate a json file somewhere on the include path. 
* * @param includePath list of directories to search * @param baseName the base name of the json file without the locale spec attached * @param fileName relative path to the javascript file * @param allFiles cache of all files already processed */ protected JSONFile locate(ArrayList<File> includePath, String baseName, String fileName, HashMap<String, AssemblyFile> allFiles) { int i = 0; File newFile = new File(includePath.get(i), fileName); JSONFile json = null; while ( !newFile.canRead() && i < includePath.size() ) { logger.debug("Checking path " + newFile.getPath()); newFile = new File(includePath.get(i++), fileName); } if ( newFile.canRead() ) { logger.debug("Found data file " + newFile.getPath()); if ( allFiles.containsKey(newFile.getPath()) ) { json = (JSONFile) allFiles.get(newFile.getPath()); } else { json = new JSONFile(newFile, baseName); allFiles.put(newFile.getPath(), json); } dependencies.add(json); json.addParent(this); } return json; } /** * Find all json files for a particular locale with the given baseName. * * @param includePath * @param baseName * @param locale * @param allFiles * @throws Exception */ protected void findAllForLocale(ArrayList<File> includePath, String baseName, IlibLocale locale, HashMap<String, AssemblyFile> allFiles) throws Exception { String baseFileName = baseName + ".json"; String massagedBaseName = baseName.replaceAll("/", "_"); StringBuilder localeDir = new StringBuilder(); String fileName; if ( locale.getLanguage() != null && locale.getLanguage().length() > 0 ) { localeDir.append(locale.getLanguage()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getScript() != null && locale.getScript().length() > 0) { localeDir.append("/"); localeDir.append(locale.getScript()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append("/"); localeDir.append(locale.getRegion()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } } else if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append("/"); localeDir.append(locale.getRegion()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } } localeDir = new StringBuilder(); if ( locale.getScript() != null && locale.getScript().length() > 0) { localeDir.append(locale.getScript()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append("/"); localeDir.append(locale.getRegion()); fileName = 
localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } } localeDir = new StringBuilder(); if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append(locale.getRegion()); fileName = "und/" + localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = "und/" + localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } localeDir = new StringBuilder(); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } /** * Find all json files for a given basename across all locales. This method always finds * the generic shared json, even if the locale list is empty, so that there is always a * default if a particular ilib function does not have locale data or if the copy of ilib * is used with dynamic loading instead of preassembled data. * * @param includePath * @param locales * @param baseName * @param allFiles * @throws Exception */ protected void findAll(ArrayList<File> includePath, ArrayList<IlibLocale> locales, String baseName, HashMap<String, AssemblyFile> allFiles) throws Exception { if ( locales != null && locales.size() > 0 ) { locate(includePath, baseName.replaceAll("/", "_"), baseName + ".json", allFiles); for ( int i = 0; i < locales.size(); i++ ) { findAllForLocale(includePath, baseName, locales.get(i), allFiles); } } } /** * Load in the zoneinfo files that are relevant to all the given locales. Basically, this looks up each * locale's country in the zonetab.json file, and adds all time zones it finds there. 
* * @param includePath * @param locales * @param allFiles * @throws Exception if something went wrong or if the zonetab.json file could not be found */ protected void findZones(ArrayList<File> includePath, ArrayList<IlibLocale> locales, HashMap<String, AssemblyFile> allFiles) throws Exception { File dir = null; logger.debug("Creating dependencies on zoneinfo files"); JSONTokener tokenizer; for ( int i = 0; i < includePath.size(); i++ ) { dir = new File(includePath.get(i), "zoneinfo"); File f = new File(dir, "zonetab.json"); if ( f.exists() ) { try (Reader rdr = new InputStreamReader(new FileInputStream(f), "utf-8")) { tokenizer = new JSONTokener(rdr); zonetab = new JSONObject(tokenizer); logger.debug("Successfully read in the zonetab.json file"); break; } } } if ( dir == null ) { throw new Exception("Could not find zoneinfo files in any of the include directories."); } for (IlibLocale loc: locales) { logger.debug("Creating for region " + loc.getRegion()); JSONArray zones = zonetab.optJSONArray(loc.getRegion()); if ( zones != null ) { for ( int i = 0; i < zones.length(); i++ ) { String zone = zones.getString(i); logger.debug("Creating dependency on zoneinfo " + zone); locate(includePath, "zoneinfo[\"" + zone + "\"]", "zoneinfo/" + zone + ".json", allFiles); } } } // add in all generic zones no matter what the locales are if ( locales != null && locales.size() > 0 ) { logger.debug("Searching directory for generic files: " + dir.toString()); File[] files = dir.listFiles(); if ( files != null ) { for ( int i = 0; i < files.length; i++ ) { if (files[i].isFile() && files[i].getName().endsWith(".json")) { logger.debug("Adding generic tz file " + files[i].getPath()); JSONFile json; if ( allFiles.containsKey(files[i].getPath()) ) { json = (JSONFile) allFiles.get(files[i].getPath()); } else { json = new JSONFile(files[i], "zoneinfo[\"" + files[i].getName().replaceAll("\\.json$", "") + "\"]"); allFiles.put(files[i].getPath(), json); } dependencies.add(json); json.addParent(this); } } } File etcDir = new File(dir, "Etc"); files = etcDir.listFiles(); if ( files != null ) { for ( int i = 0; i < files.length; i++ ) { if (files[i].isFile() && files[i].getName().endsWith(".json")) { logger.debug("Adding generic tz file " + files[i].getPath()); JSONFile json; if ( allFiles.containsKey(files[i].getPath()) ) { json = (JSONFile) allFiles.get(files[i].getPath()); } else { json = new JSONFile(files[i], "zoneinfo[\"Etc/" + files[i].getName().replaceAll("\\.json$", "") + "\"]"); allFiles.put(files[i].getPath(), json); } dependencies.add(json); json.addParent(this); } } } locate(includePath, "timezone.zonetab", "zoneinfo\\/zonetab.json", allFiles); } } /* (non-Javadoc) * @see com.ilib.tools.jsa.AssemblyFile#process(java.util.ArrayList, java.util.ArrayList, java.util.HashMap) */ public void process(ArrayList<File> includePath, ArrayList<IlibLocale> locales, HashMap<String, AssemblyFile> allFiles) throws Exception { int start = 0, nameStart, groupEnd, i; StringBuffer str; String fileName; logger.debug("Processing file " + file.getPath()); if ( isProcessed() ) { // don't include the same file more than once logger.debug("Already processed file " + file.getPath() + " before."); return; } if ( !allFiles.containsKey(file.getPath()) ) { allFiles.put(file.getPath(), this); } try { str = readFile(); for ( int p = 0; p < dependsPatterns.size(); p++ ) { Matcher matcher = dependsPatterns.get(p).matcher(str); start = 0; while ( matcher.find(start) ) { start = matcher.end(); i = matcher.start(1); groupEnd = matcher.end(1); while 
( i < groupEnd ) { nameStart = i; while ( i < groupEnd && !Character.isWhitespace(str.charAt(i)) ) { i++; } fileName = str.substring(nameStart, i); if ( fileName.length() > 0 ) { AssemblyFile jsfile = find(includePath, fileName, allFiles); logger.debug("Found dependency: " + file.getPath() + " -> " + jsfile.getPath()); dependencies.add(jsfile); jsfile.addParent(this); } while ( i < groupEnd && Character.isWhitespace(str.charAt(i)) ) { i++; } } } } for ( int p = 0; p < dataPatterns.size(); p++ ) { Matcher matcher = dataPatterns.get(p).matcher(str); start = 0; while ( matcher.find(start) ) { start = matcher.end(); i = matcher.start(1); groupEnd = matcher.end(1); while ( i < groupEnd ) { nameStart = i; while ( i < groupEnd && !Character.isWhitespace(str.charAt(i)) ) { i++; } fileName = str.substring(nameStart, i); if ( fileName.length() > 0 ) { logger.debug("Found data dependency: " + file.getPath() + " -> " + fileName); if ( fileName.equalsIgnoreCase("zoneinfo") ) { findZones(includePath, locales, allFiles); } else { findAll(includePath, locales, fileName, allFiles); } } while ( i < groupEnd && Character.isWhitespace(str.charAt(i)) ) { i++; } } } } str = null; setProcessed(true); /* process the dependencies after we have found them all and nulled * out str so that we don't keep all the files in memory at the same * time. */ for ( i = 0; i < dependencies.size(); i++ ) { dependencies.get(i).process(includePath, locales, allFiles); } } catch ( FileNotFoundException e ) { System.err.println("Error: could not read file " + file.getPath()); throw new Exception(e); } } /* (non-Javadoc) * @see com.ilib.tools.jsa.AssemblyFile#writeParents(java.io.Writer, java.util.ArrayList, java.util.ArrayList) */ public void writeParents(Writer out, ArrayList<String> visited, ArrayList<IlibLocale> locales) throws Exception { int i, j; String thisPath = getPath(); for ( i = 0; i < visited.size(); i++ ) { if ( thisPath.equals(visited.get(i)) ) { j = visited.size() - 1; String depends = "File " + thisPath + " depends on files that eventually depend on it again. Path:\n"; while ( j >= i ) { depends += visited.get(j--) + " -> \n"; } depends += thisPath + "\n"; throw new CircularDependencyException(depends); } } visited.add(thisPath); // recursively find the the source node for ( i = 0; i < parents.size(); i++ ) { parents.get(i).writeParents(out, visited, locales); } // start with a new visited array to catch circular dependencies // starting at this source node writeDependencies(out, new ArrayList<String>(), locales); } /* (non-Javadoc) * @see com.ilib.tools.jsa.AssemblyFile#writeDependencies(java.io.Writer, java.util.ArrayList) */ public void writeDependencies(Writer out, ArrayList<String> visited, ArrayList<IlibLocale> locales) throws Exception { int i; String thisPath = getPath(); if ( isWritten() ) { // already did this one return; } for ( i = 0; i < visited.size(); i++ ) { if ( thisPath.equals(visited.get(i)) ) { String depends = "File " + thisPath + " depends on files that eventually depend on it again. 
Path:\n"; while ( i < visited.size() ) { depends += visited.get(i++) + " ->\n"; } depends += thisPath + "\n"; throw new CircularDependencyException(depends); } } visited.add(thisPath); // do the dependencies first before the contents of this node for ( i = 0; i < dependencies.size(); i++ ) { dependencies.get(i).writeDependencies(out, visited, locales); } StringBuffer str; logger.debug("Now writing out file " + getPath()); try { int groupEnd, nameStart; String macroName; Matcher matcher; str = readFile(); // remove the parts that are not needed for assembled files for ( int p = 0; p < deletePatterns.size(); p++ ) { matcher = deletePatterns.get(p).matcher(str); if ( matcher.find() ) { str = str.replace(matcher.start(), matcher.end(), ""); } } for ( int p = 0; p < macroPatterns.size(); p++ ) { matcher = macroPatterns.get(p).matcher(str); while ( matcher.find() ) { i = matcher.start(1); groupEnd = matcher.end(1); if ( i < groupEnd ) { nameStart = i; while ( i < groupEnd && !Character.isWhitespace(str.charAt(i)) ) { i++; } macroName = str.substring(nameStart, i); if ( macroName.length() > 0 ) { if ( macroName.equalsIgnoreCase("localelist") ) { StringBuffer sb = new StringBuffer(); for ( int j = 0; j < locales.size(); j++ ) { if ( j > 0 ) { sb.append(','); } sb.append('"'); sb.append(locales.get(j).toString()); sb.append('"'); } str = str.replace(matcher.start(), matcher.end(), sb.toString()); matcher.reset(); } else if ( macroName.equalsIgnoreCase("ilibVersion") ) { StringBuffer sb = new StringBuffer(); sb.append('"'); sb.append(JSAssemble.version); sb.append('"'); str = str.replace(matcher.start(), matcher.end(), sb.toString()); matcher.reset(); } } } } } out.write(str.toString()); out.append('\n'); // in case the file doesn't end with one setWritten(true); } catch ( FileNotFoundException e ) { System.err.println("Error: could not read file " + file.getPath()); throw new Exception(e); } } }
java/src/com/ilib/tools/jsa/JSFile.java
/* * JSFile.java - * * Copyright © 2012-2013, JEDLSoft * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package com.ilib.tools.jsa; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStreamReader; import java.io.Reader; import java.io.Writer; import java.util.ArrayList; import java.util.HashMap; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.log4j.Logger; import org.json.JSONArray; import org.json.JSONObject; import org.json.JSONTokener; import com.ilib.IlibLocale; /** * JSFile * * @author edwin */ public class JSFile extends AssemblyFile { protected Logger logger = Logger.getLogger(this.getClass()); protected ArrayList<Pattern> dependsPatterns = new ArrayList<Pattern>(); protected ArrayList<Pattern> dataPatterns = new ArrayList<Pattern>(); protected ArrayList<Pattern> macroPatterns = new ArrayList<Pattern>(); protected JSONObject zonetab = null; protected ArrayList<Pattern> deletePatterns = new ArrayList<Pattern>(); public JSFile(File file) { super(file); dependsPatterns.add(Pattern.compile("/\\*\\s*!depends\\s*([^\\*]+)\\*/")); dependsPatterns.add(Pattern.compile("\\/\\/\\s*!depends\\s*([^\\n]+)")); dataPatterns.add(Pattern.compile("/\\*\\s*!data\\s*([^\\*]+)\\*/")); dataPatterns.add(Pattern.compile("\\/\\/\\s*!data\\s*([^\\n]+)")); macroPatterns.add(Pattern.compile("/\\*\\s*!macro\\s*([^\\*]+)\\*/")); macroPatterns.add(Pattern.compile("\\/\\/\\s*!macro\\s*(\\S*)")); deletePatterns.add(Pattern.compile("var ilib = ilib \\|\\| \\{\\};\\n")); deletePatterns.add(Pattern.compile("module.exports = function(?s).*\\n};$")); } /** * Find a javascript file somewhere on the include path. * * @param includePath list of directories to search * @param fileName relative path to the javascript file * @param allFiles cache of all files already processed * @return a JSFile instance * @throws Exception if the file cannot be found */ protected AssemblyFile find(ArrayList<File> includePath, String fileName, HashMap<String, AssemblyFile> allFiles) throws Exception { int i = 0; File newFile = new File(includePath.get(i), fileName); JSFile jsf; while ( !newFile.canRead() && i < includePath.size() ) { newFile = new File(includePath.get(i++), fileName); } if ( !newFile.canRead() ) { throw new Exception("Cannot find file " + fileName + " which " + file.getPath() + " depends upon."); } if ( allFiles.containsKey(newFile.getPath()) ) { return allFiles.get(newFile.getPath()); } jsf = new JSFile(newFile); allFiles.put(newFile.getPath(), jsf); return jsf; } /** * Locate a json file somewhere on the include path. 
* * @param includePath list of directories to search * @param baseName the base name of the json file without the locale spec attached * @param fileName relative path to the javascript file * @param allFiles cache of all files already processed */ protected JSONFile locate(ArrayList<File> includePath, String baseName, String fileName, HashMap<String, AssemblyFile> allFiles) { int i = 0; File newFile = new File(includePath.get(i), fileName); JSONFile json = null; while ( !newFile.canRead() && i < includePath.size() ) { logger.debug("Checking path " + newFile.getPath()); newFile = new File(includePath.get(i++), fileName); } if ( newFile.canRead() ) { logger.debug("Found data file " + newFile.getPath()); if ( allFiles.containsKey(newFile.getPath()) ) { json = (JSONFile) allFiles.get(newFile.getPath()); } else { json = new JSONFile(newFile, baseName); allFiles.put(newFile.getPath(), json); } dependencies.add(json); json.addParent(this); } return json; } /** * Find all json files for a particular locale with the given baseName. * * @param includePath * @param baseName * @param locale * @param allFiles * @throws Exception */ protected void findAllForLocale(ArrayList<File> includePath, String baseName, IlibLocale locale, HashMap<String, AssemblyFile> allFiles) throws Exception { String baseFileName = baseName + ".json"; String massagedBaseName = baseName.replaceAll("/", "_"); StringBuilder localeDir = new StringBuilder(); String fileName; if ( locale.getLanguage() != null && locale.getLanguage().length() > 0 ) { localeDir.append(locale.getLanguage()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getScript() != null && locale.getScript().length() > 0) { localeDir.append("/"); localeDir.append(locale.getScript()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append("/"); localeDir.append(locale.getRegion()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } } else if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append("/"); localeDir.append(locale.getRegion()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } } localeDir = new StringBuilder(); if ( locale.getScript() != null && locale.getScript().length() > 0) { localeDir.append(locale.getScript()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append("/"); localeDir.append(locale.getRegion()); fileName = 
localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } } localeDir = new StringBuilder(); if ( locale.getRegion() != null && locale.getRegion().length() > 0) { localeDir.append(locale.getRegion()); fileName = "und/" + localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append("/"); localeDir.append(locale.getVariant()); fileName = "und/" + localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } localeDir = new StringBuilder(); if ( locale.getVariant() != null && locale.getVariant().length() > 0) { localeDir.append(locale.getVariant()); fileName = localeDir + "/" + baseFileName; locate(includePath, massagedBaseName+"_"+localeDir.toString().replace('/', '_'), fileName, allFiles); } } /** * Find all json files for a given basename across all locales. This method always finds * the generic shared json, even if the locale list is empty, so that there is always a * default if a particular ilib function does not have locale data or if the copy of ilib * is used with dynamic loading instead of preassembled data. * * @param includePath * @param locales * @param baseName * @param allFiles * @throws Exception */ protected void findAll(ArrayList<File> includePath, ArrayList<IlibLocale> locales, String baseName, HashMap<String, AssemblyFile> allFiles) throws Exception { if ( locales != null && locales.size() > 0 ) { locate(includePath, baseName.replaceAll("/", "_"), baseName + ".json", allFiles); for ( int i = 0; i < locales.size(); i++ ) { findAllForLocale(includePath, baseName, locales.get(i), allFiles); } } } /** * Load in the zoneinfo files that are relevant to all the given locales. Basically, this looks up each * locale's country in the zonetab.json file, and adds all time zones it finds there. 
* * @param includePath * @param locales * @param allFiles * @throws Exception if something went wrong or if the zonetab.json file could not be found */ protected void findZones(ArrayList<File> includePath, ArrayList<IlibLocale> locales, HashMap<String, AssemblyFile> allFiles) throws Exception { File dir = null; logger.debug("Creating dependencies on zoneinfo files"); JSONTokener tokenizer; for ( int i = 0; i < includePath.size(); i++ ) { dir = new File(includePath.get(i), "zoneinfo"); File f = new File(dir, "zonetab.json"); if ( f.exists() ) { try (Reader rdr = new InputStreamReader(new FileInputStream(f), "utf-8")) { tokenizer = new JSONTokener(rdr); zonetab = new JSONObject(tokenizer); logger.debug("Successfully read in the zonetab.json file"); break; } } } if ( dir == null ) { throw new Exception("Could not find zoneinfo files in any of the include directories."); } for (IlibLocale loc: locales) { logger.debug("Creating for region " + loc.getRegion()); JSONArray zones = zonetab.optJSONArray(loc.getRegion()); if ( zones != null ) { for ( int i = 0; i < zones.length(); i++ ) { String zone = zones.getString(i); logger.debug("Creating dependency on zoneinfo " + zone); locate(includePath, "zoneinfo[\"" + zone + "\"]", "zoneinfo/" + zone + ".json", allFiles); } } } // add in all generic zones no matter what the locales are if ( locales != null && locales.size() > 0 ) { logger.debug("Searching directory for generic files: " + dir.toString()); File[] files = dir.listFiles(); if ( files != null ) { for ( int i = 0; i < files.length; i++ ) { if (files[i].isFile() && files[i].getName().endsWith(".json")) { logger.debug("Adding generic tz file " + files[i].getPath()); JSONFile json; if ( allFiles.containsKey(files[i].getPath()) ) { json = (JSONFile) allFiles.get(files[i].getPath()); } else { json = new JSONFile(files[i], "zoneinfo[\"" + files[i].getName().replaceAll("\\.json$", "") + "\"]"); allFiles.put(files[i].getPath(), json); } dependencies.add(json); json.addParent(this); } } } File etcDir = new File(dir, "Etc"); files = etcDir.listFiles(); if ( files != null ) { for ( int i = 0; i < files.length; i++ ) { if (files[i].isFile() && files[i].getName().endsWith(".json")) { logger.debug("Adding generic tz file " + files[i].getPath()); JSONFile json; if ( allFiles.containsKey(files[i].getPath()) ) { json = (JSONFile) allFiles.get(files[i].getPath()); } else { json = new JSONFile(files[i], "zoneinfo[\"Etc/" + files[i].getName().replaceAll("\\.json$", "") + "\"]"); allFiles.put(files[i].getPath(), json); } dependencies.add(json); json.addParent(this); } } } locate(includePath, "timezone.zonetab", "zoneinfo\\/zonetab.json", allFiles); } } /* (non-Javadoc) * @see com.ilib.tools.jsa.AssemblyFile#process(java.util.ArrayList, java.util.ArrayList, java.util.HashMap) */ public void process(ArrayList<File> includePath, ArrayList<IlibLocale> locales, HashMap<String, AssemblyFile> allFiles) throws Exception { int start = 0, nameStart, groupEnd, i; StringBuffer str; String fileName; logger.debug("Processing file " + file.getPath()); if ( isProcessed() ) { // don't include the same file more than once logger.debug("Already processed file " + file.getPath() + " before."); return; } if ( !allFiles.containsKey(file.getPath()) ) { allFiles.put(file.getPath(), this); } try { str = readFile(); for ( int p = 0; p < dependsPatterns.size(); p++ ) { Matcher matcher = dependsPatterns.get(p).matcher(str); start = 0; while ( matcher.find(start) ) { start = matcher.end(); i = matcher.start(1); groupEnd = matcher.end(1); while 
( i < groupEnd ) { nameStart = i; while ( i < groupEnd && !Character.isWhitespace(str.charAt(i)) ) { i++; } fileName = str.substring(nameStart, i); if ( fileName.length() > 0 ) { AssemblyFile jsfile = find(includePath, fileName, allFiles); logger.debug("Found dependency: " + file.getPath() + " -> " + jsfile.getPath()); dependencies.add(jsfile); jsfile.addParent(this); } while ( i < groupEnd && Character.isWhitespace(str.charAt(i)) ) { i++; } } } } for ( int p = 0; p < dataPatterns.size(); p++ ) { Matcher matcher = dataPatterns.get(p).matcher(str); start = 0; while ( matcher.find(start) ) { start = matcher.end(); i = matcher.start(1); groupEnd = matcher.end(1); while ( i < groupEnd ) { nameStart = i; while ( i < groupEnd && !Character.isWhitespace(str.charAt(i)) ) { i++; } fileName = str.substring(nameStart, i); if ( fileName.length() > 0 ) { logger.debug("Found data dependency: " + file.getPath() + " -> " + fileName); if ( fileName.equalsIgnoreCase("zoneinfo") ) { findZones(includePath, locales, allFiles); } else { findAll(includePath, locales, fileName, allFiles); } } while ( i < groupEnd && Character.isWhitespace(str.charAt(i)) ) { i++; } } } } str = null; setProcessed(true); /* process the dependencies after we have found them all and nulled * out str so that we don't keep all the files in memory at the same * time. */ for ( i = 0; i < dependencies.size(); i++ ) { dependencies.get(i).process(includePath, locales, allFiles); } } catch ( FileNotFoundException e ) { System.err.println("Error: could not read file " + file.getPath()); throw new Exception(e); } } /* (non-Javadoc) * @see com.ilib.tools.jsa.AssemblyFile#writeParents(java.io.Writer, java.util.ArrayList, java.util.ArrayList) */ public void writeParents(Writer out, ArrayList<String> visited, ArrayList<IlibLocale> locales) throws Exception { int i, j; String thisPath = getPath(); for ( i = 0; i < visited.size(); i++ ) { if ( thisPath.equals(visited.get(i)) ) { j = visited.size() - 1; String depends = "File " + thisPath + " depends on files that eventually depend on it again. Path:\n"; while ( j >= i ) { depends += visited.get(j--) + " -> \n"; } depends += thisPath + "\n"; throw new CircularDependencyException(depends); } } visited.add(thisPath); // recursively find the the source node for ( i = 0; i < parents.size(); i++ ) { parents.get(i).writeParents(out, visited, locales); } // start with a new visited array to catch circular dependencies // starting at this source node writeDependencies(out, new ArrayList<String>(), locales); } /* (non-Javadoc) * @see com.ilib.tools.jsa.AssemblyFile#writeDependencies(java.io.Writer, java.util.ArrayList) */ public void writeDependencies(Writer out, ArrayList<String> visited, ArrayList<IlibLocale> locales) throws Exception { int i; String thisPath = getPath(); if ( isWritten() ) { // already did this one return; } for ( i = 0; i < visited.size(); i++ ) { if ( thisPath.equals(visited.get(i)) ) { String depends = "File " + thisPath + " depends on files that eventually depend on it again. 
Path:\n"; while ( i < visited.size() ) { depends += visited.get(i++) + " ->\n"; } depends += thisPath + "\n"; throw new CircularDependencyException(depends); } } visited.add(thisPath); // do the dependencies first before the contents of this node for ( i = 0; i < dependencies.size(); i++ ) { dependencies.get(i).writeDependencies(out, visited, locales); } StringBuffer str; logger.debug("Now writing out file " + getPath()); try { int groupEnd, nameStart; String macroName; Matcher matcher; str = readFile(); // remove the parts that are not needed for assembled files for ( int p = 0; p < deletePatterns.size(); p++ ) { matcher = deletePatterns.get(p).matcher(str); if ( matcher.find() ) { str = str.replace(matcher.start(), matcher.end(), ""); } } for ( int p = 0; p < macroPatterns.size(); p++ ) { matcher = macroPatterns.get(p).matcher(str); while ( matcher.find() ) { i = matcher.start(1); groupEnd = matcher.end(1); if ( i < groupEnd ) { nameStart = i; while ( i < groupEnd && !Character.isWhitespace(str.charAt(i)) ) { i++; } macroName = str.substring(nameStart, i); if ( macroName.length() > 0 ) { if ( macroName.equalsIgnoreCase("localelist") ) { StringBuffer sb = new StringBuffer(); for ( int j = 0; j < locales.size(); j++ ) { if ( j > 0 ) { sb.append(','); } sb.append('"'); sb.append(locales.get(j).toString()); sb.append('"'); } str = str.replace(matcher.start(), matcher.end(), sb.toString()); matcher.reset(); } else if ( macroName.equalsIgnoreCase("ilibVersion") ) { StringBuffer sb = new StringBuffer(); sb.append('"'); sb.append(JSAssemble.version); sb.append('"'); str = str.replace(matcher.start(), matcher.end(), sb.toString()); matcher.reset(); } } } } } out.write(str.toString()); out.append('\n'); // in case the file doesn't end with one setWritten(true); } catch ( FileNotFoundException e ) { System.err.println("Error: could not read file " + file.getPath()); throw new Exception(e); } } }
Update copyright statement git-svn-id: c47ce96d6d22bb44eb799edef1c4bf418c7a7dd6@1694 5ac057f5-ce63-4fb3-acd1-ab13b794ca36
java/src/com/ilib/tools/jsa/JSFile.java
Update copyright statement
Java
bsd-2-clause
e1003b2887d8838a971373808e01aa120133c245
0
bioinform/varsim,bioinform/varsim,bioinform/varsim,bioinform/varsim
package com.bina.varsim.types.variant.alt;

import com.bina.varsim.types.ChrString;
import com.bina.varsim.types.FlexSeq;

import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Created by guoy28 on 11/18/16.
 * class for ALT field in VCF format
 */
public class Alt {
    private SymbolicAllele symbolicAllele;
    private Breakend breakend;
    private FlexSeq seq;

    public Alt() {}

    public SymbolicAllele getSymbolicAllele() {
        return symbolicAllele;
    }

    public void setSymbolicAllele(SymbolicAllele symbolicAllele) {
        this.symbolicAllele = symbolicAllele;
    }

    public Breakend getBreakend() {
        return breakend;
    }

    public void setBreakend(Breakend breakend) {
        this.breakend = breakend;
    }

    public FlexSeq getSeq() {
        return seq;
    }

    public void setSeq(FlexSeq seq) {
        this.seq = seq;
    }

    public enum SVType {
        DEL, INS, DUP, INV, CNV,
        BND; //breakend

        public enum SVSubtype {
            TANDEM, //consecutive events
            ME, //mobile element
            TRA, //translocation (cut-and-paste)
            IPS; //interspersed
        }
    }

    public static class SymbolicAllele{
        /**
         * looks like <DUP:TANDEM>
         */
        private SVType major;
        private SVType.SVSubtype minor;

        @Override
        public String toString() {
            return "<" + major + (minor == null? ">" : (":" + minor + ">"));
        }

        public SymbolicAllele(String major, String minor) {
            this.major = SVType.valueOf(major);
            this.minor = SVType.SVSubtype.valueOf(minor);
        }

        public SymbolicAllele(String major) {
            this.major = SVType.valueOf(major);
            this.minor = null;
        }

        /**
         * parse <X:Y>
         * @param alt
         */
        public static SymbolicAllele SymbolicAlleleFactory(String alt) {
            Pattern r = Pattern.compile("<([^<:>]+):?([^<:>]*)>");
            Matcher m = r.matcher(alt);
            if (m.find()) {
                if( m.group(2).length() > 0) {
                    return new SymbolicAllele(m.group(1), m.group(2));
                } else {
                    return new SymbolicAllele(m.group(1));
                }
            }
            return null; //failed to parse as a symbolic allele
        }
    }

    public static class Breakend{
        /**
         * looks like ATGC[1:99[
         */
        private byte[] seq;
        private ChrString chr;
        private long pos;
        private boolean left; //ATGC[[, left=true, [[ATGC, left = false
        private boolean forward; //[1:99[ forward = true, ]1:99] forward = false

        public Breakend(byte[] seq, ChrString chr, long pos, boolean left, boolean forward) {
            this.seq = seq;
            this.chr = chr;
            this.pos = pos;
            this.left = left;
            this.forward = forward;
        }

        @Override
        public String toString() {
            if (left) {
                return new String(seq) + (forward?("[" + chr + ":" + pos + "["):("]" + chr + ":" + pos + "]"));
            } else {
                return forward? ("[" + chr + ":" + pos + "["):("]" + chr + ":" + pos + "]") + new String(seq);
            }
        }

        public byte[] getSeq() {
            return seq;
        }

        public ChrString getChr() {
            return chr;
        }

        public long getPos() {
            return pos;
        }

        public boolean isLeft() {
            return left;
        }

        public boolean isForward() {
            return forward;
        }
    }
}
src/main/java/com/bina/varsim/types/variant/alt/Alt.java
package com.bina.varsim.types.variant.alt;

import com.bina.varsim.types.ChrString;
import com.bina.varsim.types.FlexSeq;

import java.util.Arrays;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Created by guoy28 on 11/18/16.
 * class for ALT field in VCF format
 */
public class Alt {
    private SymbolicAllele symbolicAllele;
    private Breakend breakend;
    private FlexSeq seq;

    public Alt() {}

    public SymbolicAllele getSymbolicAllele() {
        return symbolicAllele;
    }

    public void setSymbolicAllele(SymbolicAllele symbolicAllele) {
        this.symbolicAllele = symbolicAllele;
    }

    public Breakend getBreakend() {
        return breakend;
    }

    public void setBreakend(Breakend breakend) {
        this.breakend = breakend;
    }

    public FlexSeq getSeq() {
        return seq;
    }

    public void setSeq(FlexSeq seq) {
        this.seq = seq;
    }

    public enum SVType {
        DEL, INS, DUP, INV, CNV,
        BND; //breakend

        public enum SVSubtype {
            TANDEM, //consecutive events
            ME, //mobile element
            TRA, //translocation (cut-and-paste)
            IPS; //interspersed
        }
    }

    public static class SymbolicAllele{
        /**
         * looks like <DUP:TANDEM>
         */
        private SVType major;
        private SVType.SVSubtype minor;

        @Override
        public String toString() {
            return "<" + major + (minor == null? ">" : (":" + minor + ">"));
        }

        public SymbolicAllele(String major, String minor) {
            this.major = SVType.valueOf(major);
            this.minor = SVType.SVSubtype.valueOf(minor);
        }

        public SymbolicAllele(String major) {
            this.major = SVType.valueOf(major);
            this.minor = null;
        }

        /**
         * parse <X:Y>
         * @param alt
         */
        public static SymbolicAllele SymbolicAlleleFactory(String alt) {
            Pattern r = Pattern.compile("<([^<:>]+):?([^<:>]*)>");
            Matcher m = r.matcher(alt);
            if (m.find()) {
                if( m.group(2).length() > 0) {
                    return new SymbolicAllele(m.group(1), m.group(2));
                } else {
                    return new SymbolicAllele(m.group(1));
                }
            }
            return null; //failed to parse as a symbolic allele
        }
    }

    public static class Breakend{
        /**
         * looks like ATGC[1:99[
         */
        private byte[] seq;
        private ChrString chr;
        private long pos;
        private boolean left; //ATGC[[, left=true, [[ATGC, left = false
        private boolean forward; //[1:99[ forward = true, ]1:99] forward = false

        public Breakend(byte[] seq, ChrString chr, long pos, boolean left, boolean forward) {
            this.seq = seq;
            this.chr = chr;
            this.pos = pos;
            this.left = left;
            this.forward = forward;
        }

        @Override
        public String toString() {
            if (left) {
                return new String(seq) + (forward?("[" + chr + ":" + pos + "["):("]" + chr + ":" + pos + "]"));
            } else {
                return forward? ("[" + chr + ":" + pos + "["):("]" + chr + ":" + pos + "]") + new String(seq);
            }
        }
    }
}
add getters.
src/main/java/com/bina/varsim/types/variant/alt/Alt.java
add getters.
Java
bsd-3-clause
65331e2fa8659bcad6a39680d25d30068e4e1ce2
0
sebastiangraf/perfidix
package org.perfidix.visitor;

import java.io.File;
import java.io.FileWriter;

import org.perfidix.IResult;
import org.perfidix.Result;

/**
 * Storing the raw data without any computation in single files. Per method, one
 * single file is opened with the data. Only usable with Benchmarkresult
 *
 * @author sebi
 *
 */
public class RawData extends ResultVisitor {

    private final File folder;

    public RawData(final String pathToFolder) {
        folder = new File(pathToFolder);
        if (folder.exists()) {
            if (!folder.isDirectory()) {
                folder.delete();
            } else {
                deleteRecursive(folder);
            }
        }
        folder.mkdir();
    }

    @Override
    public void visit(Result r) {
        if (!(r instanceof IResult.BenchmarkResult)) {
            throw new RuntimeException("only benchmark results are supported!");
        }
        IResult.BenchmarkResult benchRes = (IResult.BenchmarkResult) r;
        for (IResult.ClassResult classRes : benchRes.getChildren()) {
            for (IResult.MethodResult methodRes : classRes.getChildren()) {
                getMethodResult(methodRes);
            }
        }
    }

    private void getMethodResult(final IResult.MethodResult methodRes) {
        try {
            int j = 1;
            File currentFile = new File(this.folder.getAbsolutePath() + "//" + methodRes.getName() + j);
            while (currentFile.exists()) {
                System.out.println(currentFile.getAbsolutePath() + " is already existing!");
                j++;
                currentFile = new File(this.folder.getAbsolutePath() + "//" + methodRes.getName() + j);
            }
            System.out.println("Using " + currentFile.getAbsolutePath() + " for output!");
            currentFile.createNewFile();
            final FileWriter out = new FileWriter(currentFile);
            IResult.SingleResult single = methodRes.getChildren().get(0);
            final long data[] = single.getResultSet();
            for (int i = 0; i < data.length; i++) {
                if (i == data.length - 1) {
                    out.write(data[i] + " ");
                } else {
                    out.write(data[i] + ",");
                }
            }
            out.flush();
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static void deleteRecursive(final File file) {
        if (file.isDirectory()) {
            final File[] childs = file.listFiles();
            for (int i = 0; i < childs.length; i++) {
                deleteRecursive(childs[i]);
            }
        } else {
            file.delete();
        }
    }
}
src/org/perfidix/visitor/RawData.java
package org.perfidix.visitor;

import java.io.File;
import java.io.FileWriter;

import org.perfidix.IResult;
import org.perfidix.Result;

/**
 * Storing the raw data without any computation in single files. Per method, one
 * single file is opened with the data. Only usable with Benchmarkresult
 *
 * @author sebi
 *
 */
public class RawData extends ResultVisitor {

    private final File folder;

    public RawData(final String pathToFolder) {
        folder = new File(pathToFolder);
        if (folder.exists()) {
            if (!folder.isDirectory()) {
                folder.delete();
            } else {
                deleteRecursive(folder);
            }
        }
        folder.mkdir();
    }

    @Override
    public void visit(Result r) {
        if (!(r instanceof IResult.BenchmarkResult)) {
            throw new RuntimeException("only benchmark results are supported!");
        }
        IResult.BenchmarkResult benchRes = (IResult.BenchmarkResult) r;
        for (IResult.ClassResult classRes : benchRes.getChildren()) {
            for (IResult.MethodResult methodRes : classRes.getChildren()) {
                getMethodResult(methodRes);
            }
        }
    }

    private void getMethodResult(final IResult.MethodResult methodRes) {
        try {
            int j = 1;
            File currentFile = new File(this.folder.getAbsolutePath() + "//" + methodRes.getName() + j );
            while (currentFile.exists()) {
                System.out.println(currentFile.getAbsolutePath() + " is already existing!");
                j++;
                currentFile = new File(this.folder.getAbsolutePath() + "//" + methodRes.getName() + j);
            }
            System.out.println("Using " + currentFile.getAbsolutePath() + "for output!");
            currentFile.createNewFile();
            final FileWriter out = new FileWriter(currentFile);
            IResult.SingleResult single = methodRes.getChildren().get(0);
            final long data[] = single.getResultSet();
            for (int i = 0; i < data.length; i++) {
                if (i == data.length - 1) {
                    out.write(data[i] + " ");
                } else {
                    out.write(data[i] + ",");
                }
            }
            out.flush();
            out.close();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static void deleteRecursive(final File file) {
        if (file.isDirectory()) {
            final File[] childs = file.listFiles();
            for (int i = 0; i < childs.length; i++) {
                deleteRecursive(childs[i]);
            }
        } else {
            file.delete();
        }
    }
}
--HG-- extra : convert_revision : svn%3Af0255af0-1427-0410-b2be-e7b72c6cd545/trunk%4078
src/org/perfidix/visitor/RawData.java
Java
bsd-3-clause
589862fb11e2e9f140068b86e3ef29f390fa9e06
0
flexgen/flexgen
/*

FlexGen : Flexible Map Generator Library
Copyright (C) 2009-2010 Jeffrey J. Weston <jjweston@gmail.com>
All rights reserved.

Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this
  list of conditions and the following disclaimer in the documentation and/or
  other materials provided with the distribution.
* Neither the name of the FlexGen project nor the names of its contributors may
  be used to endorse or promote products derived from this software without
  specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

*/

package org.flexgen.map.test;

import org.junit.Assert;
import org.junit.Test;

import org.flexgen.map.MapTileOrientation;

/**
 * Test class for the MapTileOrientation class.
 */
public class MapTileOrientationTest
{
    /**
     * Verify that the toString() method returns the correct value for all map tile orientations.
     */
    @Test
    public void toStringTest()
    {
        Assert.assertEquals( "Unexpected result for UPRIGHT.", "Upright", MapTileOrientation.UPRIGHT.toString() );
        Assert.assertEquals( "Unexpected result for CLOCKWISE.", "Clockwise", MapTileOrientation.CLOCKWISE.toString() );
        Assert.assertEquals( "Unexpected result for FLIPPED.", "Flipped", MapTileOrientation.FLIPPED.toString() );
        Assert.assertEquals( "Unexpected result for COUNTER_CLOCKWISE.", "CounterClockwise", MapTileOrientation.COUNTER_CLOCKWISE.toString() );
    }

    /**
     * Verify that the equals() method returns the correct result when called with a null reference.
     */
    @Test
    public void equals_null()
    {
        MapTileOrientation mapTileOrientation1 = MapTileOrientation.FLIPPED;
        MapTileOrientation mapTileOrientation2 = null;
        boolean result = mapTileOrientation1.equals( mapTileOrientation2 );
        Assert.assertEquals( "Unexpected result.", false, result );
    }

    /**
     * Verify that the equals() method returns the correct result when called with the wrong type of
     * object.
     */
    @Test
    public void equals_wrongType()
    {
        MapTileOrientation mapTileOrientation1 = MapTileOrientation.CLOCKWISE;
        Object mapTileOrientation2 = new Object();
        boolean result = mapTileOrientation1.equals( mapTileOrientation2 );
        Assert.assertEquals( "Unexpected result.", false, result );
    }

    /**
     * Verify that the equals() method returns the correct result when called with a different map
     * tile orientation.
     */
    @Test
    public void equals_differentInstance()
    {
        MapTileOrientation mapTileOrientation1 = MapTileOrientation.COUNTER_CLOCKWISE;
        MapTileOrientation mapTileOrientation2 = MapTileOrientation.UPRIGHT;
        boolean result = mapTileOrientation1.equals( mapTileOrientation2 );
        Assert.assertEquals( "Unexpected result.", false, result );
    }
}
src/test/org/flexgen/map/test/MapTileOrientationTest.java
/* FlexGen : Flexible Map Generator Library Copyright (C) 2009-2010 Jeffrey J. Weston <jjweston@gmail.com> All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the FlexGen project nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package org.flexgen.map.test; import org.junit.Assert; import org.junit.Test; import org.flexgen.map.MapTileOrientation; /** * Test class for the MapTileOrientation class. */ public class MapTileOrientationTest { /** * Verify that the toString() method returns the correct value for all map tile orientations. */ @Test public void toStringTest() { Assert.assertEquals( "Unexpected result for UPRIGHT.", "Upright", MapTileOrientation.UPRIGHT.toString() ); Assert.assertEquals( "Unexpected result for CLOCKWISE.", "Clockwise", MapTileOrientation.CLOCKWISE.toString() ); Assert.assertEquals( "Unexpected result for FLIPPED.", "Flipped", MapTileOrientation.FLIPPED.toString() ); Assert.assertEquals( "Unexpected result for COUNTER_CLOCKWISE.", "CounterClockwise", MapTileOrientation.COUNTER_CLOCKWISE.toString() ); } /** * Verify that the equals() method returns the correct result when called with a null reference. */ @Test public void equals_null() { MapTileOrientation mapTileOrientation1 = MapTileOrientation.FLIPPED; MapTileOrientation mapTileOrientation2 = null; boolean result = mapTileOrientation1.equals( mapTileOrientation2 ); Assert.assertEquals( "Unexpected result.", false, result ); } /** * Verify that the equals() method returns the correct result when called with the wrong type of * object. */ @Test public void equals_wrongType() { MapTileOrientation mapTileOrientation1 = MapTileOrientation.CLOCKWISE; Object mapTileOrientation2 = new Object(); boolean result = mapTileOrientation1.equals( mapTileOrientation2 ); Assert.assertEquals( "Unexpected result.", false, result ); } }
Added another test for MapTileOrientation. Added test ensuring that the equals() method returns the correct result when called with a different map tile orientation.
src/test/org/flexgen/map/test/MapTileOrientationTest.java
Added another test for MapTileOrientation.
Java
mit
07869a6ec27278c96435cbcdc92e3fefbb6f617e
0
Pante/Karus-Commons
/*
 * The MIT License
 *
 * Copyright 2018 Karus Labs.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.karuslabs.annotations.signature;

import com.sun.source.tree.TypeParameterTree;

@FunctionalInterface
public interface TypeParameter {

    public boolean test(TypeParameterTree tree);

    public static TypeParameter exactly() {
        return null;
    }

}
annotations/src/main/java/com/karuslabs/annotations/signature/TypeParameter.java
/*
 * The MIT License
 *
 * Copyright 2018 Karus Labs.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.karuslabs.annotations.signature;

import com.sun.source.tree.TypeParameterTree;

@FunctionalInterface
public interface TypeParameter {

    public boolean test(TypeParameterTree tree);

}
Is it possible to merge types? What if TypeParameter, Type & ParameterizedType were merged? What's the relationship between them?
annotations/src/main/java/com/karuslabs/annotations/signature/TypeParameter.java
Is it possible to merge types?
Java
mit
661865de54773931f06c131fd7d27b561419b029
0
chances/cs410-adv-java,chances/cs410-adv-java,chances/cs410-adv-java
package edu.pdx.cs410J.chances;

import edu.pdx.cs410J.AbstractAppointmentBook;
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;

import java.io.*;

/**
 * @author chancesnow
 */
public class TextParser implements AppointmentBookParser {

    private File file;

    /**
     * Create a new appt book parser given a file path.
     *
     * @param filePath The file path
     */
    public TextParser(String filePath) {
        file = new File(filePath);

        // Exceptional state when file doesn't exist or file is a directory
        if (!file.exists() || file.isDirectory()) {
            file = null;
        }
    }

    /**
     * Parse an appointment book from a file.
     *
     * @return Parsed appointment book
     * @throws ParserException
     */
    @Override
    public AbstractAppointmentBook parse() throws ParserException {
        if (file != null) {
            String line;

            try (
                BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(file))
                )
            ) {
                line = br.readLine();
            } catch (IOException ex) {
                throw new ParserException(ex.getMessage(), ex);
            }
        }

        return null;
    }
}
Project2/apptbook/src/main/java/edu/pdx/cs410J/chances/TextParser.java
package edu.pdx.cs410J.chances;

import edu.pdx.cs410J.AbstractAppointmentBook;
import edu.pdx.cs410J.AppointmentBookParser;
import edu.pdx.cs410J.ParserException;

import java.io.*;

/**
 * @author chancesnow
 */
public class TextParser implements AppointmentBookParser {

    private File file;

    public TextParser(String filePath) {
        file = new File(filePath);

        // Exceptional state when file doesn't exist or file is a directory
        if (!file.exists() || file.isDirectory()) {
            file = null;
        }
    }

    /**
     * Parse an appointment book from a file.
     *
     * @return Parsed appointment book
     * @throws ParserException
     */
    @Override
    public AbstractAppointmentBook parse() throws ParserException {
        if (file != null) {
            String line;

            try (
                BufferedReader br = new BufferedReader(
                    new InputStreamReader(new FileInputStream(file))
                )
            ) {
                line = br.readLine();
            } catch (IOException ex) {
                throw new ParserException(ex.getMessage(), ex);
            }
        }

        return null;
    }
}
Add JavaDoc to constructor
Project2/apptbook/src/main/java/edu/pdx/cs410J/chances/TextParser.java
Add JavaDoc to constructor
Java
mit
253d0f8afb9930d636e1170c0149d5aba4dedb54
0
EnSoftCorp/purity-toolbox
package com.ensoftcorp.open.purity.analysis; import java.util.ArrayList; import java.util.EnumSet; import java.util.Set; import java.util.TreeSet; import com.ensoftcorp.atlas.core.db.graph.Graph; import com.ensoftcorp.atlas.core.db.graph.GraphElement; import com.ensoftcorp.atlas.core.db.graph.GraphElement.EdgeDirection; import com.ensoftcorp.atlas.core.db.graph.GraphElement.NodeDirection; import com.ensoftcorp.atlas.core.db.graph.Node; import com.ensoftcorp.atlas.core.db.set.AtlasHashSet; import com.ensoftcorp.atlas.core.db.set.AtlasSet; import com.ensoftcorp.atlas.core.query.Q; import com.ensoftcorp.atlas.core.script.Common; import com.ensoftcorp.atlas.core.xcsg.XCSG; import com.ensoftcorp.open.purity.log.Log; import com.ensoftcorp.open.purity.preferences.PurityPreferences; public class Utilities { // // caching for some common graph types // private static boolean cacheInitialized = false; // private static AtlasSet<GraphElement> defaultReadonlyTypes; // // private static void initializeCache(IProgressMonitor monitor) { // // initialize the cache of default readonly types // defaultReadonlyTypes = new AtlasHashSet<GraphElement>(); // // // autoboxing // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Integer").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Long").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Short").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Boolean").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Byte").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Double").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Float").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Character").eval().nodes().getFirst()); // // // a few other objects are special cases for all practical purposes // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "String").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Number").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.util.concurrent.atomic", "AtomicInteger").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.util.concurrent.atomic", "AtomicLong").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.math", "BigDecimal").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.math", "BigInteger").eval().nodes().getFirst()); // } // // /** // * Returns true if the given type is a default readonly type // * @param type // * @return // */ // public static boolean isDefaultReadonlyType(GraphElement type) { // if(type == null){ // return false; // } // if(!cacheInitialized){ // initializeCache(new NullProgressMonitor()); // } // return type.taggedWith(XCSG.Primitive) || defaultReadonlyTypes.contains(type); // } /** * Used as an attribute key to temporarily compute the potential immutability qualifiers */ public static final String IMMUTABILITY_QUALIFIERS = "IMMUTABILITY_QUALIFIERS"; // TODO: bug EnSoft to make tags like these... 
public static final String CLASS_VARIABLE_ASSIGNMENT = "CLASS_VARIABLE_ASSIGNMENT"; public static final String CLASS_VARIABLE_VALUE = "CLASS_VARIABLE_VALUE"; public static final String CLASS_VARIABLE_ACCESS = "CLASS_VARIABLE_ACCESS"; // TODO: pester EnSoft to remove these...or at least unify with source graph format public static final String DATAFLOW_DISPLAY_NODE = "DATAFLOW_DISPLAY_NODE"; public static final String DUMMY_ASSIGNMENT_NODE = "DUMMY_ASSIGNMENT_NODE"; public static final String DUMMY_RETURN_NODE = "DUMMY_RETURN_NODE"; /** * Adds DUMMY_RETURN_NODE to void methods and DUMMY_ASSIGNMENT_NODE from unassigned callsites to a dummy assignment node */ public static void addDummyReturnAssignments(){ if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Adding dummy return assignments..."); Q returnsEdges = Common.universe().edgesTaggedWithAny(XCSG.Returns).retainEdges(); Q voidMethods = returnsEdges.predecessors(Common.types("void")); for(GraphElement voidMethod : voidMethods.eval().nodes()){ GraphElement returnValue = Graph.U.createNode(); returnValue.putAttr(XCSG.name, DUMMY_RETURN_NODE); returnValue.tag(XCSG.ReturnValue); returnValue.tag(DUMMY_RETURN_NODE); // create a contains edge from the void method to the return value GraphElement containsEdge = Graph.U.createEdge(voidMethod, returnValue); containsEdge.tag(XCSG.Contains); } // TODO: enable after known bug in Atlas is fixed // // sanity check (all methods have a return value) // Q allMethods = Common.universe().nodesTaggedWithAny(XCSG.Method); // Q returnValues = Common.universe().nodesTaggedWithAny(XCSG.ReturnValue); // long numMissingReturns = allMethods.difference(returnValues.parent()).eval().nodes().size(); // if(numMissingReturns > 0){ // throw new RuntimeException("There are " + numMissingReturns + " missing return value nodes!"); // } Q callsites = Common.universe().nodesTaggedWithAny(XCSG.CallSite); Q localDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.LocalDataFlow); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); Q assignments = Common.universe().nodesTaggedWithAny(XCSG.Assignment); Q assignedCallsites = localDataFlowEdges.predecessors(assignments).nodesTaggedWithAny(XCSG.CallSite); Q unassignedCallsites = callsites.difference(assignedCallsites); for(GraphElement unassignedCallsite : unassignedCallsites.eval().nodes()){ GraphElement dummyAssignmentNode = Graph.U.createNode(); dummyAssignmentNode.putAttr(XCSG.name, DUMMY_ASSIGNMENT_NODE); dummyAssignmentNode.tag(XCSG.Assignment); dummyAssignmentNode.tag(DUMMY_ASSIGNMENT_NODE); // create edge from unassigned callsite to the dummy assignment node GraphElement localDataFlowEdge = Graph.U.createEdge(unassignedCallsite, dummyAssignmentNode); localDataFlowEdge.tag(XCSG.LocalDataFlow); // create a contains edge from the callsites parent to the dummy assignment node GraphElement parent = Common.toQ(unassignedCallsite).parent().eval().nodes().getFirst(); GraphElement containsEdge = Graph.U.createEdge(parent, dummyAssignmentNode); containsEdge.tag(XCSG.Contains); // if the unassigned callsite does not have an incoming interprocedural data flow edge // then it must be a void method in which case we need to link it up with the corresponding // dummy return node. 
Since the dummy nodes are just place holders for readonly types, // its not terribly important to completely resolve dynamic dispatches and we can just link // to the dummy return node of the signature method if(interproceduralDataFlowEdges.predecessors(Common.toQ(unassignedCallsite)).eval().nodes().isEmpty()){ GraphElement method = getInvokedMethodSignature(unassignedCallsite); GraphElement returnValue = Common.toQ(method).children().nodesTaggedWithAny(XCSG.ReturnValue).eval().nodes().getFirst(); if(returnValue != null){ if(method != null){ GraphElement interproceduralDataFlowEdge = Graph.U.createEdge(returnValue, unassignedCallsite); interproceduralDataFlowEdge.tag(XCSG.InterproceduralDataFlow); } else { Log.warning("Method is null for unassignedCallsite " + unassignedCallsite.address().toAddressString()); } } else { Log.warning("Return value is null for unassignedCallsite " + unassignedCallsite.address().toAddressString()); } } } // sanity check (all callsites are assigned to an assignment node) long numMissingCallsiteAssignments = callsites.difference(localDataFlowEdges.predecessors(assignments)).eval().nodes().size(); if(numMissingCallsiteAssignments > 0){ throw new RuntimeException("There are " + numMissingCallsiteAssignments + " missing callsite assignments!"); } // sanity check (all callsites get a value from a return value) Q returnValuesToCallsites = interproceduralDataFlowEdges.successors(Common.universe().nodesTaggedWithAny(XCSG.ReturnValue)).nodesTaggedWithAny(XCSG.CallSite); long numMissingCallsiteReturns = callsites.difference(returnValuesToCallsites).eval().nodes().size(); if(numMissingCallsiteReturns > 0){ throw new RuntimeException("There are " + numMissingCallsiteReturns + " missing callsite returns!"); } } /** * Removes DUMMY_RETURN_NODE and DUMMY_ASSIGNMENT_NODE nodes and any edges connected to them */ public static void removeDummyReturnAssignments(){ if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Removing dummy return assignments..."); // just need to remove the nodes // edges connected to the new nodes will be removed once the nodes are removed Q dummyNodes = Common.universe().nodesTaggedWithAny(DUMMY_RETURN_NODE, DUMMY_ASSIGNMENT_NODE); TreeSet<Node> dummyNodesToRemove = new TreeSet<Node>(); for(Node dummyNode : dummyNodes.eval().nodes()){ dummyNodesToRemove.add(dummyNode); } while(!dummyNodesToRemove.isEmpty()){ Node dummyNode = dummyNodesToRemove.pollFirst(); Graph.U.delete(dummyNode); } } /** * Adds DATAFLOW_DISPLAY_NODE tags to display nodes * Data flow display nodes are added for graph display reasons... 
*/ public static void addDataFlowDisplayNodeTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Adding data flow display node tags..."); ArrayList<String> nonDataFlowDisplayNodeTags = new ArrayList<String>(); for(String tag : XCSG.HIERARCHY.childrenOfOneParent(XCSG.DataFlow_Node)){ nonDataFlowDisplayNodeTags.add(tag); } String[] nonDataFlowDisplayNodeTagArray = new String[nonDataFlowDisplayNodeTags.size()]; nonDataFlowDisplayNodeTags.toArray(nonDataFlowDisplayNodeTagArray); Q dataFlowNodes = Common.universe().nodesTaggedWithAny(XCSG.DataFlow_Node); Q classVariableAccessNodes = Common.universe().nodesTaggedWithAny(CLASS_VARIABLE_ACCESS); Q nonVanillaDataFlowNodes = Common.universe().nodesTaggedWithAny(nonDataFlowDisplayNodeTagArray); for(GraphElement dataFlowDisplayNode : dataFlowNodes.difference(classVariableAccessNodes, nonVanillaDataFlowNodes).eval().nodes()){ dataFlowDisplayNode.tag(DATAFLOW_DISPLAY_NODE); } // sanity check, better to fail fast here than later... Q localDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.LocalDataFlow); Q displayNodes = Common.universe().nodesTaggedWithAny(DATAFLOW_DISPLAY_NODE); // data flow display nodes should be accessible only from a local data flow edge Q localDataFlowDisplayNodes = localDataFlowEdges.reverseStep(displayNodes).retainEdges(); if(localDataFlowDisplayNodes.intersection(displayNodes).eval().nodes().size() != displayNodes.eval().nodes().size()){ throw new RuntimeException("Unexpected data flow display nodes!"); } // data flow display nodes parents should not also be data flow display nodes Q dataFlowDisplayNodeParents = localDataFlowEdges.predecessors(displayNodes); if(!dataFlowDisplayNodeParents.nodesTaggedWithAny(DATAFLOW_DISPLAY_NODE).eval().nodes().isEmpty()){ throw new RuntimeException("Unexpected data flow display nodes parents!"); } } /** * Removes DATAFLOW_DISPLAY_NODE tags to display nodes */ public static void removeDataFlowDisplayNodeTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Removing data flow display node tags..."); AtlasSet<Node> dataFlowDisplayNodes = Common.universe().nodesTaggedWithAny(DATAFLOW_DISPLAY_NODE).eval().nodes(); for(Node dataFlowDisplayNode : dataFlowDisplayNodes){ dataFlowDisplayNode.tags().remove(DATAFLOW_DISPLAY_NODE); } } public static AtlasSet<Node> getDisplayNodeReferences(GraphElement displayNode){ Q localDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.LocalDataFlow); Q dataFlowDisplayNodeParents = localDataFlowEdges.predecessors(Common.toQ(displayNode)); return dataFlowDisplayNodeParents.eval().nodes(); } /** * Adds CLASS_VARIABLE_ASSIGNMENT, CLASS_VARIABLE_VALUE, and CLASS_VARIABLE_ACCESS * tags to reads/writes on static variables */ public static void addClassVariableAccessTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Adding class variable access tags..."); Q classVariables = Common.universe().nodesTaggedWithAny(XCSG.ClassVariable); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); AtlasSet<Node> classVariableAssignments = interproceduralDataFlowEdges.predecessors(classVariables).eval().nodes(); for(GraphElement classVariableAssignment : classVariableAssignments){ classVariableAssignment.tag(CLASS_VARIABLE_ASSIGNMENT); classVariableAssignment.tag(CLASS_VARIABLE_ACCESS); } AtlasSet<Node> classVariableValues = interproceduralDataFlowEdges.successors(classVariables).eval().nodes(); for(GraphElement classVariableValue : classVariableValues){ 
classVariableValue.tag(CLASS_VARIABLE_VALUE); classVariableValue.tag(CLASS_VARIABLE_ACCESS); } } /** * Removes CLASS_VARIABLE_ASSIGNMENT, CLASS_VARIABLE_VALUE, and CLASS_VARIABLE_ACCESS * tags to reads/writes on static variables */ public static void removeClassVariableAccessTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Removing class variable access tags..."); Q classVariables = Common.universe().nodesTaggedWithAny(XCSG.ClassVariable); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); AtlasSet<Node> classVariableAssignments = interproceduralDataFlowEdges.predecessors(classVariables).eval().nodes(); for(GraphElement classVariableAssignment : classVariableAssignments){ classVariableAssignment.tags().remove(CLASS_VARIABLE_ASSIGNMENT); classVariableAssignment.tags().remove(CLASS_VARIABLE_ACCESS); } AtlasSet<Node> classVariableValues = interproceduralDataFlowEdges.successors(classVariables).eval().nodes(); for(GraphElement classVariableValue : classVariableValues){ classVariableValue.tags().remove(CLASS_VARIABLE_VALUE); classVariableValue.tags().remove(CLASS_VARIABLE_ACCESS); } } /** * Given a callsite this method returns the invoked method signature * @param callsite * @return */ public static Node getInvokedMethodSignature(GraphElement callsite) { // XCSG.InvokedSignature connects a dynamic dispatch to its signature method // XCSG.InvokedFunction connects a static dispatch to it actual target method Q invokedEdges = Common.universe().edgesTaggedWithAny(XCSG.InvokedSignature, XCSG.InvokedFunction); Node method = invokedEdges.successors(Common.toQ(callsite)).eval().nodes().getFirst(); return method; } /** * Sets the type qualifier for a graph element * @param node * @param qualifier * @return Returns true if the type qualifier changed */ public static boolean removeTypes(Node node, Set<ImmutabilityTypes> typesToRemove){ Set<ImmutabilityTypes> typeSet = getTypes(node); String logMessage = "Remove: " + typesToRemove.toString() + " from " + typeSet.toString() + " for " + node.getAttr(XCSG.name); boolean typesChanged = typeSet.removeAll(typesToRemove); if(typesChanged){ if(PurityPreferences.isDebugLoggingEnabled()) Log.info(logMessage); if(PurityPreferences.isDebugLoggingEnabled() && typeSet.isEmpty()) Log.warning("Remove result in an empty type set."); } return typesChanged; } /** * Sets the type qualifier for a graph element * @param node * @param qualifier * @return Returns true if the type qualifier changed */ public static boolean removeTypes(Node node, ImmutabilityTypes... 
types){ EnumSet<ImmutabilityTypes> typesToRemove = EnumSet.noneOf(ImmutabilityTypes.class); for(ImmutabilityTypes type : types){ typesToRemove.add(type); } return removeTypes(node, typesToRemove); } @SuppressWarnings("unchecked") public static Set<ImmutabilityTypes> getTypes(GraphElement ge){ if(ge.hasAttr(IMMUTABILITY_QUALIFIERS)){ return (Set<ImmutabilityTypes>) ge.getAttr(IMMUTABILITY_QUALIFIERS); } else { EnumSet<ImmutabilityTypes> qualifiers = getDefaultTypes(ge); ge.putAttr(IMMUTABILITY_QUALIFIERS, qualifiers); return qualifiers; } } public static GraphElement getObjectType(GraphElement ge) { Q typeOfEdges = Common.universe().edgesTaggedWithAny(XCSG.TypeOf); return typeOfEdges.successors(Common.toQ(ge)).eval().nodes().getFirst(); } public static AtlasSet<Node> parseReferences(Node node){ if(PurityPreferences.isDebugLoggingEnabled()) Log.info("Parsing reference for " + node.address().toAddressString()); AtlasSet<Node> parsedReferences = new AtlasHashSet<Node>(); TreeSet<Node> worklist = new TreeSet<Node>(); worklist.add(node); Q dataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.DataFlow_Edge); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); while(!worklist.isEmpty()){ GraphElement reference = worklist.pollFirst(); if(reference != null && needsProcessing(reference)){ if(reference.taggedWith(XCSG.Cast)){ for(Node workItem : dataFlowEdges.predecessors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } if(reference.taggedWith(DATAFLOW_DISPLAY_NODE)){ for(Node workItem : Utilities.getDisplayNodeReferences(reference)){ worklist.add(workItem); } continue; } if(reference.taggedWith(XCSG.CallSite)){ // parse return, a callsite on a callsite must be a callsite on the resulting object from the first callsite Node method = Utilities.getInvokedMethodSignature(reference); worklist.add(Common.toQ(method).children().nodesTaggedWithAny(XCSG.ReturnValue).eval().nodes().getFirst()); continue; } // get the field for instance and class variable assignments if(reference.taggedWith(XCSG.InstanceVariableAssignment) || reference.taggedWith(Utilities.CLASS_VARIABLE_ASSIGNMENT)){ for(Node workItem : interproceduralDataFlowEdges.successors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } // get the field for instance and class variable values if(reference.taggedWith(XCSG.InstanceVariableValue) || reference.taggedWith(Utilities.CLASS_VARIABLE_VALUE)){ for(Node workItem : interproceduralDataFlowEdges.predecessors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } // get the array components being written to if(reference.taggedWith(XCSG.ArrayWrite)){ for(Node workItem : interproceduralDataFlowEdges.successors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } // get the array components being read from if(reference.taggedWith(XCSG.ArrayRead)){ for(Node workItem : interproceduralDataFlowEdges.predecessors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } String message = "Unhandled reference type for GraphElement " + node.address().toAddressString(); RuntimeException e = new RuntimeException(message); Log.error(message, e); throw e; } else { if(reference == null){ String message = "Null reference for GraphElement " + node.address().toAddressString(); RuntimeException e = new RuntimeException(message); Log.error(message, e); throw e; } else { parsedReferences.add(reference); } } } return parsedReferences; } private static 
boolean needsProcessing(GraphElement ge){ if(ge.taggedWith(DATAFLOW_DISPLAY_NODE)){ return true; } if(ge.taggedWith(XCSG.Cast)){ return true; } if(ge.taggedWith(XCSG.CallSite)){ return true; } if(ge.taggedWith(XCSG.InstanceVariableAccess) || ge.taggedWith(Utilities.CLASS_VARIABLE_ACCESS)){ return true; } if(ge.taggedWith(XCSG.ArrayAccess)){ return true; } return !isTypable(ge); } public static boolean isTypable(GraphElement ge){ // invalid types if(ge.taggedWith(XCSG.InstanceVariableAccess) || ge.taggedWith(Utilities.CLASS_VARIABLE_ACCESS)){ return false; } if(ge.taggedWith(XCSG.ArrayAccess)){ return false; } // valid types if(ge.taggedWith(DUMMY_ASSIGNMENT_NODE) || ge.taggedWith(DUMMY_RETURN_NODE)){ // these are dummy read only nodes return true; } if(ge.taggedWith(XCSG.Null)){ return true; } if(ge.taggedWith(XCSG.Literal)){ return true; } if(ge.taggedWith(XCSG.Instantiation) || ge.taggedWith(XCSG.ArrayInstantiation)){ return true; } if(ge.taggedWith(XCSG.Method)){ return true; } if(ge.taggedWith(XCSG.Identity)){ return true; } if(ge.taggedWith(XCSG.Parameter)){ return true; } if(ge.taggedWith(XCSG.ReturnValue)){ return true; } if(ge.taggedWith(XCSG.InstanceVariable) || ge.taggedWith(XCSG.ClassVariable)){ return true; } if(ge.taggedWith(XCSG.ArrayComponents)){ return true; } if(ge.taggedWith(XCSG.ParameterPass)){ return true; } if(ge.taggedWith(XCSG.Operator)){ return true; } if(ge.taggedWith(XCSG.CaughtValue)){ return true; } if(ge.taggedWith(XCSG.ElementFromCollection)){ return true; } if(ge.taggedWith(XCSG.Assignment)){ if(!ge.taggedWith(XCSG.InstanceVariableAssignment) && !ge.taggedWith(Utilities.CLASS_VARIABLE_ASSIGNMENT)){ return true; } } // if(isDefaultReadonlyType(getObjectType(ge))){ // return true; // } // something made it through the gap... 
return false; } public static EnumSet<ImmutabilityTypes> getDefaultTypes(GraphElement ge) { if(!isTypable(ge)){ RuntimeException e = new RuntimeException("Unexpected graph element: " + ge.address()); Log.error("Unexpected graph element: " + ge.address(), e); throw e; } EnumSet<ImmutabilityTypes> qualifiers = EnumSet.noneOf(ImmutabilityTypes.class); if(ge.taggedWith(DUMMY_ASSIGNMENT_NODE) || ge.taggedWith(DUMMY_RETURN_NODE)){ // these are dummy read only nodes that help to provide context sensitivity // in unassigned callsites or void methods qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Null)){ // null does not modify the stack or heap so it is readonly // however in order to satisfy constraints the other types should be initialized // note that assignments of nulls to a field can still mutate an object qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Literal)){ // several java objects are readonly for all practical purposes // however in order to satisfy constraints the other types should be initialized qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Instantiation) || ge.taggedWith(XCSG.ArrayInstantiation)){ // Type Rule 1 - TNEW // return type of a constructor is only mutable // x = new C(); // no effect on qualifier to x qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ReturnValue)){ // Section 2.4 of Reference 1 // "Method returns are initialized S(ret) = {readonly, polyread} for each method m" qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); } else if (ge.taggedWith(XCSG.Parameter)){ qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Identity)){ qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.InstanceVariable)){ // Section 2.4 of Reference 1 // "Fields are initialized to S(f) = {readonly, polyread}" qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); } else if(ge.taggedWith(XCSG.ClassVariable)){ // Section 3 of Reference 1 // static fields are initialized to S(sf) = {readonly, mutable} qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Method)){ // Section 3 of Reference 1 // methods can have a static type of {readonly, polyread, mutable} qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Operator)){ // the result of a primitive operation on primitives or primitive references is always readonly qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ArrayComponents)){ qualifiers.add(ImmutabilityTypes.READONLY); // qualifiers.add(ImmutabilityTypes.POLYREAD); // an array component is basically a local reference, TODO: what about array fields? 
qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.CaughtValue)){ // caught exceptions could be polyread since they could come from multiple call stacks qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ElementFromCollection)){ // TODO: should probably treat these like array components (mutations to these mutate the collection) qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ParameterPass)){ // Section 2.4 of Reference 1 // "All other references are initialized to the maximal // set of qualifiers, i.e. S(x) = {readonly, polyread, mutable}" // But, what does it mean for a local reference to be polyread? ~Ben qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Assignment)){ if(!ge.taggedWith(XCSG.InstanceVariableAssignment) && !ge.taggedWith(Utilities.CLASS_VARIABLE_ASSIGNMENT)){ // could be a local reference // Section 2.4 of Reference 1 // "All other references are initialized to the maximal // set of qualifiers, i.e. S(x) = {readonly, polyread, mutable}" // But, what does it mean for a local reference to be polyread? ~Ben qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } } // else if(isDefaultReadonlyType(Utilities.getObjectType(ge))){ // // several java objects are readonly for all practical purposes // // however in order to satisfy constraints the other types should be initialized // qualifiers.add(ImmutabilityTypes.READONLY); // qualifiers.add(ImmutabilityTypes.POLYREAD); // qualifiers.add(ImmutabilityTypes.MUTABLE); // } else { RuntimeException e = new RuntimeException("Unexpected graph element: " + ge.address()); Log.error("Unexpected graph element: " + ge.address(), e); throw e; } return qualifiers; } /** * Returns the fields or local variables accessed for an instance variable access * @param fieldAccess * @return */ public static AtlasSet<Node> getAccessedContainers(GraphElement fieldAccess){ Q instanceVariableAccessedEdges = Common.universe().edgesTaggedWithAny(XCSG.InstanceVariableAccessed); Q variablesAccessed = instanceVariableAccessedEdges.reverse(Common.toQ(fieldAccess)); Q instanceVariablesAccessed = variablesAccessed.nodesTaggedWithAny(XCSG.InstanceVariableAccess); Q classVariablesAccessed = variablesAccessed.nodesTaggedWithAny(CLASS_VARIABLE_ACCESS); Q localVariables = variablesAccessed.difference(instanceVariablesAccessed, classVariablesAccessed); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); Q fieldsAccessed = interproceduralDataFlowEdges.predecessors(instanceVariablesAccessed.union(classVariablesAccessed)); return localVariables.union(fieldsAccessed).eval().nodes(); } /** * Returns the containing method of a given graph element or null if one is not found * @param node * @return */ public static Node getContainingMethod(Node node) { // NOTE: the enclosing method may be two steps or more above return getContainingNode(node, XCSG.Method); } /** * Find the next immediate containing node with the given tag. 
* * @param node * @param containingTag * @return the next immediate containing node, or null if none exists; never returns the given node */ public static Node getContainingNode(Node node, String containingTag) { if(node == null){ return null; } while(true) { GraphElement containsEdge = Graph.U.edges(node, NodeDirection.IN).taggedWithAll(XCSG.Contains).getFirst(); if(containsEdge == null){ return null; } Node parent = containsEdge.getNode(EdgeDirection.FROM); if(parent.taggedWith(containingTag)){ return parent; } node = parent; } } }
com.ensoftcorp.open.purity/src/com/ensoftcorp/open/purity/analysis/Utilities.java
package com.ensoftcorp.open.purity.analysis; import java.util.ArrayList; import java.util.EnumSet; import java.util.Set; import java.util.TreeSet; import com.ensoftcorp.atlas.core.db.graph.Graph; import com.ensoftcorp.atlas.core.db.graph.GraphElement; import com.ensoftcorp.atlas.core.db.graph.GraphElement.EdgeDirection; import com.ensoftcorp.atlas.core.db.graph.GraphElement.NodeDirection; import com.ensoftcorp.atlas.core.db.graph.Node; import com.ensoftcorp.atlas.core.db.set.AtlasHashSet; import com.ensoftcorp.atlas.core.db.set.AtlasSet; import com.ensoftcorp.atlas.core.query.Q; import com.ensoftcorp.atlas.core.script.Common; import com.ensoftcorp.atlas.core.xcsg.XCSG; import com.ensoftcorp.open.purity.log.Log; import com.ensoftcorp.open.purity.preferences.PurityPreferences; public class Utilities { // // caching for some common graph types // private static boolean cacheInitialized = false; // private static AtlasSet<GraphElement> defaultReadonlyTypes; // // private static void initializeCache(IProgressMonitor monitor) { // // initialize the cache of default readonly types // defaultReadonlyTypes = new AtlasHashSet<GraphElement>(); // // // autoboxing // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Integer").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Long").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Short").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Boolean").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Byte").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Double").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Float").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Character").eval().nodes().getFirst()); // // // a few other objects are special cases for all practical purposes // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "String").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.lang", "Number").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.util.concurrent.atomic", "AtomicInteger").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.util.concurrent.atomic", "AtomicLong").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.math", "BigDecimal").eval().nodes().getFirst()); // defaultReadonlyTypes.add(Common.typeSelect("java.math", "BigInteger").eval().nodes().getFirst()); // } // // /** // * Returns true if the given type is a default readonly type // * @param type // * @return // */ // public static boolean isDefaultReadonlyType(GraphElement type) { // if(type == null){ // return false; // } // if(!cacheInitialized){ // initializeCache(new NullProgressMonitor()); // } // return type.taggedWith(XCSG.Primitive) || defaultReadonlyTypes.contains(type); // } /** * Used as an attribute key to temporarily compute the potential immutability qualifiers */ public static final String IMMUTABILITY_QUALIFIERS = "IMMUTABILITY_QUALIFIERS"; // TODO: bug EnSoft to make tags like these... 
public static final String CLASS_VARIABLE_ASSIGNMENT = "CLASS_VARIABLE_ASSIGNMENT"; public static final String CLASS_VARIABLE_VALUE = "CLASS_VARIABLE_VALUE"; public static final String CLASS_VARIABLE_ACCESS = "CLASS_VARIABLE_ACCESS"; // TODO: pester EnSoft to remove these...or at least unify with source graph format public static final String DATAFLOW_DISPLAY_NODE = "DATAFLOW_DISPLAY_NODE"; public static final String DUMMY_ASSIGNMENT_NODE = "DUMMY_ASSIGNMENT_NODE"; public static final String DUMMY_RETURN_NODE = "DUMMY_RETURN_NODE"; /** * Adds DUMMY_RETURN_NODE to void methods and DUMMY_ASSIGNMENT_NODE from unassigned callsites to a dummy assignment node */ public static void addDummyReturnAssignments(){ if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Adding dummy return assignments..."); Q returnsEdges = Common.universe().edgesTaggedWithAny(XCSG.Returns).retainEdges(); Q voidMethods = returnsEdges.predecessors(Common.types("void")); for(GraphElement voidMethod : voidMethods.eval().nodes()){ GraphElement returnValue = Graph.U.createNode(); returnValue.putAttr(XCSG.name, DUMMY_RETURN_NODE); returnValue.tag(XCSG.ReturnValue); returnValue.tag(DUMMY_RETURN_NODE); // create a contains edge from the void method to the return value GraphElement containsEdge = Graph.U.createEdge(voidMethod, returnValue); containsEdge.tag(XCSG.Contains); } // TODO: enable after known bug in Atlas is fixed // // sanity check (all methods have a return value) // Q allMethods = Common.universe().nodesTaggedWithAny(XCSG.Method); // Q returnValues = Common.universe().nodesTaggedWithAny(XCSG.ReturnValue); // long numMissingReturns = allMethods.difference(returnValues.parent()).eval().nodes().size(); // if(numMissingReturns > 0){ // throw new RuntimeException("There are " + numMissingReturns + " missing return value nodes!"); // } Q callsites = Common.universe().nodesTaggedWithAny(XCSG.CallSite); Q localDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.LocalDataFlow); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); Q assignments = Common.universe().nodesTaggedWithAny(XCSG.Assignment); Q assignedCallsites = localDataFlowEdges.predecessors(assignments).nodesTaggedWithAny(XCSG.CallSite); Q unassignedCallsites = callsites.difference(assignedCallsites); for(GraphElement unassignedCallsite : unassignedCallsites.eval().nodes()){ GraphElement dummyAssignmentNode = Graph.U.createNode(); dummyAssignmentNode.putAttr(XCSG.name, DUMMY_ASSIGNMENT_NODE); dummyAssignmentNode.tag(XCSG.Assignment); dummyAssignmentNode.tag(DUMMY_ASSIGNMENT_NODE); // create edge from unassigned callsite to the dummy assignment node GraphElement localDataFlowEdge = Graph.U.createEdge(unassignedCallsite, dummyAssignmentNode); localDataFlowEdge.tag(XCSG.LocalDataFlow); // create a contains edge from the callsites parent to the dummy assignment node GraphElement parent = Common.toQ(unassignedCallsite).parent().eval().nodes().getFirst(); GraphElement containsEdge = Graph.U.createEdge(parent, dummyAssignmentNode); containsEdge.tag(XCSG.Contains); // if the unassigned callsite does not have an incoming interprocedural data flow edge // then it must be a void method in which case we need to link it up with the corresponding // dummy return node. 
Since the dummy nodes are just place holders for readonly types, // its not terribly important to completely resolve dynamic dispatches and we can just link // to the dummy return node of the signature method if(interproceduralDataFlowEdges.predecessors(Common.toQ(unassignedCallsite)).eval().nodes().isEmpty()){ GraphElement method = getInvokedMethodSignature(unassignedCallsite); GraphElement returnValue = Common.toQ(method).children().nodesTaggedWithAny(XCSG.ReturnValue).eval().nodes().getFirst(); GraphElement interproceduralDataFlowEdge = Graph.U.createEdge(returnValue, unassignedCallsite); interproceduralDataFlowEdge.tag(XCSG.InterproceduralDataFlow); } } // sanity check (all callsites are assigned to an assignment node) long numMissingCallsiteAssignments = callsites.difference(localDataFlowEdges.predecessors(assignments)).eval().nodes().size(); if(numMissingCallsiteAssignments > 0){ throw new RuntimeException("There are " + numMissingCallsiteAssignments + " missing callsite assignments!"); } // sanity check (all callsites get a value from a return value) Q returnValuesToCallsites = interproceduralDataFlowEdges.successors(Common.universe().nodesTaggedWithAny(XCSG.ReturnValue)).nodesTaggedWithAny(XCSG.CallSite); long numMissingCallsiteReturns = callsites.difference(returnValuesToCallsites).eval().nodes().size(); if(numMissingCallsiteReturns > 0){ throw new RuntimeException("There are " + numMissingCallsiteReturns + " missing callsite returns!"); } } /** * Removes DUMMY_RETURN_NODE and DUMMY_ASSIGNMENT_NODE nodes and any edges connected to them */ public static void removeDummyReturnAssignments(){ if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Removing dummy return assignments..."); // just need to remove the nodes // edges connected to the new nodes will be removed once the nodes are removed Q dummyNodes = Common.universe().nodesTaggedWithAny(DUMMY_RETURN_NODE, DUMMY_ASSIGNMENT_NODE); TreeSet<Node> dummyNodesToRemove = new TreeSet<Node>(); for(Node dummyNode : dummyNodes.eval().nodes()){ dummyNodesToRemove.add(dummyNode); } while(!dummyNodesToRemove.isEmpty()){ Node dummyNode = dummyNodesToRemove.pollFirst(); Graph.U.delete(dummyNode); } } /** * Adds DATAFLOW_DISPLAY_NODE tags to display nodes * Data flow display nodes are added for graph display reasons... */ public static void addDataFlowDisplayNodeTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Adding data flow display node tags..."); ArrayList<String> nonDataFlowDisplayNodeTags = new ArrayList<String>(); for(String tag : XCSG.HIERARCHY.childrenOfOneParent(XCSG.DataFlow_Node)){ nonDataFlowDisplayNodeTags.add(tag); } String[] nonDataFlowDisplayNodeTagArray = new String[nonDataFlowDisplayNodeTags.size()]; nonDataFlowDisplayNodeTags.toArray(nonDataFlowDisplayNodeTagArray); Q dataFlowNodes = Common.universe().nodesTaggedWithAny(XCSG.DataFlow_Node); Q classVariableAccessNodes = Common.universe().nodesTaggedWithAny(CLASS_VARIABLE_ACCESS); Q nonVanillaDataFlowNodes = Common.universe().nodesTaggedWithAny(nonDataFlowDisplayNodeTagArray); for(GraphElement dataFlowDisplayNode : dataFlowNodes.difference(classVariableAccessNodes, nonVanillaDataFlowNodes).eval().nodes()){ dataFlowDisplayNode.tag(DATAFLOW_DISPLAY_NODE); } // sanity check, better to fail fast here than later... 
Q localDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.LocalDataFlow); Q displayNodes = Common.universe().nodesTaggedWithAny(DATAFLOW_DISPLAY_NODE); // data flow display nodes should be accessible only from a local data flow edge Q localDataFlowDisplayNodes = localDataFlowEdges.reverseStep(displayNodes).retainEdges(); if(localDataFlowDisplayNodes.intersection(displayNodes).eval().nodes().size() != displayNodes.eval().nodes().size()){ throw new RuntimeException("Unexpected data flow display nodes!"); } // data flow display nodes parents should not also be data flow display nodes Q dataFlowDisplayNodeParents = localDataFlowEdges.predecessors(displayNodes); if(!dataFlowDisplayNodeParents.nodesTaggedWithAny(DATAFLOW_DISPLAY_NODE).eval().nodes().isEmpty()){ throw new RuntimeException("Unexpected data flow display nodes parents!"); } } /** * Removes DATAFLOW_DISPLAY_NODE tags to display nodes */ public static void removeDataFlowDisplayNodeTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Removing data flow display node tags..."); AtlasSet<Node> dataFlowDisplayNodes = Common.universe().nodesTaggedWithAny(DATAFLOW_DISPLAY_NODE).eval().nodes(); for(Node dataFlowDisplayNode : dataFlowDisplayNodes){ dataFlowDisplayNode.tags().remove(DATAFLOW_DISPLAY_NODE); } } public static AtlasSet<Node> getDisplayNodeReferences(GraphElement displayNode){ Q localDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.LocalDataFlow); Q dataFlowDisplayNodeParents = localDataFlowEdges.predecessors(Common.toQ(displayNode)); return dataFlowDisplayNodeParents.eval().nodes(); } /** * Adds CLASS_VARIABLE_ASSIGNMENT, CLASS_VARIABLE_VALUE, and CLASS_VARIABLE_ACCESS * tags to reads/writes on static variables */ public static void addClassVariableAccessTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Adding class variable access tags..."); Q classVariables = Common.universe().nodesTaggedWithAny(XCSG.ClassVariable); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); AtlasSet<Node> classVariableAssignments = interproceduralDataFlowEdges.predecessors(classVariables).eval().nodes(); for(GraphElement classVariableAssignment : classVariableAssignments){ classVariableAssignment.tag(CLASS_VARIABLE_ASSIGNMENT); classVariableAssignment.tag(CLASS_VARIABLE_ACCESS); } AtlasSet<Node> classVariableValues = interproceduralDataFlowEdges.successors(classVariables).eval().nodes(); for(GraphElement classVariableValue : classVariableValues){ classVariableValue.tag(CLASS_VARIABLE_VALUE); classVariableValue.tag(CLASS_VARIABLE_ACCESS); } } /** * Removes CLASS_VARIABLE_ASSIGNMENT, CLASS_VARIABLE_VALUE, and CLASS_VARIABLE_ACCESS * tags to reads/writes on static variables */ public static void removeClassVariableAccessTags() { if(PurityPreferences.isGeneralLoggingEnabled()) Log.info("Removing class variable access tags..."); Q classVariables = Common.universe().nodesTaggedWithAny(XCSG.ClassVariable); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); AtlasSet<Node> classVariableAssignments = interproceduralDataFlowEdges.predecessors(classVariables).eval().nodes(); for(GraphElement classVariableAssignment : classVariableAssignments){ classVariableAssignment.tags().remove(CLASS_VARIABLE_ASSIGNMENT); classVariableAssignment.tags().remove(CLASS_VARIABLE_ACCESS); } AtlasSet<Node> classVariableValues = interproceduralDataFlowEdges.successors(classVariables).eval().nodes(); for(GraphElement classVariableValue : 
classVariableValues){ classVariableValue.tags().remove(CLASS_VARIABLE_VALUE); classVariableValue.tags().remove(CLASS_VARIABLE_ACCESS); } } /** * Given a callsite this method returns the invoked method signature * @param callsite * @return */ public static Node getInvokedMethodSignature(GraphElement callsite) { // XCSG.InvokedSignature connects a dynamic dispatch to its signature method // XCSG.InvokedFunction connects a static dispatch to it actual target method Q invokedEdges = Common.universe().edgesTaggedWithAny(XCSG.InvokedSignature, XCSG.InvokedFunction); Node method = invokedEdges.successors(Common.toQ(callsite)).eval().nodes().getFirst(); return method; } /** * Sets the type qualifier for a graph element * @param node * @param qualifier * @return Returns true if the type qualifier changed */ public static boolean removeTypes(Node node, Set<ImmutabilityTypes> typesToRemove){ Set<ImmutabilityTypes> typeSet = getTypes(node); String logMessage = "Remove: " + typesToRemove.toString() + " from " + typeSet.toString() + " for " + node.getAttr(XCSG.name); boolean typesChanged = typeSet.removeAll(typesToRemove); if(typesChanged){ if(PurityPreferences.isDebugLoggingEnabled()) Log.info(logMessage); if(PurityPreferences.isDebugLoggingEnabled() && typeSet.isEmpty()) Log.warning("Remove result in an empty type set."); } return typesChanged; } /** * Sets the type qualifier for a graph element * @param node * @param qualifier * @return Returns true if the type qualifier changed */ public static boolean removeTypes(Node node, ImmutabilityTypes... types){ EnumSet<ImmutabilityTypes> typesToRemove = EnumSet.noneOf(ImmutabilityTypes.class); for(ImmutabilityTypes type : types){ typesToRemove.add(type); } return removeTypes(node, typesToRemove); } @SuppressWarnings("unchecked") public static Set<ImmutabilityTypes> getTypes(GraphElement ge){ if(ge.hasAttr(IMMUTABILITY_QUALIFIERS)){ return (Set<ImmutabilityTypes>) ge.getAttr(IMMUTABILITY_QUALIFIERS); } else { EnumSet<ImmutabilityTypes> qualifiers = getDefaultTypes(ge); ge.putAttr(IMMUTABILITY_QUALIFIERS, qualifiers); return qualifiers; } } public static GraphElement getObjectType(GraphElement ge) { Q typeOfEdges = Common.universe().edgesTaggedWithAny(XCSG.TypeOf); return typeOfEdges.successors(Common.toQ(ge)).eval().nodes().getFirst(); } public static AtlasSet<Node> parseReferences(Node node){ if(PurityPreferences.isDebugLoggingEnabled()) Log.info("Parsing reference for " + node.address().toAddressString()); AtlasSet<Node> parsedReferences = new AtlasHashSet<Node>(); TreeSet<Node> worklist = new TreeSet<Node>(); worklist.add(node); Q dataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.DataFlow_Edge); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); while(!worklist.isEmpty()){ GraphElement reference = worklist.pollFirst(); if(reference != null && needsProcessing(reference)){ if(reference.taggedWith(XCSG.Cast)){ for(Node workItem : dataFlowEdges.predecessors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } if(reference.taggedWith(DATAFLOW_DISPLAY_NODE)){ for(Node workItem : Utilities.getDisplayNodeReferences(reference)){ worklist.add(workItem); } continue; } if(reference.taggedWith(XCSG.CallSite)){ // parse return, a callsite on a callsite must be a callsite on the resulting object from the first callsite Node method = Utilities.getInvokedMethodSignature(reference); worklist.add(Common.toQ(method).children().nodesTaggedWithAny(XCSG.ReturnValue).eval().nodes().getFirst()); 
continue; } // get the field for instance and class variable assignments if(reference.taggedWith(XCSG.InstanceVariableAssignment) || reference.taggedWith(Utilities.CLASS_VARIABLE_ASSIGNMENT)){ for(Node workItem : interproceduralDataFlowEdges.successors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } // get the field for instance and class variable values if(reference.taggedWith(XCSG.InstanceVariableValue) || reference.taggedWith(Utilities.CLASS_VARIABLE_VALUE)){ for(Node workItem : interproceduralDataFlowEdges.predecessors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } // get the array components being written to if(reference.taggedWith(XCSG.ArrayWrite)){ for(Node workItem : interproceduralDataFlowEdges.successors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } // get the array components being read from if(reference.taggedWith(XCSG.ArrayRead)){ for(Node workItem : interproceduralDataFlowEdges.predecessors(Common.toQ(reference)).eval().nodes()){ worklist.add(workItem); } continue; } String message = "Unhandled reference type for GraphElement " + node.address().toAddressString(); RuntimeException e = new RuntimeException(message); Log.error(message, e); throw e; } else { if(reference == null){ String message = "Null reference for GraphElement " + node.address().toAddressString(); RuntimeException e = new RuntimeException(message); Log.error(message, e); throw e; } else { parsedReferences.add(reference); } } } return parsedReferences; } private static boolean needsProcessing(GraphElement ge){ if(ge.taggedWith(DATAFLOW_DISPLAY_NODE)){ return true; } if(ge.taggedWith(XCSG.Cast)){ return true; } if(ge.taggedWith(XCSG.CallSite)){ return true; } if(ge.taggedWith(XCSG.InstanceVariableAccess) || ge.taggedWith(Utilities.CLASS_VARIABLE_ACCESS)){ return true; } if(ge.taggedWith(XCSG.ArrayAccess)){ return true; } return !isTypable(ge); } public static boolean isTypable(GraphElement ge){ // invalid types if(ge.taggedWith(XCSG.InstanceVariableAccess) || ge.taggedWith(Utilities.CLASS_VARIABLE_ACCESS)){ return false; } if(ge.taggedWith(XCSG.ArrayAccess)){ return false; } // valid types if(ge.taggedWith(DUMMY_ASSIGNMENT_NODE) || ge.taggedWith(DUMMY_RETURN_NODE)){ // these are dummy read only nodes return true; } if(ge.taggedWith(XCSG.Null)){ return true; } if(ge.taggedWith(XCSG.Literal)){ return true; } if(ge.taggedWith(XCSG.Instantiation) || ge.taggedWith(XCSG.ArrayInstantiation)){ return true; } if(ge.taggedWith(XCSG.Method)){ return true; } if(ge.taggedWith(XCSG.Identity)){ return true; } if(ge.taggedWith(XCSG.Parameter)){ return true; } if(ge.taggedWith(XCSG.ReturnValue)){ return true; } if(ge.taggedWith(XCSG.InstanceVariable) || ge.taggedWith(XCSG.ClassVariable)){ return true; } if(ge.taggedWith(XCSG.ArrayComponents)){ return true; } if(ge.taggedWith(XCSG.ParameterPass)){ return true; } if(ge.taggedWith(XCSG.Operator)){ return true; } if(ge.taggedWith(XCSG.CaughtValue)){ return true; } if(ge.taggedWith(XCSG.ElementFromCollection)){ return true; } if(ge.taggedWith(XCSG.Assignment)){ if(!ge.taggedWith(XCSG.InstanceVariableAssignment) && !ge.taggedWith(Utilities.CLASS_VARIABLE_ASSIGNMENT)){ return true; } } // if(isDefaultReadonlyType(getObjectType(ge))){ // return true; // } // something made it through the gap... 
return false; } public static EnumSet<ImmutabilityTypes> getDefaultTypes(GraphElement ge) { if(!isTypable(ge)){ RuntimeException e = new RuntimeException("Unexpected graph element: " + ge.address()); Log.error("Unexpected graph element: " + ge.address(), e); throw e; } EnumSet<ImmutabilityTypes> qualifiers = EnumSet.noneOf(ImmutabilityTypes.class); if(ge.taggedWith(DUMMY_ASSIGNMENT_NODE) || ge.taggedWith(DUMMY_RETURN_NODE)){ // these are dummy read only nodes that help to provide context sensitivity // in unassigned callsites or void methods qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Null)){ // null does not modify the stack or heap so it is readonly // however in order to satisfy constraints the other types should be initialized // note that assignments of nulls to a field can still mutate an object qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Literal)){ // several java objects are readonly for all practical purposes // however in order to satisfy constraints the other types should be initialized qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Instantiation) || ge.taggedWith(XCSG.ArrayInstantiation)){ // Type Rule 1 - TNEW // return type of a constructor is only mutable // x = new C(); // no effect on qualifier to x qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ReturnValue)){ // Section 2.4 of Reference 1 // "Method returns are initialized S(ret) = {readonly, polyread} for each method m" qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); } else if (ge.taggedWith(XCSG.Parameter)){ qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Identity)){ qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.InstanceVariable)){ // Section 2.4 of Reference 1 // "Fields are initialized to S(f) = {readonly, polyread}" qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); } else if(ge.taggedWith(XCSG.ClassVariable)){ // Section 3 of Reference 1 // static fields are initialized to S(sf) = {readonly, mutable} qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Method)){ // Section 3 of Reference 1 // methods can have a static type of {readonly, polyread, mutable} qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Operator)){ // the result of a primitive operation on primitives or primitive references is always readonly qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ArrayComponents)){ qualifiers.add(ImmutabilityTypes.READONLY); // qualifiers.add(ImmutabilityTypes.POLYREAD); // an array component is basically a local reference, TODO: what about array fields? 
qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.CaughtValue)){ // caught exceptions could be polyread since they could come from multiple call stacks qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.POLYREAD); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ElementFromCollection)){ // TODO: should probably treat these like array components (mutations to these mutate the collection) qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.ParameterPass)){ // Section 2.4 of Reference 1 // "All other references are initialized to the maximal // set of qualifiers, i.e. S(x) = {readonly, polyread, mutable}" // But, what does it mean for a local reference to be polyread? ~Ben qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } else if(ge.taggedWith(XCSG.Assignment)){ if(!ge.taggedWith(XCSG.InstanceVariableAssignment) && !ge.taggedWith(Utilities.CLASS_VARIABLE_ASSIGNMENT)){ // could be a local reference // Section 2.4 of Reference 1 // "All other references are initialized to the maximal // set of qualifiers, i.e. S(x) = {readonly, polyread, mutable}" // But, what does it mean for a local reference to be polyread? ~Ben qualifiers.add(ImmutabilityTypes.READONLY); qualifiers.add(ImmutabilityTypes.MUTABLE); } } // else if(isDefaultReadonlyType(Utilities.getObjectType(ge))){ // // several java objects are readonly for all practical purposes // // however in order to satisfy constraints the other types should be initialized // qualifiers.add(ImmutabilityTypes.READONLY); // qualifiers.add(ImmutabilityTypes.POLYREAD); // qualifiers.add(ImmutabilityTypes.MUTABLE); // } else { RuntimeException e = new RuntimeException("Unexpected graph element: " + ge.address()); Log.error("Unexpected graph element: " + ge.address(), e); throw e; } return qualifiers; } /** * Returns the fields or local variables accessed for an instance variable access * @param fieldAccess * @return */ public static AtlasSet<Node> getAccessedContainers(GraphElement fieldAccess){ Q instanceVariableAccessedEdges = Common.universe().edgesTaggedWithAny(XCSG.InstanceVariableAccessed); Q variablesAccessed = instanceVariableAccessedEdges.reverse(Common.toQ(fieldAccess)); Q instanceVariablesAccessed = variablesAccessed.nodesTaggedWithAny(XCSG.InstanceVariableAccess); Q classVariablesAccessed = variablesAccessed.nodesTaggedWithAny(CLASS_VARIABLE_ACCESS); Q localVariables = variablesAccessed.difference(instanceVariablesAccessed, classVariablesAccessed); Q interproceduralDataFlowEdges = Common.universe().edgesTaggedWithAny(XCSG.InterproceduralDataFlow); Q fieldsAccessed = interproceduralDataFlowEdges.predecessors(instanceVariablesAccessed.union(classVariablesAccessed)); return localVariables.union(fieldsAccessed).eval().nodes(); } /** * Returns the containing method of a given graph element or null if one is not found * @param node * @return */ public static Node getContainingMethod(Node node) { // NOTE: the enclosing method may be two steps or more above return getContainingNode(node, XCSG.Method); } /** * Find the next immediate containing node with the given tag. 
* * @param node * @param containingTag * @return the next immediate containing node, or null if none exists; never returns the given node */ public static Node getContainingNode(Node node, String containingTag) { if(node == null){ return null; } while(true) { GraphElement containsEdge = Graph.U.edges(node, NodeDirection.IN).taggedWithAll(XCSG.Contains).getFirst(); if(containsEdge == null){ return null; } Node parent = containsEdge.getNode(EdgeDirection.FROM); if(parent.taggedWith(containingTag)){ return parent; } node = parent; } } }
some defensive code to get past a known Atlas issue
com.ensoftcorp.open.purity/src/com/ensoftcorp/open/purity/analysis/Utilities.java
some defensive code to get past a known Atlas issue
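The removeTypes helpers in the Utilities.java record above report whether a node's qualifier set actually changed, and that signal comes straight from Set.removeAll. A minimal, self-contained Java sketch of that behaviour follows; the Qualifier enum is a stand-in for illustration only, not the project's ImmutabilityTypes:

import java.util.EnumSet;

public class RemoveTypesSketch {
    // Stand-in for the analysis' ImmutabilityTypes; illustration only.
    enum Qualifier { READONLY, POLYREAD, MUTABLE }

    public static void main(String[] args) {
        // Start from the maximal qualifier set, as most references are initialised in getDefaultTypes.
        EnumSet<Qualifier> typeSet = EnumSet.allOf(Qualifier.class);

        // removeAll reports whether the set actually changed ...
        boolean changed = typeSet.removeAll(EnumSet.of(Qualifier.MUTABLE));
        System.out.println(changed + " " + typeSet);  // true [READONLY, POLYREAD]

        // ... and a second, redundant removal reports no change.
        changed = typeSet.removeAll(EnumSet.of(Qualifier.MUTABLE));
        System.out.println(changed + " " + typeSet);  // false [READONLY, POLYREAD]
    }
}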
Java
mit
0e351a364fd01e2a41695a562877bddca9c0a20e
0
korobi/ElasticMangos
package io.korobi.mongotoelastic.mongo;

import javax.inject.Singleton;

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

@Singleton
/**
 * In-memory map of channel object IDs so we don't have to hit Mongo
 * for every chat in order to look them up.
 *
 * Note this instance will be shared amongst many threads and should
 * be thread-safe.
 */
public class ChannelIdMemoryCache implements IChannelIdLookup {

    public static final int INITIAL_NETWORK_CAPACITY = 10;
    public static final int INITIAL_CHANNEL_CAPACITY = 50;

    private final Map<String, Map<String, String>> cache = new ConcurrentHashMap<>(INITIAL_NETWORK_CAPACITY);

    @Override
    public Optional<String> getChannelObjectId(String network, String channel) {
        if (cache.containsKey(network)) {
            return Optional.ofNullable(cache.get(network).get(channel));
        }

        return Optional.empty();
    }

    @Override
    public void provideChannelId(String objectId, String network, String channel) {
        if (cache.containsKey(network)) {
            cache.get(network).put(channel, objectId);
        } else {
            // I miss my collection initializers :(
            /*
              var dict = new Dictionary<string, string>() { { channel, objectId } };
            */
            ConcurrentHashMap<String, String> map = new ConcurrentHashMap<>(INITIAL_CHANNEL_CAPACITY);
            map.put(channel, objectId);
            cache.put(network, map);
        }
    }
}
src/main/java/io/korobi/mongotoelastic/mongo/ChannelIdMemoryCache.java
package io.korobi.mongotoelastic.mongo;

import javax.inject.Singleton;

import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

@Singleton
/**
 * In-memory map of channel object IDs so we don't have to hit Mongo
 * for every chat in order to look them up.
 *
 * Note this instance will be shared amongst many threads and should
 * be thread-safe.
 */
public class ChannelIdMemoryCache implements IChannelIdLookup {

    public static final int INITIAL_NETWORK_CAPACITY = 10;
    public static final int INITIAL_CHANNEL_CAPACITY = 50;

    private final Map<String, Map<String, String>> cache = new ConcurrentHashMap<>(INITIAL_NETWORK_CAPACITY);

    @Override
    public Optional<String> getChannelObjectId(String network, String channel) {
        return Optional.empty();
    }

    @Override
    public void provideChannelId(String objectId, String network, String channel) {
        if (cache.containsKey(network)) {
            cache.get(network).put(channel, objectId);
        } else {
            // I miss my collection initializers :(
            /*
              var dict = new Dictionary<string, string>() { { channel, objectId } };
            */
            ConcurrentHashMap<String, String> map = new ConcurrentHashMap<>(INITIAL_CHANNEL_CAPACITY);
            map.put(channel, objectId);
            cache.put(network, map);
        }
    }
}
How's this look?
src/main/java/io/korobi/mongotoelastic/mongo/ChannelIdMemoryCache.java
How's this look?
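In the updated ChannelIdMemoryCache above, both methods still use a containsKey check followed by a separate get or put, so two threads registering the first channels of the same network can each create an inner map and one write can be lost. If stricter atomicity were wanted, one option is ConcurrentHashMap.computeIfAbsent. The class below is an illustrative sketch only, not the project's code; it assumes the same network -> channel -> objectId shape:

import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative variant only; not the project's ChannelIdMemoryCache.
class AtomicChannelIdCache {

    private final Map<String, Map<String, String>> cache = new ConcurrentHashMap<>();

    Optional<String> getChannelObjectId(String network, String channel) {
        // getOrDefault collapses the containsKey/get pair into a single lookup.
        return Optional.ofNullable(cache.getOrDefault(network, Collections.emptyMap()).get(channel));
    }

    void provideChannelId(String objectId, String network, String channel) {
        // computeIfAbsent creates the per-network map at most once, atomically,
        // so two threads registering the first channels of a network cannot lose a write.
        cache.computeIfAbsent(network, n -> new ConcurrentHashMap<>()).put(channel, objectId);
    }
}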
Java
mit
9db4f15464b9c7a5c00fa1643d436873abff0d82
0
Nexmo/nexmo-java-sdk,Nexmo/nexmo-java
/* * Copyright (c) 2011-2017 Nexmo Inc * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.nexmo.client.verify.endpoints; import com.nexmo.client.NexmoResponseParseException; import com.nexmo.client.TestUtils; import com.nexmo.client.verify.SearchRequest; import com.nexmo.client.verify.SearchResult; import org.apache.http.client.methods.RequestBuilder; import org.junit.Before; import org.junit.Test; import javax.xml.parsers.ParserConfigurationException; import java.util.GregorianCalendar; import java.util.List; import java.util.Map; import static com.nexmo.client.TestUtils.test429; import static junit.framework.Assert.fail; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.*; public class SearchEndpointTest { private SearchEndpoint client; @Before public void setUp() throws ParserConfigurationException { client = new SearchEndpoint(null); } @Test public void testNoRequestId() throws Exception { try { client.search(); fail("search should fail if no arguments are provided."); } catch (IllegalArgumentException e) { // this is expected } } @Test public void testTooManyRequestIds() throws Exception { try { client.search("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"); fail("search should fail if too many arguments are provided."); } catch (IllegalArgumentException e) { // this is expected } } @Test public void testParseSearchResponse() throws Exception { SearchResult[] rs = client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>aaa</date_submitted>\n" + " <date_finalized>ddd</date_finalized>\n" + " <checks />\n" + " <first_event_date>bbb</first_event_date>\n" + " <last_event_date>ccc</last_event_date>\n" + " <price>0.10000000</price>\n" + " <currency>EUR</currency>\n" + " <status>SUCCESS</status>\n" + "</verify_request>"); assertEquals(SearchResult.STATUS_OK, rs[0].getStatus()); assertEquals("a-random-request-id", rs[0].getRequestId()); assertEquals(SearchResult.VerificationStatus.SUCCESS, rs[0].getVerificationStatus()); } @Test public void testParseSearchResponseInvalidRoot() throws Exception { try { client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<INCORRECT>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " 
<sender_id>verify</sender_id>\n" + " <date_submitted>aaa</date_submitted>\n" + " <date_finalized>ddd</date_finalized>\n" + " <checks />\n" + " <first_event_date>bbb</first_event_date>\n" + " <last_event_date>ccc</last_event_date>\n" + " <price>0.10000000</price>\n" + " <currency>EUR</currency>\n" + " <status>SUCCESS</status>\n" + "</INCORRECT>"); fail("If the root node is unrecognised, an NexmoResponseParseException should be thrown."); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testParseVerifyRequestXmlNodeBadStatus() throws Exception { try { client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>aaa</date_submitted>\n" + " <date_finalized>ddd</date_finalized>\n" + " <checks />\n" + " <first_event_date>bbb</first_event_date>\n" + " <last_event_date>ccc</last_event_date>\n" + " <price>0.10000000</price>\n" + " <currency>EUR</currency>\n" + " <status>WHATISTHIS</status>\n" + "</verify_request>"); fail("Invalid status value should throw NexmoResponseParseException"); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testCheckInvalidStatus() throws Exception { try { client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>2016-10-19 11:20:00</date_received>\n" + " <code>1234</code>\n" + " <status>WHATISTHIS</status>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); fail("Invalid status value should throw NexmoResponseParseException"); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testBadDateInCheck() throws Exception { SearchResult[] rs = client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>THIS IS NOT A DATE</date_received>\n" + " <code>1234</code>\n" + " <status>INVALID</status>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); assertNull(rs[0].getChecks().get(0).getDate()); } @Test public void testNullCheckStatus() throws Exception { try { client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " 
<account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>2016-10-19 11:20:00</date_received>\n" + " <code>1234</code>\n" + " <status/>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); fail("Null status should throw NexmoResponseParseException"); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testNullCheckDateReceived() throws Exception { SearchResult[] rs = client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received/>\n" + " <code>1234</code>\n" + " <status>INVALID</status>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); assertNull(rs[0].getChecks().get(0).getDate()); } @Test public void testParseCheckXmlNodeUnrecognizedElement() throws Exception { SearchResult[] rs = client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>2016-10-19 11:20:00</date_received>\n" + " <code>1234</code>\n" + " <status>INVALID</status>\n" + " <ip_address />\n" + " <THISISEXTRA/>\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); assertEquals(new GregorianCalendar(2016, 9, 19, 11, 20, 0).getTime(), rs[0].getChecks().get(0).getDate()); } @Test public void testMakeRequestWithOneId() throws Exception { RequestBuilder builder = client.makeRequest(new SearchRequest("1")); assertEquals("POST", builder.getMethod()); Map<String, List<String>> params = TestUtils.makeFullParameterMap(builder.getParameters()); assertThat(params.size(), equalTo(1)); List<String> ids = params.get("request_id"); assertNotNull(ids); assertEquals(1, ids.size()); assertEquals("1", ids.get(0)); } @Test public void testMakeRequestWithMultipleIds() throws Exception { RequestBuilder builder = client.makeRequest(new SearchRequest("1", "2")); assertEquals("POST", builder.getMethod()); Map<String, List<String>> params = TestUtils.makeFullParameterMap(builder.getParameters()); assertThat(params.size(), equalTo(1)); List<String> ids = 
params.get("request_ids"); assertNotNull(ids); assertEquals(2, ids.size()); assertEquals("1", ids.get(0)); assertEquals("2", ids.get(1)); } @Test public void testRequestThrottleResponse() throws Exception { test429(new SearchEndpoint(null)); } }
src/test/java/com/nexmo/client/verify/endpoints/SearchEndpointTest.java
/* * Copyright (c) 2011-2017 Nexmo Inc * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.nexmo.client.verify.endpoints; import com.nexmo.client.NexmoResponseParseException; import com.nexmo.client.verify.SearchResult; import org.junit.Before; import org.junit.Test; import javax.xml.parsers.ParserConfigurationException; import java.util.GregorianCalendar; import static com.nexmo.client.TestUtils.test429; import static junit.framework.Assert.fail; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; public class SearchEndpointTest { private SearchEndpoint client; @Before public void setUp() throws ParserConfigurationException { client = new SearchEndpoint(null); } @Test public void testNoRequestId() throws Exception { try { client.search(); fail("search should fail if no arguments are provided."); } catch (IllegalArgumentException e) { // this is expected } } @Test public void testTooManyRequestIds() throws Exception { try { client.search("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"); fail("search should fail if too many arguments are provided."); } catch (IllegalArgumentException e) { // this is expected } } @Test public void testParseSearchResponse() throws Exception { SearchResult[] rs = client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>aaa</date_submitted>\n" + " <date_finalized>ddd</date_finalized>\n" + " <checks />\n" + " <first_event_date>bbb</first_event_date>\n" + " <last_event_date>ccc</last_event_date>\n" + " <price>0.10000000</price>\n" + " <currency>EUR</currency>\n" + " <status>SUCCESS</status>\n" + "</verify_request>"); assertEquals(SearchResult.STATUS_OK, rs[0].getStatus()); assertEquals("a-random-request-id", rs[0].getRequestId()); assertEquals(SearchResult.VerificationStatus.SUCCESS, rs[0].getVerificationStatus()); } @Test public void testParseSearchResponseInvalidRoot() throws Exception { try { client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<INCORRECT>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>aaa</date_submitted>\n" + " <date_finalized>ddd</date_finalized>\n" + " <checks />\n" + " 
<first_event_date>bbb</first_event_date>\n" + " <last_event_date>ccc</last_event_date>\n" + " <price>0.10000000</price>\n" + " <currency>EUR</currency>\n" + " <status>SUCCESS</status>\n" + "</INCORRECT>"); fail("If the root node is unrecognised, an NexmoResponseParseException should be thrown."); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testParseVerifyRequestXmlNodeBadStatus() throws Exception { try { client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>aaa</date_submitted>\n" + " <date_finalized>ddd</date_finalized>\n" + " <checks />\n" + " <first_event_date>bbb</first_event_date>\n" + " <last_event_date>ccc</last_event_date>\n" + " <price>0.10000000</price>\n" + " <currency>EUR</currency>\n" + " <status>WHATISTHIS</status>\n" + "</verify_request>"); fail("Invalid status value should throw NexmoResponseParseException"); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testCheckInvalidStatus() throws Exception { try { client.parseSearchResponse("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>2016-10-19 11:20:00</date_received>\n" + " <code>1234</code>\n" + " <status>WHATISTHIS</status>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); fail("Invalid status value should throw NexmoResponseParseException"); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testBadDateInCheck() throws Exception { SearchResult[] rs = client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>THIS IS NOT A DATE</date_received>\n" + " <code>1234</code>\n" + " <status>INVALID</status>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); assertNull(rs[0].getChecks().get(0).getDate()); } @Test public void testNullCheckStatus() throws Exception { try { client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " 
<date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>2016-10-19 11:20:00</date_received>\n" + " <code>1234</code>\n" + " <status/>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); fail("Null status should throw NexmoResponseParseException"); } catch (NexmoResponseParseException e) { // this is expected } } @Test public void testNullCheckDateReceived() throws Exception { SearchResult[] rs = client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received/>\n" + " <code>1234</code>\n" + " <status>INVALID</status>\n" + " <ip_address />\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); assertNull(rs[0].getChecks().get(0).getDate()); } @Test public void testParseCheckXmlNodeUnrecognizedElement() throws Exception { SearchResult[] rs = client.parseSearchResponse( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n" + "<verify_request>\n" + " <request_id>a-random-request-id</request_id>\n" + " <account_id>account-id</account_id>\n" + " <number>not-a-number</number>\n" + " <sender_id>verify</sender_id>\n" + " <date_submitted>2016-10-19 11:18:56</date_submitted>\n" + " <date_finalized>2016-10-19 11:20:00</date_finalized>\n" + " <checks>\n" + " <check>\n" + " <date_received>2016-10-19 11:20:00</date_received>\n" + " <code>1234</code>\n" + " <status>INVALID</status>\n" + " <ip_address />\n" + " <THISISEXTRA/>\n" + " </check>\n" + " </checks>\n" + " <first_event_date>2016-10-19 11:18:56</first_event_date>\n" + " <last_event_date>2016-10-19 11:18:56</last_event_date>\n" + " <price>0</price>\n" + " <currency>EUR</currency>\n" + " <status>FAILED</status>\n" + "</verify_request>"); assertEquals(new GregorianCalendar(2016, 9, 19, 11, 20, 0).getTime(), rs[0].getChecks().get(0).getDate()); } @Test public void testRequestThrottleResponse() throws Exception { test429(new SearchEndpoint(null)); } }
Added test for deprecated search endpoint.
src/test/java/com/nexmo/client/verify/endpoints/SearchEndpointTest.java
Added test for deprecated search endpoint.
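The exception-path tests in SearchEndpointTest above all use the try { ...; fail(...) } catch pattern. If the build is on JUnit 4.13 or newer (the record does not say which version is in use), the same intent can be expressed with assertThrows. The class below is a sketch under that assumption and presumes it would live in the same package as SearchEndpointTest:

package com.nexmo.client.verify.endpoints;

import static org.junit.Assert.assertThrows; // requires JUnit 4.13+

import org.junit.Before;
import org.junit.Test;

public class SearchEndpointNegativeTest {

    private SearchEndpoint client;

    @Before
    public void setUp() throws Exception {
        client = new SearchEndpoint(null);
    }

    @Test
    public void searchRejectsMissingRequestIds() {
        // Same intent as testNoRequestId above, without the try/fail/catch boilerplate.
        assertThrows(IllegalArgumentException.class, () -> client.search());
    }

    @Test
    public void searchRejectsTooManyRequestIds() {
        // Mirrors testTooManyRequestIds above.
        assertThrows(IllegalArgumentException.class,
                () -> client.search("1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11"));
    }
}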
Java
mit
877daabab1443e29c8bb620110fef602353687dd
0
Spectrumis/Chess-Game-GTU
import java.io.*; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Random; /** * Created by GozdeDogan on 14.11.2016. * Implemented by GozdeDogan, AliEmreBuyukersoy and YasinTuluman. */ public class Game implements Serializable { private static int isComputerOn = 0; //oyuncu vs computer ilk basta kapalı /** Degerler soyle: * 0 = No Computer (player vs player) * 1 = Easy * 2 = Normal * 3 = Hard */ private static boolean currentPlayer = true; //true = beyaz beyaz baslar private Cell tempCell = new Cell(); //Bu obje play methodunun bir onceki tıklanan buttonu tutabilmesi icin var private ArrayList<ArrayList<Cell>> board; private Cell[][] removesss; //Geri alma islemleri icin tutulacak Cell arrayi, her yerden ulasilabilsin diye Game classinin bir attribute'u private static int counterRemovess = 0; //Geri alma islemi icin sayac public Game(){ removesss = new Cell[64][2]; board = new ArrayList<ArrayList<Cell>>(8); for(int i =0; i < 8; ++i){ board.add(new ArrayList<Cell>()); for(int j =0; j < 8; ++j) { board.get(i).add(new Cell()); } } this.initBoard(); //this.printBoard(); this.tempCell = new Cell(); } //Bunu kaydededilmiş hali diye düşündüm public Game(Game game){ board = new ArrayList<ArrayList<Cell>>(8); for(int i =0; i < 8; ++i){ board.add(new ArrayList<Cell>()); for(int j =0; j < 8; ++j) { board.get(i).add(new Cell()); } } removesss = new Cell[64][2]; Cell temp = null; for(int i=0; i<8; i++){ for(int j=0; j<8; j++){ temp.setCell(board.get(i).get(j)); } } } /** * Kullanicinin son tiklamasini alir ve eger depolanmis bisey yoksa ve tiklamasi bos veya karsi takimin tasiysa * 0 return eder. Eger bi tasa tiklandiysa 1 return eder ve gidebilecegi yerleri TempMovesList'de depolar * Depolanmis yerlerden birine tiklandiginda ise 2 return eder ve oynanicak tasin yerini ve oynancak celli * TempMovesList'de depolar * Hicbir sey yapilmadiysa da -1 return eder. ( Aslinda bu Exception icin ) * @param x * @param y * @param TempMovesList * @return */ public int playGame(int x, int y, List<Cell> TempMovesList){ int status=0; try { Cell currentCell = new Cell(board.get(x).get(y)); Cell emptyCell = new Cell(); int king; king = kingPosition(getCurrentPlayer()); //oyun sonu geldiyse 3 dondurur if(isEnd()){ return 3; } //burada sah tehlikedemi diye bakar if(board.get(king/10).get(king-(king/10)*10).getPiece().inDanger(board)){ if(x != king/10 || y != king-(king/10)*10){ return 0; } } /* Verilen cell bos ise veya kendi tasimiz yoksa ... */ if (currentCell.getPiece() instanceof NoPiece || currentCell.getPiece().getColor() != getCurrentPlayer()) { /* ... ve daha once oynatabilecegimiz biseye tiklamadiysak sifir return edicez demektir*/ if (TempMovesList.size() == 0) { System.out.print("debug1\n"); return 0; } /* ... ve daha once oynatabilecegimiz bir tasa tikladiysak ...*/ else { /* ... 
tiklanan yer TempMoveList'te yani oynanabilir hamlelerde varmi diye bakiyoruz */ /* eger var ise hamlemizi yapiyoruz ve listemizi temizleyip icine hamle source ve targetini atiyoruz*/ if (playUser(TempMovesList, currentCell)) { System.out.print("debug2\n"); makeMove(getTempCell(), currentCell); TempMovesList.clear(); setTempCell(emptyCell); /* suanki oyuncu degerini degistiriyoruz */ setCurrentPlayer(!getCurrentPlayer()); /* ve 2 komutunu return ederek hamle yaptik diyoruz */ return 2; } /* eger tiklanan yer listede yoksa TempMovesListesini temizleyip sifir return ediyoruz */ else { System.out.print("debug3\n"); TempMovesList.clear(); setTempCell(emptyCell); return 0; } } } else { /* Verilen cellde bi tas varsa TempMoveList'imize tasin oynayabilecegi yerlerin listesi aticaz ve 1 return edicez ve kullanicinin targeti secmesini beklemek uzere beklemeye gecicez */ System.out.print("debug4\n"); tempCell.setCell(currentCell); TempMovesList.addAll(board.get(x).get(y).getPiece().checkMove(board, x, y)); return 1; } } catch (NullPointerException e){ return -1; } //ONEMLI: Her hamlenin kaydedilmesi gerekir, bir arraye. //Bu sekilde geri al butonuna basildiginda hamleleri burdan bakarak geri alabiliriz //Geri alinan her hamle de bu arrayden silinmeli } /** * User-User secildiginde bu fonksiyon cagrilacak * Bu fonksiyon sadece user dan gelen hamle dogru ise hareket ettirir, degilse hatali * secim yaptigini soyler ve tekrar secim yapmasini ister * Bu fonksiyon computer-user secildiginde de kullanilacak, böylelikle user hamleleri kontrol edilecek * Eger secim dogruysa 1 yanlissa 0 return eder * @return */ public boolean playUser(List<Cell> cellList, Cell cell ) { int lengt = cellList.size(); for (int i = 0; i < lengt; ++i) { if(cellList.get(i).equals(cell)) return true; } return false; } /** * board uzerinde degisiklik yapicak olan methodumuz. Source daki tasimizi target'a tasiyacak * @param source * @param target */ public void makeMove(Cell source, Cell target){ System.out.print("Movemakera girdi\n"); board.get(target.getX()).get(target.getY()).setPiece(source.getPiece()); Pieces piece = new NoPiece(); board.get(source.getX()).get(source.getY()).setPiece(piece); //yapilan hamle arraye kaydedildi, kaynak cell ve source cell olarak! //removesss[removesss.length][0].setCell(source); //removesss[removesss.length][1].setCell(target); } /** * Computer icin , easy mod secildiginde bu fonksiyon cagrilacak * Method icinde currentPlayer degistirilir * * tum board tek tek dolasilir. * boardin uzerinde player ile aynı renge sahip taslar bulundugunda * bu tasin olasi yapabilecegi tum hamleler bir listde tutulur * daha sonra random sayi olusturularak bu listden bir hamlenin * sirasi secilir ve return listi 2 eleman icerir. * ilk elemani source'dur.ikinci elemani target'tir. * Lutfen bu methodu kullanicak arkadaslar bu sirayi * dikkate aliniz. 
* * @return source ve targetin sirali olarak bulundugu bir cell listesi */ public List<Cell> playComputerEasy() { boolean player = getCurrentPlayer(); List<Cell> canMove = new ArrayList<>(); List<Cell> trgtMove = new ArrayList<>(); List<Cell> srcMove = new ArrayList<>(); for (int i = 0; i < 8; i++) { for (int j = 0; j < 8; j++) { if(board.get(i).get(j).getPiece() instanceof Pawn){ Pawn wp = new Pawn(); canMove.addAll(wp.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Rook){ Rook wr = new Rook(); canMove.addAll(wr.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Knight){ Knight wkn = new Knight(); canMove.addAll(wkn.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Bishop){ Bishop wb = new Bishop(); canMove.addAll(wb.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(player == board.get(i).get(j).getPiece().getColor()){ if(board.get(i).get(j).getPiece() instanceof King){ King wki = new King(); canMove.addAll(wki.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Queen){ Queen wq = new Queen(); canMove.addAll(wq.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } } } } Random randomGenerator = new Random(); List<Cell> retList = new ArrayList<>(); int randomInt = randomGenerator.nextInt(canMove.size()); makeMove(srcMove.get(randomInt), trgtMove.get(randomInt)); retList.add(srcMove.get(randomInt)); retList.add(trgtMove.get(randomInt)); this.setCurrentPlayer(!player); return retList; } /** * Computer icin , medium mod secildiginde bu fonksiyon cagrilacak * Method icinde currentPlayer degistirilir * @return source ve targetin sirali olarak bulundugu bir cell listesi */ public void playComputerMedium(){ List<Cell> allMoves = new LinkedList<>(); List<Cell> allSource = new LinkedList<>(); for (int i=0 ; i<7 ; ++i) { for (int j=0 ; j<7 ; ++j) { if (!(board.get(i).get(j).getPiece() instanceof NoPiece)) { if (this.currentPlayer == board.get(i).get(j).getPiece().getColor()) { List<Cell> moves = new LinkedList<>(); moves = board.get(i).get(j).getPiece().checkMove(board,i,j); for (int k=0 ; k<moves.size() ; ++k) { allMoves.add(new Cell(moves.get(k))); allSource.add(new Cell(board.get(i).get(j))); } } } } } Random rand = new Random(); int n = rand.nextInt(allMoves.size()); makeMove(allSource.get(n),allMoves.get(n)); } /** * Computer icin , hard mod secildiginde bu fonksiyon cagrilacak * Method icinde currentPlayer degistirilir * @return source ve targetin sirali olarak bulundugu bir cell listesi */ public List<Cell> playComputerHard(){ return null; } /** * kapatma dugmesine basildiginda oyunu kaydetmek icin yazilacak fonksiyon */ public void saveGame(){ //Belirlenen bir dosyaya sifreli sekilde 
tahtanın son durumu kaydedilir try { //dosya classlar ile aynı director icinde olusturuluyor File file = new File("/Users/Desktop/Chess-Game-GTU/Chess-Game-GTU/src/sample/saveGame.txt"); // dosya yoksa olusturuldu if (!file.exists()) file.createNewFile(); FileWriter fw = new FileWriter(file.getAbsoluteFile()); BufferedWriter bw = new BufferedWriter(fw); //tahtanin son hali dosyaya yazildi // hucre\nhucre\n......\nhucre\n\n -> 1 row bu sekilde yaziliyor sonra iki \n sonra diger row! for(int i=0; i<8; i++) { for (int j = 0; j < 8; j++) bw.write(board.get(i).get(j).toString() + "\n"); bw.write("\n"); } bw.write("\n"); // board dan sonra 3 tane \n sonra removess //hucre hucre \n sonra diger row for(int i=0; i<removesss.length; i++) { for (int j = 0; j < 2; j++) bw.write(removesss[i][j].toString() + " "); bw.write("\n"); } bw.write("\n"); // son olarak da counterRemovess dosyaya yazilir bw.write(counterRemovess); // dosya kapatilir bw.close(); String key = "I hope this project will finish8"; File inputFile = new File("/Users/Desktop/Chess-Game-GTU/Chess-Game-GTU/src/sample/saveGame.txt"); File encryptedFile = new File("/Users/Desktop/Chess-Game-GTU/Chess-Game-GTU/src/sample/saveGame.encrypted"); try { CryptoUtils.encrypt(key, inputFile, encryptedFile); } catch (CryptoException ex) { System.out.println(ex.getMessage()); ex.printStackTrace(); } } catch (IOException e) { e.printStackTrace(); } } /** * oyuna tekrar baslanmak istendiginde yazilacak kod */ public void restartGame(){ //tahta ilk haline getirildi this.initBoard(); //hamlelerin tutuldugu array silindi, tekrar oluşturuldu. removesss = null; removesss = new Cell[64][2]; //geri alinan hamle sayisi da sifirlandi counterRemovess=0; } /** * Geri alma islemine basildiginda en son yapilan hamleyi geri alacak * removesss in 2. sutunundaki cell den 1. sutunundaki Cell hareket edilir * ve bu hamle artık yapilmadi varsayilip removesss dan kaldirilir. */ public void recallMove(){ //removesss.lenght 0 dan buyuk olmalı cunku en az bir hamle yapılmadan geri alma işemi gerçekleştirilemez //yani oyunun başında bu buton calismaz!!! if(counterRemovess <= 5 && removesss.length > 0){ int index = removesss.length; board.get(removesss[index][0].getX()).set(removesss[index][1].getY(), board.get(removesss[index][1].getX()).get(removesss[index][1].getY())); board.get(removesss[index][0].getX()).get(removesss[index][1].getY()).setPiece(board.get(removesss[index][1].getX()).get(removesss[index][1].getY()).getPiece()); //indexteki cell elemanlari silindi. removesss[index][0] = null; removesss[index][1] = null; counterRemovess++; } else System.out.println("INVALID!! 
You can not go back anymore!!"); } public void initBoard(){ int i; int j; Game.currentPlayer = true; //Bos hucreler for(i=0; i < 8; ++i){ for(j=0; j < 8; ++j){ try { //boş hücreler //boş hücrenin color u ne olacak Pieces piece = new NoPiece(); board.get(i).get(j).setPiece(piece); board.get(i).get(j).piece.setColor(true); //şimdilik true atadım board.get(i).get(j).setX(i); board.get(i).get(j).setY(j); }catch(Exception e) { //System.out.println("Null Pointer "); } } } //piyonların renklerini belirledim ve piyonları yerleştirdim boolean color = false; //siyah for(j=1; j<7; j=j+5) { for (i = 0; i < 8; i++) { try { Pieces piece = new Pawn(); board.get(i).get(j).setPiece(piece); board.get(i).get(j).piece.setColor(color); } catch(Exception e){ } } color =true; } //özel taşlar try { //Siyah, alt, Rook { Pieces piece = new Rook(); board.get(0).get(0).setPiece(piece); } { Pieces piece = new Rook(); board.get(7).get(0).setPiece(piece); } //Beyaz, üst, Rook { Pieces piece = new Rook(); board.get(0).get(7).setPiece(piece); } { Pieces piece = new Rook(); board.get(7).get(7).setPiece(piece); } //Siyah, alt, Knight { Pieces piece = new Knight(); board.get(1).get(0).setPiece(piece); } { Pieces piece = new Knight(); board.get(6).get(0).setPiece(piece); } //Beyaz, üst, Knight { Pieces piece = new Knight(); board.get(1).get(7).setPiece(piece); } { Pieces piece = new Knight(); board.get(6).get(7).setPiece(piece); } //Siyah, alt, Bishop { Pieces piece = new Bishop(); board.get(2).get(0).setPiece(piece); } { Pieces piece = new Bishop(); board.get(5).get(0).setPiece(piece); } //Beyaz, üst, Bishop { Pieces piece = new Bishop(); board.get(2).get(7).setPiece(piece); } { Pieces piece = new Bishop(); board.get(5).get(7).setPiece(piece); } //Siyah, alt, King { Pieces piece = new King(); board.get(3).get(0).setPiece(piece); } //Beyaz, üst, King { Pieces piece = new King(); board.get(3).get(7).setPiece(piece); } //Siyah, alt, Queen { Pieces piece = new Queen(); board.get(4).get(0).setPiece(piece); } //Beyaz, üst, Queen { Pieces piece = new Queen(); board.get(4).get(7).setPiece(piece); } }catch(Exception e) { //System.out.println("Null Pointer "); } //Ozel taslar icin renk belirledim, ust beyaz, alt siyah color = false; for(j=0; j<8; j=j+7) { for (i=0; i < 8; i++) { //System.out.printf("%d ,%d ", i, j); board.get(i).get(j).piece.setColor(color); //System.out.print(board.get(i).get(j).piece.getColor()); //System.out.println(); } color = true; } } /* Burada arrayi nasıl yazacağımı düşünemedim, ekrana basmak için somut bi şey olması lazım ama benim elimde şimdilik somut bi şey yok kontrol ede ede mi gittim ama bilemedim tam 0 -> noPiece Beyaz Siyah 1 -> wpawn -1 -> pawn 2 -> wrook -2 -> rook 3 -> wknight -3 -> knight 4 -> wbishop -4 -> bishop 5 -> wking -5 -> king 6 -> wqueen -6 -> queen */ public void printBoard(){ for(int j=7; j>=0; j--) { for (int i = 0; i <= 7; i++) { if (!board.get(i).get(j).piece.getColor()) { //System.out.println("PrintBoard, false, siyah!!"); if (board.get(i).get(j).getPiece() instanceof Pawn) { System.out.print(" P"); } else if (board.get(i).get(j).getPiece() instanceof Rook) { System.out.print(" K"); } else if (board.get(i).get(j).getPiece() instanceof Knight) { System.out.print(" A"); } else if (board.get(i).get(j).getPiece() instanceof Bishop) { System.out.print(" F"); } else if (board.get(i).get(j).getPiece() instanceof King) { System.out.print(" S"); } else if (board.get(i).get(j).getPiece() instanceof Queen) { System.out.print(" V"); } else System.out.print(" ."); } else if 
(board.get(i).get(j).piece.getColor()) { //System.out.println("PrintBoard, true, beyaz!!"); if (board.get(i).get(j).getPiece() instanceof Pawn) { System.out.print("-P"); } else if (board.get(i).get(j).getPiece() instanceof Rook) { System.out.print("-K"); } else if (board.get(i).get(j).getPiece() instanceof Knight) { System.out.print("-A"); } else if (board.get(i).get(j).getPiece() instanceof Bishop) { System.out.print("-F"); } else if (board.get(i).get(j).getPiece() instanceof King) { System.out.print("-S"); } else if (board.get(i).get(j).getPiece() instanceof Queen) { System.out.print("-V"); } else System.out.print(" ."); } } System.out.println(); } } public boolean isEnd(){ int counter = 0; for(int i=0; i <= 7; ++i){ for(int j=0; j <= 7; ++j){ if(this.board.get(i).get(j).getPiece() instanceof King){ if(counter == 0){ ++counter; }else{ return false; } } } } return true; } public int kingPosition(boolean kingColor){ for(int i=0; i <= 7; ++i){ for(int j=0; j <= 7; ++j){ if(board.get(i).get(j).getPiece() instanceof King && board.get(i).get(j).getPiece().getColor() == kingColor){ return (10*i+j); } } } System.out.print("KingPosition hatası\n"); return -1; } public static void setCurrentPlayer( boolean cPlayer ) { currentPlayer = cPlayer; } public static boolean getCurrentPlayer() { return currentPlayer; } public static void setIsComputerOn( int isOn ) { isComputerOn = isOn; } public static int getIsComputerOn() { return isComputerOn; } public void setTempCell( Cell target ) { tempCell.setCell(target); } public Cell getTempCell() { return tempCell; } }
src/Game.java
import java.io.*; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.Random; /** * Created by GozdeDogan on 14.11.2016. * Implemented by GozdeDogan, AliEmreBuyukersoy and YasinTuluman. */ public class Game implements Serializable { private static int isComputerOn = 0; //oyuncu vs computer ilk basta kapalı /** Degerler soyle: * 0 = No Computer (player vs player) * 1 = Easy * 2 = Normal * 3 = Hard */ private static boolean currentPlayer = true; //true = beyaz beyaz baslar private Cell tempCell = new Cell(); //Bu obje play methodunun bir onceki tıklanan buttonu tutabilmesi icin var private ArrayList<ArrayList<Cell>> board; private Cell[][] removesss; //Geri alma islemleri icin tutulacak Cell arrayi, her yerden ulasilabilsin diye Game classinin bir attribute'u private static int counterRemovess = 0; //Geri alma islemi icin sayac public Game(){ removesss = new Cell[64][2]; board = new ArrayList<ArrayList<Cell>>(8); for(int i =0; i < 8; ++i){ board.add(new ArrayList<Cell>()); for(int j =0; j < 8; ++j) { board.get(i).add(new Cell()); } } this.initBoard(); //this.printBoard(); this.tempCell = new Cell(); } //Bunu kaydededilmiş hali diye düşündüm public Game(Game game){ board = new ArrayList<ArrayList<Cell>>(8); for(int i =0; i < 8; ++i){ board.add(new ArrayList<Cell>()); for(int j =0; j < 8; ++j) { board.get(i).add(new Cell()); } } removesss = new Cell[64][2]; Cell temp = null; for(int i=0; i<8; i++){ for(int j=0; j<8; j++){ temp.setCell(board.get(i).get(j)); } } } /** * Kullanicinin son tiklamasini alir ve eger depolanmis bisey yoksa ve tiklamasi bos veya karsi takimin tasiysa * 0 return eder. Eger bi tasa tiklandiysa 1 return eder ve gidebilecegi yerleri TempMovesList'de depolar * Depolanmis yerlerden birine tiklandiginda ise 2 return eder ve oynanicak tasin yerini ve oynancak celli * TempMovesList'de depolar * Hicbir sey yapilmadiysa da -1 return eder. ( Aslinda bu Exception icin ) * @param x * @param y * @param TempMovesList * @return */ public int playGame(int x, int y, List<Cell> TempMovesList){ int status=0; try { Cell currentCell = new Cell(board.get(x).get(y)); Cell emptyCell = new Cell(); int king; king = kingPosition(getCurrentPlayer()); //oyun sonu geldiyse 3 dondurur if(isEnd()){ return 3; } //burada sah tehlikedemi diye bakar if(board.get(king/10).get(king-(king/10)*10).getPiece().inDanger(board)){ if(x != king/10 || y != king-(king/10)*10){ return 0; } } /* Verilen cell bos ise veya kendi tasimiz yoksa ... */ if (currentCell.getPiece() instanceof NoPiece || currentCell.getPiece().getColor() != getCurrentPlayer()) { /* ... ve daha once oynatabilecegimiz biseye tiklamadiysak sifir return edicez demektir*/ if (TempMovesList.size() == 0) { System.out.print("debug1\n"); return 0; } /* ... ve daha once oynatabilecegimiz bir tasa tikladiysak ...*/ else { /* ... 
tiklanan yer TempMoveList'te yani oynanabilir hamlelerde varmi diye bakiyoruz */ /* eger var ise hamlemizi yapiyoruz ve listemizi temizleyip icine hamle source ve targetini atiyoruz*/ if (playUser(TempMovesList, currentCell)) { System.out.print("debug2\n"); makeMove(getTempCell(), currentCell); TempMovesList.clear(); setTempCell(emptyCell); /* suanki oyuncu degerini degistiriyoruz */ setCurrentPlayer(!getCurrentPlayer()); /* ve 2 komutunu return ederek hamle yaptik diyoruz */ return 2; } /* eger tiklanan yer listede yoksa TempMovesListesini temizleyip sifir return ediyoruz */ else { System.out.print("debug3\n"); TempMovesList.clear(); setTempCell(emptyCell); return 0; } } } else { /* Verilen cellde bi tas varsa TempMoveList'imize tasin oynayabilecegi yerlerin listesi aticaz ve 1 return edicez ve kullanicinin targeti secmesini beklemek uzere beklemeye gecicez */ System.out.print("debug4\n"); tempCell.setCell(currentCell); TempMovesList.addAll(board.get(x).get(y).getPiece().checkMove(board, x, y)); return 1; } } catch (NullPointerException e){ return -1; } //ONEMLI: Her hamlenin kaydedilmesi gerekir, bir arraye. //Bu sekilde geri al butonuna basildiginda hamleleri burdan bakarak geri alabiliriz //Geri alinan her hamle de bu arrayden silinmeli } /** * User-User secildiginde bu fonksiyon cagrilacak * Bu fonksiyon sadece user dan gelen hamle dogru ise hareket ettirir, degilse hatali * secim yaptigini soyler ve tekrar secim yapmasini ister * Bu fonksiyon computer-user secildiginde de kullanilacak, böylelikle user hamleleri kontrol edilecek * Eger secim dogruysa 1 yanlissa 0 return eder * @return */ public boolean playUser(List<Cell> cellList, Cell cell ) { int lengt = cellList.size(); for (int i = 0; i < lengt; ++i) { if(cellList.get(i).equals(cell)) return true; } return false; } /** * board uzerinde degisiklik yapicak olan methodumuz. Source daki tasimizi target'a tasiyacak * @param source * @param target */ public void makeMove(Cell source, Cell target){ System.out.print("Movemakera girdi\n"); board.get(target.getX()).get(target.getY()).setPiece(source.getPiece()); Pieces piece = new NoPiece(); board.get(source.getX()).get(source.getY()).setPiece(piece); //yapilan hamle arraye kaydedildi, kaynak cell ve source cell olarak! //removesss[removesss.length][0].setCell(source); //removesss[removesss.length][1].setCell(target); } /** * Computer icin , easy mod secildiginde bu fonksiyon cagrilacak * Method icinde currentPlayer degistirilir * * tum board tek tek dolasilir. * boardin uzerinde player ile aynı renge sahip taslar bulundugunda * bu tasin olasi yapabilecegi tum hamleler bir listde tutulur * daha sonra random sayi olusturularak bu listden bir hamlenin * sirasi secilir ve return listi 2 eleman icerir. * ilk elemani source'dur.ikinci elemani target'tir. * Lutfen bu methodu kullanicak arkadaslar bu sirayi * dikkate aliniz. 
* * @return source ve targetin sirali olarak bulundugu bir cell listesi */ public List<Cell> playComputerEasy() { boolean player = getCurrentPlayer(); List<Cell> canMove = new ArrayList<>(); List<Cell> trgtMove = new ArrayList<>(); List<Cell> srcMove = new ArrayList<>(); for (int i = 0; i < 8; i++) { for (int j = 0; j < 8; j++) { if(board.get(i).get(j).getPiece() instanceof Pawn){ Pawn wp = new Pawn(); canMove.addAll(wp.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Rook){ Rook wr = new Rook(); canMove.addAll(wr.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Knight){ Knight wkn = new Knight(); canMove.addAll(wkn.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Bishop){ Bishop wb = new Bishop(); canMove.addAll(wb.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(player == board.get(i).get(j).getPiece().getColor()){ if(board.get(i).get(j).getPiece() instanceof King){ King wki = new King(); canMove.addAll(wki.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } else if(board.get(i).get(j).getPiece() instanceof Queen){ Queen wq = new Queen(); canMove.addAll(wq.checkMove(board, i, j)); for (int k = 0; k < canMove.size(); k++) { srcMove.add(new Cell(board.get(i).get(j))); } trgtMove.addAll(canMove); canMove.removeAll(srcMove); } } } } Random randomGenerator = new Random(); List<Cell> retList = new ArrayList<>(); int randomInt = randomGenerator.nextInt(canMove.size()); makeMove(srcMove.get(randomInt), trgtMove.get(randomInt)); retList.add(srcMove.get(randomInt)); retList.add(trgtMove.get(randomInt)); this.setCurrentPlayer(!player); return retList; } /** * Computer icin , medium mod secildiginde bu fonksiyon cagrilacak * Method icinde currentPlayer degistirilir * @return source ve targetin sirali olarak bulundugu bir cell listesi */ public void playComputerMedium(){ List<Cell> allMoves = new LinkedList<>(); //List<Cell> allSource = new LinkedList<>(); for (int i=0 ; i<7 ; ++i) { for (int j=0 ; j<7 ; ++j) { if (!(board.get(i).get(j).getPiece() instanceof NoPiece)) { if (this.currentPlayer == board.get(i).get(j).getPiece().getColor()) { List<Cell> moves = new LinkedList<>(); moves = board.get(i).get(j).getPiece().checkMove(board,i,j); for (int k=0 ; k<moves.size() ; ++k) { allMoves.add(new Cell(moves.get(k))); } } } } } Random rand = new Random(); int n = rand.nextInt(allMoves.size()); makeMove(); } /** * Computer icin , hard mod secildiginde bu fonksiyon cagrilacak * Method icinde currentPlayer degistirilir * @return source ve targetin sirali olarak bulundugu bir cell listesi */ public List<Cell> playComputerHard(){ return null; } /** * kapatma dugmesine basildiginda oyunu kaydetmek icin yazilacak fonksiyon */ public void saveGame(){ //Belirlenen bir dosyaya sifreli sekilde tahtanın son durumu kaydedilir try { //dosya classlar ile aynı director icinde 
        File file = new File("/Users/Desktop/Chess-Game-GTU/Chess-Game-GTU/src/sample/saveGame.txt");
        // Create the file if it does not exist yet.
        if (!file.exists()) file.createNewFile();
        FileWriter fw = new FileWriter(file.getAbsoluteFile());
        BufferedWriter bw = new BufferedWriter(fw);
        // The current state of the board is written to the file:
        // cell\ncell\n...\ncell\n\n -> one row is written like this, then two \n, then the next row!
        for (int i = 0; i < 8; i++) {
            for (int j = 0; j < 8; j++)
                bw.write(board.get(i).get(j).toString() + "\n");
            bw.write("\n");
        }
        bw.write("\n");
        // After the board there are three \n characters, then removesss:
        // cell cell \n, then the next row. Only recorded moves are written (unused entries are null).
        for (int i = 0; i < removesss.length; i++) {
            if (removesss[i][0] == null || removesss[i][1] == null) continue;
            for (int j = 0; j < 2; j++)
                bw.write(removesss[i][j].toString() + " ");
            bw.write("\n");
        }
        bw.write("\n");
        // Finally counterRemovess is written to the file (as text, not as a raw character code).
        bw.write(String.valueOf(counterRemovess));
        // Close the file.
        bw.close();
        String key = "I hope this project will finish8";
        File inputFile = new File("/Users/Desktop/Chess-Game-GTU/Chess-Game-GTU/src/sample/saveGame.txt");
        File encryptedFile = new File("/Users/Desktop/Chess-Game-GTU/Chess-Game-GTU/src/sample/saveGame.encrypted");
        try {
            CryptoUtils.encrypt(key, inputFile, encryptedFile);
        } catch (CryptoException ex) {
            System.out.println(ex.getMessage());
            ex.printStackTrace();
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

/**
 * Called when the game is to be started over.
 */
public void restartGame() {
    // Reset the board to its initial state.
    this.initBoard();
    // Clear and recreate the array that holds the moves.
    removesss = null;
    removesss = new Cell[64][2];
    // Reset the undo counter as well.
    counterRemovess = 0;
}

/**
 * When the undo button is pressed, the most recent move is reverted:
 * the piece is moved from the cell in the second column of removesss back to
 * the cell in the first column, and the move is removed from removesss as if
 * it had never been made.
 */
public void recallMove() {
    // removesss must contain at least one recorded move; undo cannot be performed
    // before any move has been made, so this button does nothing at the very start of the game.
    // Find the index of the most recently recorded move.
    int index = -1;
    for (int i = removesss.length - 1; i >= 0; i--) {
        if (removesss[i][0] != null && removesss[i][1] != null) {
            index = i;
            break;
        }
    }
    if (counterRemovess <= 5 && index >= 0) {
        Cell source = removesss[index][0];
        Cell target = removesss[index][1];
        // Move the piece from the target cell back to the source cell.
        // Note: a captured piece, if any, is not restored here.
        board.get(source.getX()).get(source.getY()).setPiece(board.get(target.getX()).get(target.getY()).getPiece());
        board.get(target.getX()).get(target.getY()).setPiece(new NoPiece());
        // Clear the cell entries at this index.
        removesss[index][0] = null;
        removesss[index][1] = null;
        counterRemovess++;
    }
    else System.out.println("INVALID!! You can not go back anymore!!");
}
public void initBoard() {
    int i;
    int j;
    Game.currentPlayer = true;
    // Empty cells.
    for (i = 0; i < 8; ++i) {
        for (j = 0; j < 8; ++j) {
            try {
                // Empty cells; it is not clear yet what color an empty cell should have,
                // so true is assigned for now.
                Pieces piece = new NoPiece();
                board.get(i).get(j).setPiece(piece);
                board.get(i).get(j).piece.setColor(true);
                board.get(i).get(j).setX(i);
                board.get(i).get(j).setY(j);
            } catch (Exception e) {
                //System.out.println("Null Pointer ");
            }
        }
    }
    // Set the pawns' colors and place the pawns.
    boolean color = false; // black
    for (j = 1; j < 7; j = j + 5) {
        for (i = 0; i < 8; i++) {
            try {
                Pieces piece = new Pawn();
                board.get(i).get(j).setPiece(piece);
                board.get(i).get(j).piece.setColor(color);
            } catch (Exception e) {
            }
        }
        color = true;
    }
    // Special (non-pawn) pieces.
    try {
        // Black, bottom: rooks
        board.get(0).get(0).setPiece(new Rook());
        board.get(7).get(0).setPiece(new Rook());
        // White, top: rooks
        board.get(0).get(7).setPiece(new Rook());
        board.get(7).get(7).setPiece(new Rook());
        // Black, bottom: knights
        board.get(1).get(0).setPiece(new Knight());
        board.get(6).get(0).setPiece(new Knight());
        // White, top: knights
        board.get(1).get(7).setPiece(new Knight());
        board.get(6).get(7).setPiece(new Knight());
        // Black, bottom: bishops
        board.get(2).get(0).setPiece(new Bishop());
        board.get(5).get(0).setPiece(new Bishop());
        // White, top: bishops
        board.get(2).get(7).setPiece(new Bishop());
        board.get(5).get(7).setPiece(new Bishop());
        // Black, bottom: king
        board.get(3).get(0).setPiece(new King());
        // White, top: king
        board.get(3).get(7).setPiece(new King());
        // Black, bottom: queen
        board.get(4).get(0).setPiece(new Queen());
        // White, top: queen
        board.get(4).get(7).setPiece(new Queen());
    } catch (Exception e) {
        //System.out.println("Null Pointer ");
    }
    // Set the colors of the special pieces: top row white, bottom row black.
    color = false;
    for (j = 0; j < 8; j = j + 7) {
        for (i = 0; i < 8; i++) {
            //System.out.printf("%d ,%d ", i, j);
            board.get(i).get(j).piece.setColor(color);
            //System.out.print(board.get(i).get(j).piece.getColor());
            //System.out.println();
        }
        color = true;
    }
}

/* I was not sure how to print the board array here; printing needs something concrete,
   but there is nothing concrete yet, so for now each cell is checked piece by piece.
   The printed letters follow the Turkish piece names (Piyon, Kale, At, Fil, Şah, Vezir):
   0 -> noPiece
   White          Black
   1 -> wpawn     -1 -> pawn
   2 -> wrook     -2 -> rook
   3 -> wknight   -3 -> knight
   4 -> wbishop   -4 -> bishop
   5 -> wking     -5 -> king
   6 -> wqueen    -6 -> queen
*/
public void printBoard() {
    for (int j = 7; j >= 0; j--) {
        for (int i = 0; i <= 7; i++) {
            if (!board.get(i).get(j).piece.getColor()) {
                //System.out.println("printBoard, false, black!!");
                if (board.get(i).get(j).getPiece() instanceof Pawn) {
                    System.out.print(" P");
                } else if (board.get(i).get(j).getPiece() instanceof Rook) {
                    System.out.print(" K");
                } else if (board.get(i).get(j).getPiece() instanceof Knight) {
                    System.out.print(" A");
                } else if (board.get(i).get(j).getPiece() instanceof Bishop) {
                    System.out.print(" F");
                } else if (board.get(i).get(j).getPiece() instanceof King) {
                    System.out.print(" S");
                } else if (board.get(i).get(j).getPiece() instanceof Queen) {
                    System.out.print(" V");
                } else System.out.print(" .");
            }
            else if (board.get(i).get(j).piece.getColor()) {
                //System.out.println("printBoard, true, white!!");
                if (board.get(i).get(j).getPiece() instanceof Pawn) {
                    System.out.print("-P");
                } else if (board.get(i).get(j).getPiece() instanceof Rook) {
                    System.out.print("-K");
                } else if (board.get(i).get(j).getPiece() instanceof Knight) {
                    System.out.print("-A");
                } else if (board.get(i).get(j).getPiece() instanceof Bishop) {
                    System.out.print("-F");
                } else if (board.get(i).get(j).getPiece() instanceof King) {
                    System.out.print("-S");
                } else if (board.get(i).get(j).getPiece() instanceof Queen) {
                    System.out.print("-V");
                } else System.out.print(" .");
            }
        }
        System.out.println();
    }
}

/**
 * Returns true when fewer than two kings remain on the board, i.e. the game is over.
 */
public boolean isEnd() {
    int counter = 0;
    for (int i = 0; i <= 7; ++i) {
        for (int j = 0; j <= 7; ++j) {
            if (this.board.get(i).get(j).getPiece() instanceof King) {
                if (counter == 0) {
                    ++counter;
                } else {
                    return false;
                }
            }
        }
    }
    return true;
}

/**
 * Returns the position of the king of the given color encoded as 10*i + j,
 * or -1 if it cannot be found.
 */
public int kingPosition(boolean kingColor) {
    for (int i = 0; i <= 7; ++i) {
        for (int j = 0; j <= 7; ++j) {
            if (board.get(i).get(j).getPiece() instanceof King
                    && board.get(i).get(j).getPiece().getColor() == kingColor) {
                return (10 * i + j);
            }
        }
    }
    System.out.print("kingPosition error\n");
    return -1;
}

public static void setCurrentPlayer(boolean cPlayer) {
    currentPlayer = cPlayer;
}

public static boolean getCurrentPlayer() {
    return currentPlayer;
}

public static void setIsComputerOn(int isOn) {
    isComputerOn = isOn;
}

public static int getIsComputerOn() {
    return isComputerOn;
}

public void setTempCell(Cell target) {
    tempCell.setCell(target);
}

public Cell getTempCell() {
    return tempCell;
}
}
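The easy and medium computer modes above both collect every legal move of the current player and then pick one at random. The sketch below shows how that duplication could be factored into a single helper; it is not part of the original commit, the name playRandomMove is illustrative, and it only assumes the methods already used in this file (checkMove(board, i, j) on each piece, makeMove(source, target), and setCurrentPlayer).

// Sketch only: a shared helper for the random computer modes.
private List<Cell> playRandomMove(boolean player) {
    List<Cell> sources = new ArrayList<>();
    List<Cell> targets = new ArrayList<>();
    for (int i = 0; i < 8; i++) {
        for (int j = 0; j < 8; j++) {
            Cell from = board.get(i).get(j);
            if (from.getPiece() instanceof NoPiece || from.getPiece().getColor() != player) {
                continue;
            }
            for (Cell to : from.getPiece().checkMove(board, i, j)) {
                sources.add(new Cell(from));
                targets.add(new Cell(to));
            }
        }
    }
    List<Cell> result = new ArrayList<>();
    if (sources.isEmpty()) {
        return result; // no legal move available for this player
    }
    int pick = new Random().nextInt(sources.size());
    makeMove(sources.get(pick), targets.get(pick));
    result.add(sources.get(pick));
    result.add(targets.get(pick));
    setCurrentPlayer(!player);
    return result;
}

With such a helper, playComputerEasy and playComputerMedium could each become a one-line call, and a future hard mode could reuse the same move collection with a different selection rule instead of random choice.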
Added random computer play to playComputerMedium; still to be tested.
src/Game.java
Added random computer play to playComputerMedium; still to be tested.
Java
mit
e86d3e44affdc2fe7ea614a5a7c8423d85f79922
0
elBukkit/MagicPlugin,elBukkit/MagicPlugin,elBukkit/MagicPlugin
package com.elmakers.mine.bukkit.wand; import static com.google.common.base.Verify.verifyNotNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.logging.Level; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Color; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.Particle; import org.bukkit.block.BlockFace; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.configuration.MemoryConfiguration; import org.bukkit.entity.Entity; import org.bukkit.entity.Player; import org.bukkit.event.inventory.InventoryType; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryView; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.plugin.Plugin; import org.bukkit.potion.PotionEffectType; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.api.block.BrushMode; import com.elmakers.mine.bukkit.api.economy.Currency; import com.elmakers.mine.bukkit.api.event.WandPreActivateEvent; import com.elmakers.mine.bukkit.api.item.ItemData; import com.elmakers.mine.bukkit.api.magic.MageClassTemplate; import com.elmakers.mine.bukkit.api.magic.MageController; import com.elmakers.mine.bukkit.api.magic.MagicProperties; import com.elmakers.mine.bukkit.api.magic.MaterialSet; import com.elmakers.mine.bukkit.api.magic.Messages; import com.elmakers.mine.bukkit.api.spell.CostReducer; import com.elmakers.mine.bukkit.api.spell.Spell; import com.elmakers.mine.bukkit.api.spell.SpellKey; import com.elmakers.mine.bukkit.api.spell.SpellTemplate; import com.elmakers.mine.bukkit.api.wand.WandAction; import com.elmakers.mine.bukkit.block.MaterialAndData; import com.elmakers.mine.bukkit.block.MaterialBrush; import com.elmakers.mine.bukkit.effect.EffectPlayer; import com.elmakers.mine.bukkit.effect.SoundEffect; import com.elmakers.mine.bukkit.effect.WandEffectContext; import com.elmakers.mine.bukkit.effect.builtin.EffectRing; import com.elmakers.mine.bukkit.heroes.HeroesManager; import com.elmakers.mine.bukkit.magic.BaseMagicConfigurable; import com.elmakers.mine.bukkit.magic.Mage; import com.elmakers.mine.bukkit.magic.MageClass; import com.elmakers.mine.bukkit.magic.MageParameters; import com.elmakers.mine.bukkit.magic.MagicAttribute; import com.elmakers.mine.bukkit.magic.MagicController; import com.elmakers.mine.bukkit.magic.MagicPropertyType; import com.elmakers.mine.bukkit.utility.ColorHD; import com.elmakers.mine.bukkit.utility.CompatibilityUtils; import com.elmakers.mine.bukkit.utility.ConfigurationUtils; import com.elmakers.mine.bukkit.utility.DeprecatedUtils; import com.elmakers.mine.bukkit.utility.InventoryUtils; import com.elmakers.mine.bukkit.utility.NMSUtils; import com.google.common.base.Preconditions; public class Wand extends WandProperties implements CostReducer, com.elmakers.mine.bukkit.api.wand.Wand { public static final int OFFHAND_SLOT = 40; public static final int INVENTORY_SIZE = 27; public static final int PLAYER_INVENTORY_SIZE = 36; public static final int INVENTORY_ORGANIZE_BUFFER = 4; public static final int HOTBAR_SIZE = 9; public static final int HOTBAR_INVENTORY_SIZE = 
HOTBAR_SIZE - 1; public static final float DEFAULT_SPELL_COLOR_MIX_WEIGHT = 0.0001f; public static boolean FILL_CREATOR = false; public static Vector DEFAULT_CAST_OFFSET = new Vector(0, 0, 0.5); public static String DEFAULT_WAND_TEMPLATE = "default"; private static final String[] EMPTY_PARAMETERS = new String[0]; private static final Random random = new Random(); /** * The item as it appears in the inventory of the player. */ protected @Nullable ItemStack item; /** * The currently active mage. * * <p>Is only set when the wand is active or when the wand is * used for off-hand casting. */ protected @Nullable Mage mage; protected @Nullable WandEffectContext effectContext; // Cached state private String id = ""; private List<Inventory> hotbars; private List<Inventory> inventories; private Map<String, Integer> spellInventory = new HashMap<>(); private Set<String> spells = new LinkedHashSet<>(); private Map<String, Integer> spellLevels = new HashMap<>(); private Map<String, Integer> brushInventory = new HashMap<>(); private Set<String> brushes = new LinkedHashSet<>(); private String activeSpell = ""; private String alternateSpell = ""; private String alternateSpell2 = ""; private String activeBrush = ""; protected String wandName = ""; protected String description = ""; private String owner = ""; private String ownerId = ""; private String template = ""; private String path = ""; private List<String> mageClassKeys = null; private boolean superProtected = false; private boolean superPowered = false; private boolean glow = false; private boolean bound = false; private boolean indestructible = false; private boolean undroppable = false; private boolean keep = false; private boolean passive = false; private boolean autoOrganize = false; private boolean autoAlphabetize = false; private boolean autoFill = false; private boolean isUpgrade = false; private boolean randomizeOnActivate = true; private boolean rename = false; private boolean renameDescription = false; private boolean quickCast = false; private boolean quickCastDisabled = false; private boolean manualQuickCastDisabled = false; private boolean isInOffhand = false; private boolean hasId = false; private boolean suspendUpdate = false; private int inventoryRows = 1; private Vector castLocation; private WandAction leftClickAction = WandAction.NONE; private WandAction rightClickAction = WandAction.NONE; private WandAction dropAction = WandAction.NONE; private WandAction swapAction = WandAction.NONE; private MaterialAndData icon = null; private MaterialAndData upgradeIcon = null; private MaterialAndData inactiveIcon = null; private int inactiveIconDelay = 0; private String upgradeTemplate = null; protected float consumeReduction = 0; protected float cooldownReduction = 0; protected float costReduction = 0; protected Map<String, Double> protection; private float power = 0; private float earnMultiplier = 1; private float blockFOV = 0; private float blockChance = 0; private float blockReflectChance = 0; private int blockMageCooldown = 0; private int blockCooldown = 0; private int maxEnchantCount = 0; private int enchantCount = 0; private boolean hasInventory = false; private boolean locked = false; private boolean lockedAllowUpgrades = false; private boolean forceUpgrade = false; private boolean isHeroes = false; private int uses = 0; private boolean hasUses = false; private boolean isSingleUse = false; private boolean limitSpellsToPath = false; private boolean limitBrushesToPath = false; private Currency currencyDisplay = null; private float 
manaPerDamage = 0; private ColorHD effectColor = null; private float effectColorSpellMixWeight = DEFAULT_SPELL_COLOR_MIX_WEIGHT; private Particle effectParticle = null; private float effectParticleData = 0; private int effectParticleCount = 0; private int effectParticleInterval = 0; private double effectParticleMinVelocity = 0; private double effectParticleRadius = 0; private double effectParticleOffset = 0; private boolean effectBubbles = false; private boolean activeEffectsOnly = false; private EffectRing effectPlayer = null; private int castInterval = 0; private double castMinVelocity = 0; private Vector castVelocityDirection = null; private String castSpell = null; private ConfigurationSection castParameters = null; private Map<PotionEffectType, Integer> potionEffects = new HashMap<>(); private SoundEffect effectSound = null; private int effectSoundInterval = 0; private int quietLevel = 0; private Map<String, String> castOverrides = null; // Transient state private boolean hasSpellProgression = false; private long lastSoundEffect; private long lastParticleEffect; private long lastSpellCast; // Inventory functionality private WandMode mode = null; private WandMode brushMode = null; private int openInventoryPage = 0; private boolean inventoryIsOpen = false; private boolean inventoryWasOpen = false; private Inventory displayInventory = null; private int currentHotbar = 0; public static WandManaMode manaMode = WandManaMode.BAR; public static WandManaMode currencyMode = WandManaMode.NUMBER; public static boolean regenWhileInactive = true; public static Material DefaultUpgradeMaterial = Material.NETHER_STAR; public static Material DefaultWandMaterial = Material.BLAZE_ROD; public static Material EnchantableWandMaterial = null; public static boolean SpellGlow = false; public static boolean BrushGlow = false; public static boolean BrushItemGlow = true; public static boolean LiveHotbar = true; public static boolean LiveHotbarSkills = false; public static boolean LiveHotbarCooldown = true; public static boolean Unbreakable = false; public static boolean Unstashable = true; public static SoundEffect inventoryOpenSound = null; public static SoundEffect inventoryCloseSound = null; public static SoundEffect inventoryCycleSound = null; public static SoundEffect noActionSound = null; public static SoundEffect itemPickupSound = null; public static String WAND_KEY = "wand"; public static String UPGRADE_KEY = "wand_upgrade"; public static String WAND_SELF_DESTRUCT_KEY = null; public static byte HIDE_FLAGS = 63; public static String brushSelectSpell = ""; private Inventory storedInventory = null; private int heldSlot = 0; public Wand(MagicController controller) { super(controller); hotbars = new ArrayList<>(); inventories = new ArrayList<>(); } /** * @deprecated Use {@link MagicController#getWand(ItemStack)}. */ @Deprecated public Wand(MagicController controller, ItemStack itemStack) { this(controller); Preconditions.checkNotNull(itemStack); if (itemStack.getType() == Material.AIR) { itemStack.setType(DefaultWandMaterial); } this.icon = new MaterialAndData(itemStack); item = itemStack; boolean needsSave = false; boolean isWand = isWand(item); boolean isUpgradeItem = isUpgrade(item); if (isWand || isUpgradeItem) { ConfigurationSection wandConfig = itemToConfig(item, new MemoryConfiguration()); // Check for template migration WandTemplate wandTemplate = controller.getWandTemplate(wandConfig.getString("template")); WandTemplate migrateTemplate = wandTemplate == null ? 
null : wandTemplate.getMigrateTemplate(); if (migrateTemplate != null) { wandConfig.set("template", migrateTemplate.getKey()); } // Check for wand data migration int version = wandConfig.getInt("version", 0); if (version < CURRENT_VERSION) { // Migration will be handled by CasterProperties, this is just here // So that we save the data after to avoid re-migrating. needsSave = true; } randomizeOnActivate = !wandConfig.contains("icon"); load(wandConfig); } else { updateIcon(); needsSave = true; } loadProperties(); // Migrate old upgrade items if ((isUpgrade || isUpgradeItem) && isWand) { needsSave = true; InventoryUtils.removeMeta(item, WAND_KEY); } if (needsSave) { saveState(); updateName(); updateLore(); } } public Wand(MagicController controller, ConfigurationSection config) { this(controller, DefaultWandMaterial, (short)0); load(config); loadProperties(); updateName(); updateLore(); saveState(); } protected Wand(MagicController controller, String templateName) throws UnknownWandException { this(controller); // Default to "default" wand if (templateName == null || templateName.length() == 0) { templateName = DEFAULT_WAND_TEMPLATE; } // Check for randomized/pre-enchanted wands int level = 0; if (templateName.contains("(")) { String levelString = templateName.substring(templateName.indexOf('(') + 1, templateName.length() - 1); try { level = Integer.parseInt(levelString); } catch (Exception ex) { throw new IllegalArgumentException(ex); } templateName = templateName.substring(0, templateName.indexOf('(')); } WandTemplate template = controller.getWandTemplate(templateName); if (template == null) { throw new UnknownWandException(templateName); } WandTemplate migrateTemplate = template.getMigrateTemplate(); if (migrateTemplate != null) { template = migrateTemplate; templateName = migrateTemplate.getKey(); } setTemplate(templateName); setProperty("version", CURRENT_VERSION); ConfigurationSection templateConfig = template.getConfiguration(); if (templateConfig == null) { throw new UnknownWandException(templateName); } // Load all properties loadProperties(); // Enchant, if an enchanting level was provided if (level > 0) { // Account for randomized locked wands boolean wasLocked = locked; locked = false; randomize(level, false, null, true); locked = wasLocked; } // Don't randomize now if set to randomize later // Otherwise, do this here so the description updates if (!randomizeOnActivate) { randomize(); } updateName(); updateLore(); saveState(); } public Wand(MagicController controller, Material icon, short iconData) { // This will make the Bukkit ItemStack into a real ItemStack with NBT data. this(controller, InventoryUtils.makeReal(new ItemStack(icon, 1, iconData))); saveState(); updateName(); } @Override @SuppressWarnings("unchecked") protected void migrate(int version, ConfigurationSection wandConfig) { // First migration, clean out wand data that matches template // We've done this twice now, the second time to handle removing hard-coded defaults that // were not present in the template configs. 
if (version <= 1) { ConfigurationSection templateConfig = controller.getWandTemplateConfiguration(wandConfig.getString("template")); if (templateConfig != null) { // This is an unfortunate special case for wands waiting to be randomized String randomizeIcon = templateConfig.getString("randomize_icon"); String currentIcon = wandConfig.getString("icon"); if (randomizeIcon != null && currentIcon != null && randomizeIcon.equals(currentIcon)) { wandConfig.set("icon", null); } // This was a potentially leftover property from randomized wands we can ditch wandConfig.set("randomize", null); Set<String> keys = templateConfig.getKeys(false); for (String key : keys) { Object templateData = templateConfig.get(key); Object wandData = wandConfig.get(key); if (wandData == null) continue; String templateString = templateData.toString(); String wandString = wandData.toString(); if (templateData instanceof List) { templateString = templateString.substring(1, templateString.length() - 1); templateString = templateString.replace(", ", ","); templateData = templateString; } if (wandString.equalsIgnoreCase(templateString)) { wandConfig.set(key, null); continue; } try { double numericValue = Double.parseDouble(wandString); double numericTemplate = Double.parseDouble(templateString); if (numericValue == numericTemplate) { wandConfig.set(key, null); continue; } } catch (NumberFormatException ignored) { } if (wandData.equals(templateData)) { wandConfig.set(key, null); } } } } // Remove icon if matches template if (version <= 3) { ConfigurationSection templateConfig = controller.getWandTemplateConfiguration(wandConfig.getString("template")); String templateIcon = templateConfig == null ? null : templateConfig.getString("icon"); if (templateIcon != null && templateIcon.equals(wandConfig.getString("icon", ""))) { wandConfig.set("icon", null); } } // Migration: remove level from spell inventory if (version <= 4) { Object spellInventoryRaw = wandConfig.get("spell_inventory"); if (spellInventoryRaw != null) { Map<String, ? extends Object> spellInventory = null; Map<String, Integer> newSpellInventory = new HashMap<>(); if (spellInventoryRaw instanceof Map) { spellInventory = (Map<String, ? extends Object>)spellInventoryRaw; } else if (spellInventoryRaw instanceof ConfigurationSection) { spellInventory = NMSUtils.getMap((ConfigurationSection)spellInventoryRaw); } if (spellInventory != null) { for (Map.Entry<String, ? extends Object> spellEntry : spellInventory.entrySet()) { Object slot = spellEntry.getValue(); if (slot != null && slot instanceof Integer) { SpellKey spellKey = new SpellKey(spellEntry.getKey()); // Prefer to use the base spell if present since that is what we'd be // using on load. 
Object testSlot = spellInventory.get(spellKey.getBaseKey()); if (testSlot != null) { slot = testSlot; } newSpellInventory.put(spellKey.getBaseKey(), (Integer)slot); } } wandConfig.set("spell_inventory", newSpellInventory); } } } // Migration: move attributes to item_attributes if (version <= 5) { ConfigurationSection attributes = wandConfig.getConfigurationSection("attributes"); wandConfig.set("attributes", null); wandConfig.set("item_attributes", attributes); } super.migrate(version, wandConfig); } @Override public void load(ConfigurationSection configuration) { if (configuration != null) { setTemplate(configuration.getString("template")); } super.load(configuration); } protected void updateHotbarCount() { int hotbarCount = Math.max(1, getInt("hotbar_count", 1)); if (hotbarCount != hotbars.size()) { if (isInventoryOpen()) { closeInventory(); } hotbars.clear(); while (hotbars.size() < hotbarCount) { hotbars.add(CompatibilityUtils.createInventory(null, HOTBAR_INVENTORY_SIZE, getInventoryTitle())); } while (hotbars.size() > hotbarCount) { hotbars.remove(0); } } } @Override public void unenchant() { controller.cleanItem(item); clear(); } public void updateItemIcon() { setIcon(icon); } protected void updateIcon() { if (icon != null && icon.getMaterial() != null && icon.getMaterial() != Material.AIR) { String iconKey = icon.getKey(); if (iconKey != null && iconKey.isEmpty()) { iconKey = null; } WandTemplate template = getTemplate(); String templateIcon = template != null ? template.getProperty("icon", "") : null; if (templateIcon == null || !templateIcon.equals(iconKey)) { setProperty("icon", iconKey); } } } @Override public void setInactiveIcon(com.elmakers.mine.bukkit.api.block.MaterialAndData materialData) { if (materialData == null) { inactiveIcon = null; } else if (materialData instanceof MaterialAndData) { inactiveIcon = ((MaterialAndData)materialData); } else { inactiveIcon = new MaterialAndData(materialData); } String inactiveIconKey = null; if (inactiveIcon != null && inactiveIcon.getMaterial() != null && inactiveIcon.getMaterial() != Material.AIR) { inactiveIconKey = inactiveIcon.getKey(); if (inactiveIconKey != null && inactiveIconKey.isEmpty()) { inactiveIconKey = null; } } setProperty("inactive_icon", inactiveIconKey); updateItemIcon(); } public void setIcon(Material material, byte data) { setIcon(material == null ? 
null : new MaterialAndData(material, data)); updateIcon(); } @Override public void setIcon(com.elmakers.mine.bukkit.api.block.MaterialAndData materialData) { if (materialData instanceof MaterialAndData) { setIcon((MaterialAndData)materialData); } else { setIcon(new MaterialAndData(materialData)); } updateIcon(); } public void setIcon(MaterialAndData materialData) { if (materialData == null || !materialData.isValid()) return; if (materialData.getMaterial() == Material.AIR || materialData.getMaterial() == null) { materialData.setMaterial(DefaultWandMaterial); } icon = materialData; if (item == null) { item = InventoryUtils.makeReal(this.icon.getItemStack(1)); } Short durability = null; if (!indestructible && !isUpgrade && icon.getMaterial().getMaxDurability() > 0) { durability = item.getDurability(); } try { if (inactiveIcon == null || (mage != null && getMode() == WandMode.INVENTORY && isInventoryOpen())) { icon.applyToItem(item); } else { inactiveIcon.applyToItem(item); } } catch (Exception ex) { controller.getLogger().log(Level.WARNING, "Unable to apply wand icon", ex); item.setType(DefaultWandMaterial); } if (durability != null) { item.setDurability(durability); } // Make indestructible // The isUpgrade checks here and above are for using custom icons in 1.9, this is a bit hacky. if ((indestructible || Unbreakable || isUpgrade) && !manaMode.useDurability()) { CompatibilityUtils.makeUnbreakable(item); } else { CompatibilityUtils.removeUnbreakable(item); } CompatibilityUtils.hideFlags(item, getProperty("hide_flags", HIDE_FLAGS)); } @Override public void makeUpgrade() { if (!isUpgrade) { isUpgrade = true; String oldName = wandName; String newName = getMessage("upgrade_name"); newName = newName.replace("$name", oldName); String newDescription = controller.getMessages().get("wand.upgrade_default_description"); if (template != null && template.length() > 0) { newDescription = controller.getMessages().get("wands." + template + ".upgrade_description", description); } setIcon(DefaultUpgradeMaterial, (byte) 0); setName(newName); setDescription(newDescription); InventoryUtils.removeMeta(item, WAND_KEY); saveState(); updateName(true); updateLore(); } } public void newId() { id = UUID.randomUUID().toString(); setProperty("id", id); } public boolean checkId() { if (id == null || id.length() == 0) { newId(); return true; } return false; } @Override public String getId() { return id; } public boolean isModifiable() { return !locked; } @Override public boolean isIndestructible() { return indestructible; } @Override public boolean isUndroppable() { return undroppable; } public boolean isUpgrade() { return isUpgrade; } public static boolean isUpgrade(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, UPGRADE_KEY); } @Override public boolean usesMana() { if (isCostFree()) return false; return getManaMax() > 0 || (isHeroes && mage != null); } @Override public void removeMana(float amount) { if (isHeroes && mage != null) { HeroesManager heroes = controller.getHeroes(); if (heroes != null) { heroes.removeMana(mage.getPlayer(), (int)Math.ceil(amount)); } } super.removeMana(amount); updateMana(); } @Override public float getCostReduction() { if (mage != null) { float reduction = mage.getCostReduction(); return passive ? reduction : stackPassiveProperty(reduction, costReduction * controller.getMaxCostReduction()); } return costReduction; } @Override public float getCooldownReduction() { if (mage != null) { float reduction = mage.getCooldownReduction(); return passive ? 
reduction : stackPassiveProperty(reduction, cooldownReduction * controller.getMaxCooldownReduction()); } return cooldownReduction; } @Override public float getConsumeReduction() { if (mage != null) { float reduction = mage.getConsumeReduction(); return passive ? reduction : stackPassiveProperty(reduction, consumeReduction); } return consumeReduction; } @Override public float getCostScale() { return 1; } @Override public boolean hasInventory() { return hasInventory; } @Override public float getPower() { return power; } @Override public boolean isSuperProtected() { return superProtected; } @Override public boolean isSuperPowered() { return superPowered; } @Override public boolean isConsumeFree() { return consumeReduction >= 1; } @Override public boolean isCooldownFree() { return cooldownReduction > 1; } @Override public String getName() { return ChatColor.translateAlternateColorCodes('&', wandName); } public String getDescription() { return description; } public String getOwner() { return owner == null ? "" : owner; } public String getOwnerId() { return ownerId; } @Override public long getWorth() { long worth = 0; // TODO: Item properties, brushes, etc Set<String> spells = getSpells(); for (String spellKey : spells) { SpellTemplate spell = controller.getSpellTemplate(spellKey); if (spell != null) { worth = (long)(worth + spell.getWorth()); } } return worth; } @Override public void setName(String name) { wandName = ChatColor.stripColor(name); setProperty("name", wandName); updateName(); } public void setTemplate(String templateName) { this.template = templateName; WandTemplate wandTemplate = controller.getWandTemplate(templateName); if (wandTemplate != null) { setWandTemplate(wandTemplate); } setProperty("template", template); } @Override public String getTemplateKey() { return this.template; } @Override public boolean hasTag(String tag) { WandTemplate template = getTemplate(); return template != null && template.hasTag(tag); } @Override public WandUpgradePath getPath() { String pathKey = path; if (pathKey == null || pathKey.length() == 0) { pathKey = controller.getDefaultWandPath(); } return WandUpgradePath.getPath(pathKey); } public boolean hasPath() { return path != null && path.length() > 0; } @Override public void setDescription(String description) { this.description = description; setProperty("description", description); updateLore(); } public boolean tryToOwn(Player player) { if (ownerId == null || ownerId.length() == 0) { takeOwnership(player); return true; } return false; } public void takeOwnership(Player player) { Mage mage = this.mage; if (mage == null) { mage = controller.getMage(player); } if ((ownerId == null || ownerId.length() == 0) && quietLevel < 2) { mage.sendMessage(getMessage("bound_instructions", "").replace("$wand", getName())); String spellKey = getActiveSpellKey(); SpellTemplate spellTemplate = spellKey != null && !spellKey.isEmpty() ? controller.getSpellTemplate(spellKey) : null; if (spellTemplate != null) { String message = getMessage("spell_instructions", "").replace("$wand", getName()); mage.sendMessage(message.replace("$spell", spellTemplate.getName())); } if (spells.size() > 1) { String controlKey = getControlKey(WandAction.TOGGLE); if (controlKey != null) { controlKey = controller.getMessages().get("controls." 
+ controlKey); mage.sendMessage(getMessage("inventory_instructions", "") .replace("$wand", getName()).replace("$toggle", controlKey)); } } com.elmakers.mine.bukkit.api.wand.WandUpgradePath path = getPath(); if (path != null) { String message = getMessage("enchant_instructions", "").replace("$wand", getName()); mage.sendMessage(message); } } owner = ChatColor.stripColor(player.getDisplayName()); ownerId = mage.getId(); setProperty("owner", owner); setProperty("owner_id", ownerId); updateLore(); saveState(); } @Nullable public String getControlKey(WandAction action) { String controlKey = null; if (rightClickAction == action) { controlKey = "right_click"; } else if (dropAction == action) { controlKey = "drop"; } else if (leftClickAction == action) { controlKey = "left_click"; } else if (swapAction == action) { controlKey = "swap"; } return controlKey; } @Nullable @Override public ItemStack getItem() { return item; } public void setItem(ItemStack item) { this.item = item; } @Override public com.elmakers.mine.bukkit.api.block.MaterialAndData getIcon() { return icon; } @Override public com.elmakers.mine.bukkit.api.block.MaterialAndData getInactiveIcon() { return inactiveIcon; } protected List<Inventory> getAllInventories() { int hotbarCount = getHotbarCount(); List<Inventory> allInventories = new ArrayList<>(inventories.size() + hotbarCount); if (hotbarCount > 0) { allInventories.addAll(hotbars); } allInventories.addAll(inventories); return allInventories; } @Override public Set<String> getBaseSpells() { return spells; } @Override protected @Nonnull Map<String, Integer> getSpellLevels() { return spellLevels; } @Override public Set<String> getSpells() { Set<String> spellSet = new HashSet<>(); for (String key : spells) { Integer level = spellLevels.get(key); if (level != null) { spellSet.add(new SpellKey(key, level).getKey()); } else { spellSet.add(key); } } return spellSet; } @Override public Set<String> getBrushes() { return brushes; } @Nullable protected Integer parseSlot(String[] pieces) { Integer slot = null; if (pieces.length > 1) { try { slot = Integer.parseInt(pieces[1]); } catch (Exception ex) { slot = null; } if (slot != null && slot < 0) { slot = null; } } return slot; } protected void addToInventory(ItemStack itemStack, Integer slot) { if (slot == null) { addToInventory(itemStack); return; } Inventory inventory = getInventory(slot); slot = getInventorySlot(slot); ItemStack existing = inventory.getItem(slot); inventory.setItem(slot, itemStack); if (existing != null && existing.getType() != Material.AIR) { addToInventory(existing); } } public void addToInventory(ItemStack itemStack) { if (itemStack == null || itemStack.getType() == Material.AIR) { return; } if (getBrushMode() != WandMode.INVENTORY && isBrush(itemStack)) { String brushKey = getBrush(itemStack); if (!MaterialBrush.isSpecialMaterialKey(brushKey) || MaterialBrush.isSchematic(brushKey)) { return; } } List<Inventory> checkInventories = getAllInventories(); boolean added = false; WandMode mode = getMode(); int fullSlot = 0; for (Inventory inventory : checkInventories) { int inventorySize = inventory.getSize(); Integer slot = null; int freeSpace = 0; for (int i = 0; i < inventorySize && freeSpace < INVENTORY_ORGANIZE_BUFFER; i++) { ItemStack existing = inventory.getItem(i); if (InventoryUtils.isEmpty(existing)) { if (slot == null) { slot = i; } freeSpace++; } } // Don't leave free space in hotbars if (slot != null && (freeSpace >= INVENTORY_ORGANIZE_BUFFER || inventorySize == HOTBAR_INVENTORY_SIZE || mode == WandMode.CHEST)) { 
added = true; inventory.setItem(slot, itemStack); fullSlot += slot; break; } fullSlot += inventory.getSize(); } if (!added) { fullSlot = getHotbarSize() + getInventorySize() * inventories.size(); Inventory newInventory = CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle()); newInventory.addItem(itemStack); inventories.add(newInventory); } updateSlot(fullSlot, itemStack); } protected @Nonnull Inventory getInventoryByIndex(int inventoryIndex) { // Auto create while (inventoryIndex >= inventories.size()) { inventories.add(CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle())); } return inventories.get(inventoryIndex); } protected int getHotbarSize() { if (getMode() != WandMode.INVENTORY) return 0; return hotbars.size() * HOTBAR_INVENTORY_SIZE; } protected @Nonnull Inventory getInventory(int slot) { int hotbarSize = getHotbarSize(); if (slot < hotbarSize) { return hotbars.get(slot / HOTBAR_INVENTORY_SIZE); } int inventoryIndex = (slot - hotbarSize) / getInventorySize(); return getInventoryByIndex(inventoryIndex); } protected int getInventorySlot(int slot) { int hotbarSize = getHotbarSize(); if (slot < hotbarSize) { return slot % HOTBAR_INVENTORY_SIZE; } return ((slot - hotbarSize) % getInventorySize()); } protected void buildInventory() { // Force an update of the display inventory since chest mode is a different size displayInventory = null; updateHotbarCount(); for (Inventory hotbar : hotbars) { hotbar.clear(); } inventories.clear(); List<ItemStack> unsorted = new ArrayList<>(); for (String key : spells) { int spellLevel = getSpellLevel(key); SpellKey spellKey = new SpellKey(key, spellLevel); SpellTemplate spell = mage == null ? controller.getSpellTemplate(spellKey.getKey()) : mage.getSpell(spellKey.getKey()); ItemStack itemStack = createSpellItem(spellKey.getKey(), "", false); if (itemStack != null) { Integer slot = spellInventory.get(spell.getSpellKey().getBaseKey()); if (slot == null) { unsorted.add(itemStack); } else { addToInventory(itemStack, slot); } } } WandMode brushMode = getBrushMode(); for (String brushKey : brushes) { boolean addToInventory = brushMode == WandMode.INVENTORY || (MaterialBrush.isSpecialMaterialKey(brushKey) && !MaterialBrush.isSchematic(brushKey)); if (addToInventory) { ItemStack itemStack = createBrushIcon(brushKey); if (itemStack == null) { controller.getPlugin().getLogger().warning("Unable to create brush icon for key " + brushKey); continue; } Integer slot = brushInventory.get(brushKey); if (activeBrush == null || activeBrush.length() == 0) activeBrush = brushKey; addToInventory(itemStack, slot); } } for (ItemStack unsortedItem : unsorted) { addToInventory(unsortedItem); } updateHasInventory(); if (openInventoryPage >= inventories.size() && openInventoryPage != 0 && hasInventory) { setOpenInventoryPage(0); } } protected void parseSpells(String spellString) { // Support YML-List-As-String format // Maybe don't need this anymore since loading lists is now a separate path spellString = spellString.replaceAll("[\\]\\[]", ""); String[] spellNames = StringUtils.split(spellString, ','); loadSpells(Arrays.asList(spellNames)); } protected void clearSpells() { spellLevels.clear(); spells.clear(); } protected void loadSpells(Collection<String> spellKeys) { clearSpells(); WandUpgradePath path = getPath(); for (String spellName : spellKeys) { String[] pieces = StringUtils.split(spellName, '@'); Integer slot = parseSlot(pieces); // Handle aliases and upgrades smoothly String loadedKey = pieces[0].trim(); SpellKey 
spellKey = new SpellKey(loadedKey); SpellTemplate spell = controller.getSpellTemplate(loadedKey); if (limitSpellsToPath && path != null && !path.containsSpell(spellKey.getBaseKey())) continue; // Downgrade spells if higher levels have gone missing while (spell == null && spellKey.getLevel() > 0) { spellKey = new SpellKey(spellKey.getBaseKey(), spellKey.getLevel() - 1); spell = controller.getSpellTemplate(spellKey.getKey()); } if (spell != null) { spellKey = spell.getSpellKey(); Integer currentLevel = spellLevels.get(spellKey.getBaseKey()); if (spellKey.getLevel() > 1 && (currentLevel == null || currentLevel < spellKey.getLevel())) { setSpellLevel(spellKey.getBaseKey(), spellKey.getLevel()); } if (slot != null) { spellInventory.put(spellKey.getBaseKey(), slot); } spells.add(spellKey.getBaseKey()); if (activeSpell == null || activeSpell.length() == 0) { activeSpell = spellKey.getBaseKey(); } } } } private void loadSpells() { Object wandSpells = getObject("spells"); if (wandSpells != null) { if (wandSpells instanceof String) { parseSpells((String)wandSpells); } else if (wandSpells instanceof Collection) { @SuppressWarnings("unchecked") Collection<String> spellList = (Collection<String>)wandSpells; loadSpells(spellList); } else { clearSpells(); } } else { clearSpells(); } } protected void parseBrushes(String brushString) { // Support YML-List-As-String format // Maybe don't need this anymore since loading lists is now a separate path brushString = brushString.replaceAll("[\\]\\[]", ""); String[] brushNames = StringUtils.split(brushString, ','); loadBrushes(Arrays.asList(brushNames)); } protected void clearBrushes() { brushes.clear(); } protected void loadBrushes(Collection<String> brushKeys) { WandUpgradePath path = getPath(); clearBrushes(); for (String materialName : brushKeys) { String[] pieces = StringUtils.split(materialName, '@'); Integer slot = parseSlot(pieces); String materialKey = pieces[0].trim(); if (limitBrushesToPath && path != null && !path.containsBrush(materialKey)) continue; if (slot != null) { brushInventory.put(materialKey, slot); } brushes.add(materialKey); } } private void loadBrushes() { Object wandBrushes = getObject("brushes", getObject("materials")); if (wandBrushes != null) { if (wandBrushes instanceof String) { parseBrushes((String)wandBrushes); } else if (wandBrushes instanceof Collection) { @SuppressWarnings("unchecked") Collection<String> brushList = (Collection<String>)wandBrushes; loadBrushes(brushList); } else { clearBrushes(); } } else { clearBrushes(); } } protected void loadBrushInventory(Map<String, ? extends Object> inventory) { if (inventory == null) return; WandUpgradePath path = getPath(); for (Map.Entry<String, ?> brushEntry : inventory.entrySet()) { Object slot = brushEntry.getValue(); String brushKey = brushEntry.getKey(); if (limitBrushesToPath && path != null && !path.containsBrush(brushKey)) continue; if (slot != null && slot instanceof Integer) { brushInventory.put(brushKey, (Integer)slot); } } } protected void loadSpellInventory(Map<String, ? extends Object> inventory) { if (inventory == null) return; WandUpgradePath path = getPath(); for (Map.Entry<String, ? extends Object> spellEntry : inventory.entrySet()) { String spellKey = spellEntry.getKey(); if (limitSpellsToPath && path != null && !path.containsSpell(spellKey)) continue; Object slot = spellEntry.getValue(); if (slot != null && slot instanceof Integer) { spellInventory.put(spellKey, (Integer)slot); } } } protected void loadSpellLevels(Map<String, ? 
extends Object> levels) { if (levels == null) return; for (Map.Entry<String, ? extends Object> spellEntry : levels.entrySet()) { Object level = spellEntry.getValue(); if (level != null && level instanceof Integer) { setSpellLevel(spellEntry.getKey(), (Integer)level); } } } @Nullable public static ItemStack createSpellItem(String spellKey, MagicController controller, Wand wand, boolean isItem) { String[] split = spellKey.split(" ", 2); return createSpellItem(controller.getSpellTemplate(split[0]), split.length > 1 ? split[1] : "", controller, wand == null ? null : wand.getActiveMage(), wand, isItem); } @Nullable public static ItemStack createSpellItem(String spellKey, MagicController controller, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand, boolean isItem) { String[] split = spellKey.split(" ", 2); return createSpellItem(controller.getSpellTemplate(split[0]), split.length > 1 ? split[1] : "", controller, mage, wand, isItem); } @Nullable public ItemStack createSpellItem(String spellKey) { return createSpellItem(spellKey, "", false); } @Nullable public ItemStack createSpellItem(String spellKey, String args, boolean isItem) { SpellTemplate spell = mage == null ? controller.getSpellTemplate(spellKey) : mage.getSpell(spellKey); return createSpellItem(spell, args, controller, mage, this, isItem); } @Nullable public static ItemStack createSpellItem(SpellTemplate spell, String args, MagicController controller, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand, boolean isItem) { if (spell == null) return null; String iconURL = spell.getIconURL(); ItemStack itemStack = null; if (iconURL != null && (controller.isUrlIconsEnabled() || spell.getIcon() == null || !spell.getIcon().isValid() || spell.getIcon().getMaterial() == Material.AIR)) { itemStack = controller.getURLSkull(iconURL); } if (itemStack == null) { ItemStack originalItemStack = null; com.elmakers.mine.bukkit.api.block.MaterialAndData icon = spell.getIcon(); if (icon == null) { controller.getPlugin().getLogger().warning("Unable to create spell icon for " + spell.getName() + ", missing material"); return null; } try { originalItemStack = new ItemStack(icon.getMaterial(), 1, icon.getData()); itemStack = InventoryUtils.makeReal(originalItemStack); } catch (Exception ex) { itemStack = null; } if (itemStack == null) { if (icon.getMaterial() != Material.AIR) { String iconName = icon.getName(); controller.getPlugin().getLogger().warning("Unable to create spell icon for " + spell.getKey() + " with material " + iconName); } return originalItemStack; } } InventoryUtils.makeUnbreakable(itemStack); InventoryUtils.hideFlags(itemStack, (byte)63); updateSpellItem(controller.getMessages(), itemStack, spell, args, mage, wand, wand == null ? 
null : wand.activeBrush, isItem); if (wand != null && wand.getMode() == WandMode.SKILLS && !isItem) { String mageClassKey = wand.getMageClassKey(); ConfigurationSection skillsConfig = wand.getConfigurationSection("skills"); InventoryUtils.configureSkillItem(itemStack, mageClassKey, skillsConfig); } return itemStack; } @Nullable protected ItemStack createBrushIcon(String materialKey) { return createBrushItem(materialKey, controller, this, false); } @Nullable public static ItemStack createBrushItem(String materialKey, com.elmakers.mine.bukkit.api.magic.MageController controller, Wand wand, boolean isItem) { MaterialBrush brushData = MaterialBrush.parseMaterialKey(materialKey); if (brushData == null) return null; ItemStack itemStack = brushData.getItem(controller, isItem); if (BrushGlow || (isItem && BrushItemGlow)) { CompatibilityUtils.addGlow(itemStack); } InventoryUtils.makeUnbreakable(itemStack); InventoryUtils.hideFlags(itemStack, (byte)63); updateBrushItem(controller.getMessages(), itemStack, brushData, wand); return itemStack; } protected boolean findItem() { if (mage != null && item != null) { Player player = mage.getPlayer(); if (player != null) { ItemStack itemInHand = player.getInventory().getItemInMainHand(); if (itemInHand != null && !InventoryUtils.isSameInstance(itemInHand, item) && itemInHand.equals(item)) { item = itemInHand; isInOffhand = false; return true; } itemInHand = player.getInventory().getItemInOffHand(); if (itemInHand != null && !InventoryUtils.isSameInstance(itemInHand, item) && itemInHand.equals(item)) { item = itemInHand; isInOffhand = true; return true; } itemInHand = player.getInventory().getItem(heldSlot); if (itemInHand != null && !InventoryUtils.isSameInstance(itemInHand, item) && itemInHand.equals(item)) { item = itemInHand; isInOffhand = true; return true; } } } return false; } @Override public void saveState() { // Make sure we're on the current item instance if (findItem()) { updateItemIcon(); updateName(); updateLore(); } if (item == null || item.getType() == Material.AIR) return; // Check for upgrades that still have wand data if (isUpgrade && isWand(item)) { InventoryUtils.removeMeta(item, WAND_KEY); } Object wandNode = InventoryUtils.createNode(item, isUpgrade ? 
UPGRADE_KEY : WAND_KEY); if (wandNode == null) { controller.getLogger().warning("Failed to save wand state for wand to : " + item); } else { InventoryUtils.saveTagsToNBT(getConfiguration(), wandNode); } } @Nullable public static ConfigurationSection itemToConfig(ItemStack item, ConfigurationSection stateNode) { Object wandNode = InventoryUtils.getNode(item, WAND_KEY); if (wandNode == null) { wandNode = InventoryUtils.getNode(item, UPGRADE_KEY); if (wandNode == null) { return null; } } ConfigurationUtils.loadAllTagsFromNBT(stateNode, wandNode); return stateNode; } public static void configToItem(ConfigurationSection itemSection, ItemStack item) { ConfigurationSection stateNode = itemSection.getConfigurationSection("wand"); Object wandNode = InventoryUtils.createNode(item, Wand.WAND_KEY); if (wandNode != null) { InventoryUtils.saveTagsToNBT(stateNode, wandNode); } } @Nullable protected String getPotionEffectString() { return getPotionEffectString(potionEffects); } @Override public void save(ConfigurationSection node, boolean filtered) { ConfigurationUtils.addConfigurations(node, getConfiguration()); // Filter out some fields if (filtered) { node.set("id", null); node.set("owner_id", null); node.set("owner", null); node.set("template", null); node.set("mana_timestamp", null); node.set("enchant_count", null); } if (isUpgrade) { node.set("upgrade", true); } if (template != null && !template.isEmpty()) { node.set("template", null); node.set("inherit", template); } } public void save() { saveState(); updateName(); updateLore(); } public void updateBrushInventory() { if (brushInventory.isEmpty()) { setProperty("brush_inventory", null); } else { setProperty("brush_inventory", new HashMap<>(brushInventory)); } } protected void updateBrushInventory(Map<String, Integer> updateBrushes) { for (Map.Entry<String, Integer> brushEntry : brushInventory.entrySet()) { String brushKey = brushEntry.getKey(); Integer slot = updateBrushes.get(brushKey); if (slot != null) { brushEntry.setValue(slot); } } } public void updateSpellInventory() { if (spellInventory.isEmpty()) { setProperty("spell_inventory", null); } else { setProperty("spell_inventory", new HashMap<>(spellInventory)); } } protected void updateSpellInventory(Map<String, Integer> updateSpells) { for (Map.Entry<String, Integer> spellEntry : spellInventory.entrySet()) { String spellKey = spellEntry.getKey(); Integer slot = updateSpells.get(spellKey); if (slot != null) { spellEntry.setValue(slot); } } } public void setEffectColor(String hexColor) { // Annoying config conversion issue :\ if (hexColor.contains(".")) { hexColor = hexColor.substring(0, hexColor.indexOf('.')); } if (hexColor == null || hexColor.length() == 0 || hexColor.equals("none")) { effectColor = null; return; } effectColor = new ColorHD(hexColor); if (hexColor.equals("random")) { setProperty("effect_color", effectColor.toString()); } } private void migrateProtection(String legacy, String migrateTo) { if (hasProperty(legacy)) { double protection = getDouble(legacy); clearProperty(legacy); setProperty("protection." + migrateTo, protection); } } @Nullable private MaterialAndData loadIcon(String key) { if (key == null || key.isEmpty()) { return null; } ItemData itemData = controller.getOrCreateItem(key); if (itemData == null) { return null; } com.elmakers.mine.bukkit.api.block.MaterialAndData materialData = itemData.getMaterialAndData(); return materialData instanceof MaterialAndData ? 
(MaterialAndData)materialData : null; } @Override public void loadProperties() { super.loadProperties(); locked = getBoolean("locked", locked); lockedAllowUpgrades = getBoolean("locked_allow_upgrades", false); consumeReduction = getFloat("consume_reduction"); cooldownReduction = getFloat("cooldown_reduction"); costReduction = getFloat("cost_reduction"); power = getFloat("power"); ConfigurationSection protectionConfig = getConfigurationSection("protection"); if (protectionConfig == null && hasProperty("protection")) { migrateProtection("protection", "overall"); migrateProtection("protection_physical", "physical"); migrateProtection("protection_projectiles", "projectile"); migrateProtection("protection_falling", "fall"); migrateProtection("protection_fire", "fire"); migrateProtection("protection_explosions", "explosion"); protectionConfig = getConfigurationSection("protection"); } if (protectionConfig != null) { protection = new HashMap<>(); for (String protectionKey : protectionConfig.getKeys(false)) { protection.put(protectionKey, protectionConfig.getDouble(protectionKey)); } } hasId = getBoolean("unique", false); blockChance = getFloat("block_chance"); blockReflectChance = getFloat("block_reflect_chance"); blockFOV = getFloat("block_fov"); blockMageCooldown = getInt("block_mage_cooldown"); blockCooldown = getInt("block_cooldown"); manaPerDamage = getFloat("mana_per_damage"); earnMultiplier = getFloat("earn_multiplier", getFloat("sp_multiplier", 1)); String singleClass = getString("class"); if (singleClass != null && !singleClass.isEmpty()) { mageClassKeys = new ArrayList<>(); mageClassKeys.add(singleClass); } else { mageClassKeys = getStringList("classes"); } // Check for single-use wands uses = getInt("uses"); hasUses = uses > 0; // Convert some legacy properties to potion effects float healthRegeneration = getFloat("health_regeneration", 0); float hungerRegeneration = getFloat("hunger_regeneration", 0); float speedIncrease = getFloat("haste", 0); if (speedIncrease > 0) { potionEffects.put(PotionEffectType.SPEED, 1); } if (healthRegeneration > 0) { potionEffects.put(PotionEffectType.REGENERATION, 1); } if (hungerRegeneration > 0) { potionEffects.put(PotionEffectType.SATURATION, 1); } // This overrides the value loaded in CasterProperties if (!regenWhileInactive) { setProperty("mana_timestamp", System.currentTimeMillis()); } if (hasProperty("effect_color")) { setEffectColor(getString("effect_color")); } id = getString("id"); isUpgrade = getBoolean("upgrade"); quietLevel = getInt("quiet"); effectBubbles = getBoolean("effect_bubbles"); keep = getBoolean("keep"); passive = getBoolean("passive"); indestructible = getBoolean("indestructible"); superPowered = getBoolean("powered"); superProtected = getBoolean("protected"); glow = getBoolean("glow"); undroppable = getBoolean("undroppable"); isHeroes = getBoolean("heroes"); bound = getBoolean("bound"); forceUpgrade = getBoolean("force"); autoOrganize = getBoolean("organize"); autoAlphabetize = getBoolean("alphabetize"); autoFill = getBoolean("fill"); rename = getBoolean("rename"); renameDescription = getBoolean("rename_description"); enchantCount = getInt("enchant_count"); maxEnchantCount = getInt("max_enchant_count"); inventoryRows = getInt("inventory_rows", 5); if (inventoryRows <= 0) inventoryRows = 1; if (hasProperty("effect_particle")) { effectParticle = ConfigurationUtils.toParticleEffect(getString("effect_particle")); effectParticleData = 0; } else { effectParticle = null; } if (hasProperty("effect_sound")) { effectSound = 
ConfigurationUtils.toSoundEffect(getString("effect_sound")); } else { effectSound = null; } activeEffectsOnly = getBoolean("active_effects"); effectParticleData = getFloat("effect_particle_data"); effectParticleCount = getInt("effect_particle_count"); effectParticleRadius = getDouble("effect_particle_radius"); effectParticleOffset = getDouble("effect_particle_offset"); effectParticleInterval = getInt("effect_particle_interval"); effectParticleMinVelocity = getDouble("effect_particle_min_velocity"); effectSoundInterval = getInt("effect_sound_interval"); castLocation = getVector("cast_location"); castInterval = getInt("cast_interval"); castMinVelocity = getDouble("cast_min_velocity"); castVelocityDirection = getVector("cast_velocity_direction"); castSpell = getString("cast_spell"); String castParameterString = getString("cast_parameters", null); if (castParameterString != null && !castParameterString.isEmpty()) { castParameters = new MemoryConfiguration(); ConfigurationUtils.addParameters(StringUtils.split(castParameterString, ' '), castParameters); } else { castParameters = null; } WandMode newMode = parseWandMode(getString("mode"), controller.getDefaultWandMode()); if (newMode != mode) { if (isInventoryOpen()) { closeInventory(); } mode = newMode; } brushMode = parseWandMode(getString("brush_mode"), controller.getDefaultBrushMode()); currencyDisplay = controller.getCurrency(getString("currency_display", "sp")); // Backwards compatibility if (getBoolean("mode_drop", false)) { dropAction = WandAction.TOGGLE; swapAction = WandAction.CYCLE_HOTBAR; rightClickAction = WandAction.NONE; quickCast = true; // This is to turn the redundant spell lore off quickCastDisabled = true; manualQuickCastDisabled = false; } else if (mode == WandMode.CAST) { leftClickAction = WandAction.CAST; rightClickAction = WandAction.CAST; swapAction = WandAction.NONE; dropAction = WandAction.NONE; } else if (mode == WandMode.CYCLE) { leftClickAction = WandAction.CAST; rightClickAction = WandAction.NONE; swapAction = WandAction.NONE; dropAction = WandAction.CYCLE; } else { leftClickAction = WandAction.NONE; rightClickAction = WandAction.NONE; dropAction = WandAction.NONE; swapAction = WandAction.NONE; quickCast = false; quickCastDisabled = false; manualQuickCastDisabled = false; } String quickCastType = getString("quick_cast", getString("mode_cast")); if (quickCastType != null) { if (quickCastType.equalsIgnoreCase("true")) { quickCast = true; // This is to turn the redundant spell lore off quickCastDisabled = true; manualQuickCastDisabled = false; } else if (quickCastType.equalsIgnoreCase("manual")) { quickCast = false; quickCastDisabled = true; manualQuickCastDisabled = false; } else if (quickCastType.equalsIgnoreCase("disable")) { quickCast = false; quickCastDisabled = true; manualQuickCastDisabled = true; } else { quickCast = false; quickCastDisabled = false; manualQuickCastDisabled = false; } } leftClickAction = parseWandAction(getString("left_click"), leftClickAction); rightClickAction = parseWandAction(getString("right_click"), rightClickAction); dropAction = parseWandAction(getString("drop"), dropAction); swapAction = parseWandAction(getString("swap"), swapAction); owner = getString("owner"); ownerId = getString("owner_id"); template = getString("template"); upgradeTemplate = getString("upgrade_template"); path = getString("path"); activeSpell = getString("active_spell"); if (activeSpell != null && activeSpell.contains("|")) { SpellKey activeKey = new SpellKey(activeSpell); activeSpell = activeKey.getBaseKey(); 
setProperty("active_spell", activeSpell); } alternateSpell = getString("alternate_spell"); alternateSpell2 = getString("alternate_spell2"); activeBrush = getString("active_brush", getString("active_material")); if (hasProperty("hotbar")) { currentHotbar = getInt("hotbar"); } if (hasProperty("page")) { int page = getInt("page"); if (page != openInventoryPage) { openInventoryPage = page; } } // Default to template names, override with localizations and finally with wand data wandName = controller.getMessages().get("wand.default_name"); description = ""; // Check for migration information in the template config ConfigurationSection templateConfig = null; if (template != null && !template.isEmpty()) { templateConfig = controller.getWandTemplateConfiguration(template); if (templateConfig != null) { wandName = templateConfig.getString("name", wandName); description = templateConfig.getString("description", description); int templateUses = templateConfig.getInt("uses"); isSingleUse = templateUses == 1; hasUses = hasUses || templateUses > 0; } wandName = controller.getMessages().get("wands." + template + ".name", wandName); description = controller.getMessages().get("wands." + template + ".description", description); } wandName = getString("name", wandName); description = getString("description", description); WandTemplate wandTemplate = getTemplate(); if (hasProperty("icon_inactive")) { String iconKey = getString("icon_inactive"); if (wandTemplate != null) { iconKey = wandTemplate.migrateIcon(iconKey); } if (iconKey != null) { inactiveIcon = loadIcon(iconKey); } } else { inactiveIcon = null; } if (inactiveIcon != null && (inactiveIcon.getMaterial() == null || inactiveIcon.getMaterial() == Material.AIR)) { inactiveIcon = null; } inactiveIconDelay = getInt("icon_inactive_delay"); randomizeOnActivate = randomizeOnActivate && hasProperty("randomize_icon"); if (randomizeOnActivate) { String randomizeIcon = getString("randomize_icon"); setIcon(loadIcon(randomizeIcon)); if (item == null) { controller.getLogger().warning("Invalid randomize_icon in wand '" + template + "' config: " + randomizeIcon); } } else if (hasProperty("icon")) { String iconKey = getString("icon"); if (wandTemplate != null) { iconKey = wandTemplate.migrateIcon(iconKey); } if (iconKey.contains(",")) { Random r = new Random(); String[] keys = StringUtils.split(iconKey, ','); iconKey = keys[r.nextInt(keys.length)]; } // Port old custom wand icons if (templateConfig != null && iconKey.contains("i.imgur.com")) { iconKey = templateConfig.getString("icon"); } setIcon(loadIcon(iconKey)); if (item == null) { controller.getLogger().warning("Invalid icon in wand '" + template + "' config: " + iconKey); } updateIcon(); } else if (isUpgrade) { setIcon(new MaterialAndData(DefaultUpgradeMaterial)); } else { setIcon(new MaterialAndData(DefaultWandMaterial)); } if (hasProperty("upgrade_icon")) { upgradeIcon = loadIcon(getString("upgrade_icon")); } // Add vanilla attributes InventoryUtils.applyAttributes(item, getConfigurationSection("item_attributes"), getString("item_attribute_slot", getString("attribute_slot"))); // Add unstashable and unmoveable tags if (getBoolean("unstashable") || (undroppable && Unstashable)) { InventoryUtils.setMetaBoolean(item, "unstashable", true); } else { InventoryUtils.removeMeta(item, "unstashable"); } if (getBoolean("unmoveable")) { InventoryUtils.setMetaBoolean(item, "unmoveable", true); } else { InventoryUtils.removeMeta(item, "unmoveable"); } if (undroppable) { InventoryUtils.setMetaBoolean(item, "undroppable", 
true); } else { InventoryUtils.removeMeta(item, "undroppable"); } if (keep) { InventoryUtils.setMetaBoolean(item, "keep", true); } else { InventoryUtils.removeMeta(item, "keep"); } // Add vanilla enchantments ConfigurationSection enchantments = getConfigurationSection("enchantments"); InventoryUtils.applyEnchantments(item, enchantments); // Add enchantment glow if (enchantments == null || enchantments.getKeys(false).isEmpty()) { if (glow) { CompatibilityUtils.addGlow(item); } else { CompatibilityUtils.removeGlow(item); } } // Check for path-based migration, may update icons com.elmakers.mine.bukkit.api.wand.WandUpgradePath upgradePath = getPath(); if (upgradePath != null) { hasSpellProgression = upgradePath.getSpells().size() > 0 || upgradePath.getExtraSpells().size() > 0 || upgradePath.getRequiredSpells().size() > 0; upgradePath.checkMigration(this); } else { hasSpellProgression = false; } if (isHeroes) { hasSpellProgression = true; } brushInventory.clear(); spellInventory.clear(); limitSpellsToPath = getBoolean("limit_spells_to_path"); limitBrushesToPath = getBoolean("limit_brushes_to_path"); loadSpells(); // Load spell levels Object spellLevelsRaw = getObject("spell_levels"); if (spellLevelsRaw != null) { // Not sure this will ever appear as a Map, but just in case if (spellLevelsRaw instanceof Map) { @SuppressWarnings("unchecked") Map<String, Integer> spellLevels = (Map<String, Integer>)spellLevelsRaw; loadSpellLevels(spellLevels); } else if (spellLevelsRaw instanceof ConfigurationSection) { loadSpellLevels(NMSUtils.getMap((ConfigurationSection)spellLevelsRaw)); } } checkActiveSpell(); loadBrushes(); Object brushInventoryRaw = getObject("brush_inventory"); if (brushInventoryRaw != null) { // Not sure this will ever appear as a Map, but just in case if (brushInventoryRaw instanceof Map) { @SuppressWarnings("unchecked") Map<String, Integer> brushInventory = (Map<String, Integer>)brushInventoryRaw; loadBrushInventory(brushInventory); } else if (brushInventoryRaw instanceof ConfigurationSection) { loadBrushInventory(NMSUtils.getMap((ConfigurationSection)brushInventoryRaw)); } } Object spellInventoryRaw = getObject("spell_inventory"); if (spellInventoryRaw != null) { // Not sure this will ever appear as a Map, but just in case if (spellInventoryRaw instanceof Map) { @SuppressWarnings("unchecked") Map<String, Integer> spellInventory = (Map<String, Integer>)spellInventoryRaw; loadSpellInventory(spellInventory); } else if (spellInventoryRaw instanceof ConfigurationSection) { loadSpellInventory(NMSUtils.getMap((ConfigurationSection)spellInventoryRaw)); } } else { // Spells may have contained an inventory from migration or templates with a spell@slot format. updateSpellInventory(); } castOverrides = null; if (hasProperty("overrides")) { castOverrides = null; Object overridesGeneric = getObject("overrides"); if (overridesGeneric != null) { castOverrides = new HashMap<>(); if (overridesGeneric instanceof String) { String overrides = (String) overridesGeneric; if (!overrides.isEmpty()) { // Support YML-List-As-String format // May not really need this anymore. 
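                        // Strip the "[" and "]" left over from a YAML list serialized as a string, then split on commas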
                        overrides = overrides.replaceAll("[\\]\\[]", "");
                        String[] pairs = StringUtils.split(overrides, ',');
                        for (String override : pairs) {
                            parseOverride(override);
                        }
                    }
                } else if (overridesGeneric instanceof List) {
                    @SuppressWarnings("unchecked")
                    List<String> overrideList = (List<String>)overridesGeneric;
                    for (String override : overrideList) {
                        parseOverride(override);
                    }
                } else if (overridesGeneric instanceof ConfigurationSection) {
                    ConfigurationSection overridesSection = (ConfigurationSection)overridesGeneric;
                    Set<String> keys = overridesSection.getKeys(true);
                    for (String key : keys) {
                        Object leaf = overridesSection.get(key);
                        if (!(leaf instanceof ConfigurationSection) && !(leaf instanceof Map)) {
                            castOverrides.put(key, leaf.toString());
                        }
                    }
                }
            }
        }

        potionEffects.clear();
        if (hasProperty("potion_effects")) {
            addPotionEffects(potionEffects, getString("potion_effects", null));
        }

        // Some cleanup and sanity checks. In theory we don't need to store any non-zero value (as it is with the traders)
        // so try to keep defaults as 0/0.0/false.
        if (effectSound == null) {
            effectSoundInterval = 0;
        } else {
            effectSoundInterval = (effectSoundInterval == 0) ? 5 : effectSoundInterval;
        }

        if (effectParticle == null) {
            effectParticleInterval = 0;
        }

        checkActiveMaterial();
    }

    private void parseOverride(String override) {
        // Unescape commas
        override = override.replace("\\|", ",");
        String[] keyValue = StringUtils.split(override, ' ');
        if (keyValue.length > 0) {
            String value = keyValue.length > 1 ? keyValue[1] : "";
            castOverrides.put(keyValue[0], value);
        }
    }

    @Override
    public void describe(CommandSender sender, @Nullable Set<String> ignoreProperties) {
        ChatColor wandColor = isModifiable() ? ChatColor.AQUA : ChatColor.RED;
        sender.sendMessage(wandColor + getName());
        if (isUpgrade) {
            sender.sendMessage(ChatColor.YELLOW + "(Upgrade)");
        }
        if (description.length() > 0) {
            sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.GREEN + description);
        } else {
            sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.GREEN + "(No Description)");
        }
        if (owner != null && owner.length() > 0 && ownerId != null && ownerId.length() > 0) {
            sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.WHITE + owner + " (" + ChatColor.GRAY + ownerId + ChatColor.WHITE + ")");
        } else {
            sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.WHITE + "(No Owner)");
        }
        super.describe(sender, ignoreProperties);

        WandTemplate template = getTemplate();
        if (template != null) {
            sender.sendMessage("" + ChatColor.BOLD + ChatColor.GREEN + "Template Configuration:");
            ConfigurationSection itemConfig = getConfiguration();
            Set<String> ownKeys = itemConfig.getKeys(false);
            template.describe(sender, ignoreProperties, ownKeys);
        }
    }

    private static String getBrushDisplayName(Messages messages, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) {
        String materialName = brush == null ?
null : brush.getName(messages); if (materialName == null) { materialName = "none"; } String brushPrefix = ChatColor.translateAlternateColorCodes('&', messages.get("wand.brush_prefix")); return brushPrefix + materialName; } private static String getSpellDisplayName(Messages messages, SpellTemplate spell, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String name = ""; if (spell != null) { String spellPrefix = ChatColor.translateAlternateColorCodes('&', messages.get("wand.spell_prefix")); if (brush != null && spell.usesBrush()) { name = spellPrefix + spell.getName() + " " + getBrushDisplayName(messages, brush) + ChatColor.WHITE; } else { name = spellPrefix + spell.getName() + ChatColor.WHITE; } } return name; } private String getCustomName(String displayName, SpellTemplate spell, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String name = displayName; // $name name = name.replace("$name", wandName); // $path String pathName = getPathName(); if (pathName != null) { name = name.replace("$path", pathName); } // $spell String spellName = spell == null ? "" : spell.getName(); name = name.replace("$spell", spellName); // $brush String brushName = brush == null ? "" : brush.getName(); name = name.replace("$brush", brushName); // $uses name = name.replace("$uses", Integer.toString(getRemainingUses())); return ChatColor.translateAlternateColorCodes('&', name); } private String getActiveWandName(SpellTemplate spell, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String customName = getString("display_name"); if (customName != null && !customName.isEmpty()) { return getCustomName(customName, spell, brush); } // Build wand name int remaining = getRemainingUses(); String wandColorPrefix = (hasUses && remaining <= 1) ? "single_use_prefix" : isModifiable() ? (bound ? "bound_prefix" : "unbound_prefix") : (path != null && path.length() > 0 ? "has_path_prefix" : "unmodifiable_prefix"); String name = ChatColor.translateAlternateColorCodes('&', getMessage(wandColorPrefix)) + getDisplayName(); if (randomizeOnActivate) return name; Set<String> spells = getSpells(); // Add active spell to description Messages messages = controller.getMessages(); boolean showSpell = isModifiable() && hasSpellProgression(); showSpell = !quickCast && (spells.size() > 1 || showSpell) && getMode() != WandMode.SKILLS; if (spell != null && showSpell) { name = getSpellDisplayName(messages, spell, brush) + " (" + name + ChatColor.WHITE + ")"; } if (remaining > 1) { String message = getMessage("uses_remaining_brief"); name = name + ChatColor.DARK_RED + " (" + message.replace("$count", Integer.toString(remaining)) + ChatColor.DARK_RED + ")"; } return name; } private String getActiveWandName(SpellTemplate spell) { return getActiveWandName(spell, mage == null ? MaterialBrush.parseMaterialKey(activeBrush) : mage.getBrush()); } private String getActiveWandName(MaterialBrush brush) { SpellTemplate spell = null; if (activeSpell != null && activeSpell.length() > 0) { spell = controller.getSpellTemplate(activeSpell); } return getActiveWandName(spell, brush); } private String getActiveWandName() { SpellTemplate spell = null; if (activeSpell != null && activeSpell.length() > 0) { spell = controller.getSpellTemplate(activeSpell); } return getActiveWandName(spell); } protected String getDisplayName() { return ChatColor.translateAlternateColorCodes('&', randomizeOnActivate ? 
getMessage("randomized_name") : wandName); } public void updateName(boolean isActive) { if (isActive) { CompatibilityUtils.setDisplayName(item, !isUpgrade ? getActiveWandName() : ChatColor.translateAlternateColorCodes('&', getMessage("upgrade_prefix")) + getDisplayName()); } else { CompatibilityUtils.setDisplayName(item, ChatColor.stripColor(getDisplayName())); } } private void updateName() { updateName(true); } protected static String convertToHTML(String line) { int tagCount = 1; line = "<span style=\"color:white\">" + line; for (ChatColor c : ChatColor.values()) { tagCount += StringUtils.countMatches(line, c.toString()); String replaceStyle = ""; if (c == ChatColor.ITALIC) { replaceStyle = "font-style: italic"; } else if (c == ChatColor.BOLD) { replaceStyle = "font-weight: bold"; } else if (c == ChatColor.UNDERLINE) { replaceStyle = "text-decoration: underline"; } else { String color = c.name().toLowerCase().replace("_", ""); if (c == ChatColor.LIGHT_PURPLE) { color = "mediumpurple"; } replaceStyle = "color:" + color; } line = line.replace(c.toString(), "<span style=\"" + replaceStyle + "\">"); } for (int i = 0; i < tagCount; i++) { line += "</span>"; } return line; } public String getHTMLDescription() { Collection<String> rawLore = getLore(); Collection<String> lore = new ArrayList<>(); lore.add("<h2>" + convertToHTML(getActiveWandName()) + "</h2>"); for (String line : rawLore) { lore.add(convertToHTML(line)); } return "<div style=\"background-color: black; margin: 8px; padding: 8px\">" + StringUtils.join(lore, "<br/>") + "</div>"; } protected void addPropertyLore(List<String> lore, boolean isSingleSpell) { if (usesMana() && effectiveManaMax > 0) { int manaMax = getManaMax(); if (effectiveManaMax != manaMax) { String fullMessage = getLevelString("mana_amount_boosted", manaMax, controller.getMaxMana()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaMax)), lore); } else { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_amount", manaMax, controller.getMaxMana()), lore); } int manaRegeneration = getManaRegeneration(); if (manaRegeneration > 0 && effectiveManaRegeneration > 0) { if (effectiveManaRegeneration != manaRegeneration) { String fullMessage = getLevelString("mana_regeneration_boosted", manaRegeneration, controller.getMaxManaRegeneration()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaRegeneration)), lore); } else { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_regeneration", manaRegeneration, controller.getMaxManaRegeneration()), lore); } } if (manaPerDamage > 0) { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_per_damage", manaPerDamage, controller.getMaxManaRegeneration()), lore); } } if (superPowered) { ConfigurationUtils.addIfNotEmpty(getMessage("super_powered"), lore); } if (blockReflectChance > 0) { ConfigurationUtils.addIfNotEmpty(getLevelString("reflect_chance", blockReflectChance), lore); } else if (blockChance != 0) { ConfigurationUtils.addIfNotEmpty(getLevelString("block_chance", blockChance), lore); } float manaMaxBoost = getManaMaxBoost(); if (manaMaxBoost != 0) { ConfigurationUtils.addIfNotEmpty(getPropertyString("mana_boost", manaMaxBoost), lore); } float manaRegenerationBoost = getManaRegenerationBoost(); if (manaRegenerationBoost != 0) { ConfigurationUtils.addIfNotEmpty(getPropertyString("mana_regeneration_boost", manaRegenerationBoost), lore); } if (castSpell != null) { SpellTemplate spell = controller.getSpellTemplate(castSpell); if (spell != null) { 
                ConfigurationUtils.addIfNotEmpty(getMessage("spell_aura").replace("$spell", spell.getName()), lore);
            }
        }
        for (Map.Entry<PotionEffectType, Integer> effect : potionEffects.entrySet()) {
            ConfigurationUtils.addIfNotEmpty(describePotionEffect(effect.getKey(), effect.getValue()), lore);
        }

        // If this is a passive wand, then reduction properties stack onto the mage when worn.
        // In this case we should show it as such in the lore.
        if (passive) isSingleSpell = false;
        if (consumeReduction != 0 && !isSingleSpell) ConfigurationUtils.addIfNotEmpty(getPropertyString("consume_reduction", consumeReduction), lore);
        if (costReduction != 0 && !isSingleSpell) ConfigurationUtils.addIfNotEmpty(getPropertyString("cost_reduction", costReduction), lore);
        if (cooldownReduction != 0 && !isSingleSpell) ConfigurationUtils.addIfNotEmpty(getPropertyString("cooldown_reduction", cooldownReduction), lore);
        if (power > 0) ConfigurationUtils.addIfNotEmpty(getLevelString("power", power), lore);
        if (superProtected) {
            ConfigurationUtils.addIfNotEmpty(getMessage("super_protected"), lore);
        } else if (protection != null) {
            for (Map.Entry<String, Double> entry : protection.entrySet()) {
                String protectionType = entry.getKey();
                double amount = entry.getValue();
                addDamageTypeLore("protection", protectionType, amount, lore);
            }
        }
        ConfigurationSection weaknessConfig = getConfigurationSection("weakness");
        if (weaknessConfig != null) {
            Set<String> keys = weaknessConfig.getKeys(false);
            for (String key : keys) {
                addDamageTypeLore("weakness", key, weaknessConfig.getDouble(key), lore);
            }
        }
        ConfigurationSection strengthConfig = getConfigurationSection("strength");
        if (strengthConfig != null) {
            Set<String> keys = strengthConfig.getKeys(false);
            for (String key : keys) {
                addDamageTypeLore("strength", key, strengthConfig.getDouble(key), lore);
            }
        }
        if (earnMultiplier > 1) {
            String earnDescription = getPropertyString("earn_multiplier", earnMultiplier - 1);
            earnDescription = earnDescription.replace("$type", "SP");
            ConfigurationUtils.addIfNotEmpty(earnDescription, lore);
        }
        ConfigurationSection attributes = getConfigurationSection("attributes");
        if (attributes != null) {
            // Don't bother with the lore at all if the template has been blanked out
            String template = getMessage("attributes");
            if (!template.isEmpty()) {
                Set<String> keys = attributes.getKeys(false);
                for (String key : keys) {
                    String label = controller.getMessages().get("attributes." + key + ".name", key);
                    // We only display attributes as integers for now
                    int value = attributes.getInt(key);
                    if (value == 0) continue;
                    float max = 1;
                    MagicAttribute attribute = controller.getAttribute(key);
                    if (attribute != null) {
                        Double maxValue = attribute.getMax();
                        if (maxValue != null) {
                            max = (float)(double)maxValue;
                        }
                    }
                    label = getPropertyString("attributes", value, max).replace("$attribute", label);
                    lore.add(label);
                }
            }
        }
    }

    private String getPropertyString(String templateName, float value) {
        return getPropertyString(templateName, value, 1);
    }

    private String getPropertyString(String templateName, float value, float max) {
        String propertyTemplate = getBoolean("stack") ?
"property_stack" : "property_value"; if (value < 0) { propertyTemplate = propertyTemplate + "_negative"; } return controller.getMessages().getPropertyString(getMessageKey(templateName), value, max, getMessageKey(propertyTemplate)); } private String formatPropertyString(String template, float value) { return formatPropertyString(template, value, 1); } private String formatPropertyString(String template, float value, float max) { String propertyTemplate = getBoolean("stack") ? "property_stack" : "property_value"; if (value < 0) { propertyTemplate = propertyTemplate + "_negative"; } return controller.getMessages().formatPropertyString(template, value, max, getMessage(propertyTemplate)); } private void addDamageTypeLore(String property, String propertyType, double amount, List<String> lore) { if (amount != 0) { String templateKey = getMessageKey(property + "." + propertyType); String template; if (controller.getMessages().containsKey(templateKey)) { template = controller.getMessages().get(templateKey); } else { templateKey = getMessageKey("protection.unknown"); template = controller.getMessages().get(templateKey); String pretty = propertyType.substring(0, 1).toUpperCase() + propertyType.substring(1); template = template.replace("$type", pretty); } template = formatPropertyString(template, (float)amount); ConfigurationUtils.addIfNotEmpty(template, lore); } } public String getLevelString(String templateName, float amount) { return controller.getMessages().getLevelString(getMessageKey(templateName), amount); } public String getLevelString(String templateName, float amount, float max) { return controller.getMessages().getLevelString(getMessageKey(templateName), amount, max); } protected List<String> getCustomLore(Collection<String> loreTemplate) { List<String> lore = new ArrayList<>(); for (String line : loreTemplate) { if (line.startsWith("$")) { switch (line) { case "$description": addDescriptionLore(lore); break; case "$path": String pathTemplate = getMessage("path_lore", ""); String pathName = getPathName(); if (pathName != null && !pathTemplate.isEmpty()) { lore.add(pathTemplate.replace("$path", pathName)); } break; case "$owner": addOwnerDescription(lore); break; case "$spells": int spellCount = getSpells().size(); if (spellCount > 0) { ConfigurationUtils.addIfNotEmpty(getMessage("spell_count").replace("$count", Integer.toString(spellCount)), lore); } break; case "$brushes": int materialCount = getBrushes().size(); if (materialCount > 0) { ConfigurationUtils.addIfNotEmpty(getMessage("material_count").replace("$count", Integer.toString(materialCount)), lore); } break; case "$uses": addUseLore(lore); break; case "$mana_max": if (usesMana()) { int manaMax = getManaMax(); if (effectiveManaMax != manaMax) { String fullMessage = getLevelString("mana_amount_boosted", manaMax, controller.getMaxMana()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaMax)), lore); } else { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_amount", manaMax, controller.getMaxMana()), lore); } } break; case "$mana_regeneration": if (usesMana()) { int manaRegeneration = getManaRegeneration(); if (manaRegeneration > 0) { if (effectiveManaRegeneration != manaRegeneration) { String fullMessage = getLevelString("mana_regeneration_boosted", manaRegeneration, controller.getMaxManaRegeneration()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaRegeneration)), lore); } else { 
ConfigurationUtils.addIfNotEmpty(getLevelString("mana_regeneration", manaRegeneration, controller.getMaxManaRegeneration()), lore); } } } break; default: lore.add(ChatColor.translateAlternateColorCodes('&', line)); } } else { lore.add(ChatColor.translateAlternateColorCodes('&', line)); } } return lore; } protected void addDescriptionLore(List<String> lore) { String descriptionTemplate = controller.getMessages().get(getMessageKey("description_lore"), ""); if (!description.isEmpty() && !descriptionTemplate.isEmpty()) { if (description.contains("$path")) { String pathName = getPathName(); String description = ChatColor.translateAlternateColorCodes('&', this.description); description = description.replace("$path", pathName == null ? "Unknown" : pathName); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } else { String description = ChatColor.translateAlternateColorCodes('&', this.description); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } } } @Nullable protected String getPathName() { String pathName = null; com.elmakers.mine.bukkit.api.wand.WandUpgradePath path = getPath(); if (path != null) { pathName = path.getName(); } else if (mageClassKeys != null && !mageClassKeys.isEmpty()) { MageClassTemplate classTemplate = controller.getMageClassTemplate(mageClassKeys.get(0)); if (classTemplate != null) { String pathKey = classTemplate.getProperty("path", ""); if (!pathKey.isEmpty()) { path = controller.getPath(pathKey); } if (path != null) { pathName = path.getName(); } else { pathName = classTemplate.getName(); } } } return pathName; } protected void addOwnerDescription(List<String> lore) { if (owner != null && owner.length() > 0) { if (bound) { String ownerDescription = getMessage("bound_description", "$name").replace("$name", owner); ConfigurationUtils.addIfNotEmpty(ownerDescription, lore); } else { String ownerDescription = getMessage("owner_description", "$name").replace("$name", owner); ConfigurationUtils.addIfNotEmpty(ownerDescription, lore); } } } @SuppressWarnings("unchecked") protected List<String> getLore() { Object customLore = getProperty("lore"); if (customLore != null && customLore instanceof Collection) { return getCustomLore((Collection<String>)customLore); } List<String> lore = new ArrayList<>(); int spellCount = getSpells().size(); int materialCount = getBrushes().size(); String pathName = getPathName(); if (description.length() > 0) { if (randomizeOnActivate) { String randomDescription = getMessage("randomized_lore"); String randomTemplate = controller.getMessages().get(getMessageKey("randomized_description"), ""); if (randomDescription.length() > 0 && !randomTemplate.isEmpty()) { InventoryUtils.wrapText(randomTemplate.replace("$description", randomDescription), lore); return lore; } } if (description.contains("$") && !description.contains("$path")) { String newDescription = controller.getMessages().escape(description); if (!newDescription.equals(description)) { this.description = newDescription; setProperty("description", description); } } String descriptionTemplate = controller.getMessages().get(getMessageKey("description_lore"), ""); if (description.contains("$path") && !descriptionTemplate.isEmpty()) { String description = ChatColor.translateAlternateColorCodes('&', this.description); description = description.replace("$path", pathName == null ? 
"Unknown" : pathName); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } else if (description.contains("$")) { String randomDescription = getMessage("randomized_lore"); String randomTemplate = controller.getMessages().get(getMessageKey("randomized_description"), ""); if (randomDescription.length() > 0 && !randomTemplate.isEmpty()) { randomDescription = ChatColor.translateAlternateColorCodes('&', randomDescription); InventoryUtils.wrapText(randomTemplate.replace("$description", randomDescription), lore); return lore; } } else if (!descriptionTemplate.isEmpty()) { String description = ChatColor.translateAlternateColorCodes('&', this.description); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } } String pathTemplate = getMessage("path_lore", ""); if (pathName != null && !pathTemplate.isEmpty()) { lore.add(pathTemplate.replace("$path", pathName)); } if (!isUpgrade) { addOwnerDescription(lore); } SpellTemplate spell = mage == null ? controller.getSpellTemplate(getActiveSpellKey()) : mage.getSpell(getActiveSpellKey()); Messages messages = controller.getMessages(); // This is here specifically for a wand that only has // one spell now, but may get more later. Since you // can't open the inventory in this state, you can not // otherwise see the spell lore. boolean isSingleSpell = spell != null && spellCount == 1 && !hasInventory && !isUpgrade; if (isSingleSpell) { addSpellLore(messages, spell, lore, getActiveMage(), this); } if (materialCount == 1 && activeBrush != null && activeBrush.length() > 0) { lore.add(getBrushDisplayName(messages, MaterialBrush.parseMaterialKey(activeBrush))); } if (spellCount > 0) { if (isUpgrade) { ConfigurationUtils.addIfNotEmpty(getMessage("upgrade_spell_count").replace("$count", Integer.toString(spellCount)), lore); } else if (spellCount > 1) { ConfigurationUtils.addIfNotEmpty(getMessage("spell_count").replace("$count", Integer.toString(spellCount)), lore); } } if (materialCount > 0) { if (isUpgrade) { ConfigurationUtils.addIfNotEmpty(getMessage("upgrade_material_count").replace("$count", Integer.toString(materialCount)), lore); } else if (materialCount > 1) { ConfigurationUtils.addIfNotEmpty(getMessage("material_count").replace("$count", Integer.toString(materialCount)), lore); } } addUseLore(lore); addPropertyLore(lore, isSingleSpell); if (isUpgrade) { ConfigurationUtils.addIfNotEmpty(getMessage("upgrade_item_description"), lore); } return lore; } protected void addUseLore(List<String> lore) { int remaining = getRemainingUses(); if (!isSingleUse && remaining > 0) { if (isUpgrade) { String message = (remaining == 1) ? getMessage("upgrade_uses_singular") : getMessage("upgrade_uses"); ConfigurationUtils.addIfNotEmpty(message.replace("$count", Integer.toString(remaining)), lore); } else { String message = (remaining == 1) ? 
getMessage("uses_remaining_singular") : getMessage("uses_remaining_brief"); ConfigurationUtils.addIfNotEmpty(message.replace("$count", Integer.toString(remaining)), lore); } } } protected void updateLore() { CompatibilityUtils.setLore(item, getLore()); } public int getRemainingUses() { return uses; } public void makeEnchantable(boolean enchantable) { if (EnchantableWandMaterial == null) return; if (!enchantable) { item.setType(icon.getMaterial()); item.setDurability(icon.getData()); } else { MaterialSet enchantableMaterials = controller.getMaterialSetManager().getMaterialSetEmpty("enchantable"); if (!enchantableMaterials.testItem(item)) { item.setType(EnchantableWandMaterial); item.setDurability((short) 0); } } updateName(); } public static boolean hasActiveWand(Player player) { if (player == null) return false; ItemStack activeItem = player.getInventory().getItemInMainHand(); return isWand(activeItem); } @Nullable public static Wand getActiveWand(MagicController controller, Player player) { ItemStack activeItem = player.getInventory().getItemInMainHand(); if (isWand(activeItem)) { return controller.getWand(activeItem); } return null; } public static boolean isWand(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, WAND_KEY); } public static boolean isWandOrUpgrade(ItemStack item) { return isWand(item) || isUpgrade(item); } public static boolean isSpecial(ItemStack item) { return isWand(item) || isUpgrade(item) || isSpell(item) || isBrush(item) || isSP(item) || isCurrency(item); } public static boolean isSelfDestructWand(ItemStack item) { return item != null && WAND_SELF_DESTRUCT_KEY != null && InventoryUtils.hasMeta(item, WAND_SELF_DESTRUCT_KEY); } public static boolean isSP(ItemStack item) { return InventoryUtils.hasMeta(item, "sp"); } public static boolean isCurrency(ItemStack item) { return InventoryUtils.hasMeta(item, "currency"); } @Nullable public static Integer getSP(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; String spNode = InventoryUtils.getMetaString(item, "sp"); if (spNode == null) return null; Integer sp = null; try { sp = Integer.parseInt(spNode); } catch (Exception ex) { sp = null; } return sp; } @Nullable public static Double getCurrencyAmount(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object currencyNode = InventoryUtils.getNode(item, "currency"); if (currencyNode == null) return null; return InventoryUtils.getMetaDouble(currencyNode, "amount"); } @Nullable public static String getCurrencyType(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object currencyNode = InventoryUtils.getNode(item, "currency"); if (currencyNode == null) return null; return InventoryUtils.getMetaString(currencyNode, "type"); } public static boolean isSpell(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, "spell"); } public static boolean isSkill(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, "skill"); } public static boolean isBrush(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, "brush"); } @Nullable protected static Object getWandOrUpgradeNode(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object wandNode = InventoryUtils.getNode(item, WAND_KEY); if (wandNode == null) { wandNode = InventoryUtils.getNode(item, UPGRADE_KEY); } return wandNode; } @Nullable public static String getWandTemplate(ItemStack item) { Object wandNode = getWandOrUpgradeNode(item); if (wandNode == null) return null; return InventoryUtils.getMetaString(wandNode, 
"template"); } @Nullable public static String getWandId(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object wandNode = InventoryUtils.getNode(item, WAND_KEY); if (wandNode == null) return null; return InventoryUtils.getMetaString(wandNode, "id"); } @Nullable public static String getSpell(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return null; return InventoryUtils.getMetaString(spellNode, "key"); } @Nullable @Override public Spell getSpell(String spellKey, com.elmakers.mine.bukkit.api.magic.Mage mage) { if (mage == null) { return null; } if (!hasSpell(spellKey)) return null; SpellKey key = new SpellKey(spellKey); spellKey = key.getBaseKey(); Integer level = spellLevels.get(spellKey); if (level != null) { spellKey = new SpellKey(spellKey, level).getKey(); } return mage.getSpell(spellKey); } @Nullable @Override public Spell getSpell(String spellKey) { return getSpell(spellKey, mage); } @Nullable public static String getSpellClass(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return null; return InventoryUtils.getMetaString(spellNode, "class"); } public static boolean isQuickCastSkill(ItemStack item) { if (InventoryUtils.isEmpty(item)) return false; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return false; Boolean quickCast = InventoryUtils.containsNode(spellNode, "quick_cast") ? InventoryUtils.getMetaBoolean(spellNode, "quick_cast") : null; return quickCast == null ? true : quickCast; } @Nullable public static String getSpellArgs(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return null; return InventoryUtils.getMetaString(spellNode, "args"); } @Nullable public static String getBrush(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object brushNode = InventoryUtils.getNode(item, "brush"); if (brushNode == null) return null; return InventoryUtils.getMetaString(brushNode, "key"); } protected void updateInventoryName(ItemStack item, boolean activeName) { if (isSpell(item)) { Spell spell = mage.getSpell(getSpell(item)); if (spell != null) { updateSpellName(controller.getMessages(), item, spell, activeName ? this : null, activeBrush); } } else if (isBrush(item)) { updateBrushName(controller.getMessages(), item, getBrush(item), activeName ? this : null); } } public static void updateSpellItem(Messages messages, ItemStack itemStack, SpellTemplate spell, String args, Wand wand, String activeMaterial, boolean isItem) { updateSpellItem(messages, itemStack, spell, args, wand == null ? 
null : wand.getActiveMage(), wand, activeMaterial, isItem); } public static void updateSpellItem(Messages messages, ItemStack itemStack, SpellTemplate spell, String args, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand, String activeMaterial, boolean isItem) { updateSpellName(messages, itemStack, spell, wand, activeMaterial); List<String> lore = new ArrayList<>(); addSpellLore(messages, spell, lore, mage, wand); if (isItem) { ConfigurationUtils.addIfNotEmpty(messages.get("wand.spell_item_description"), lore); } CompatibilityUtils.setLore(itemStack, lore); Object spellNode = CompatibilityUtils.createNode(itemStack, "spell"); CompatibilityUtils.setMeta(spellNode, "key", spell.getKey()); CompatibilityUtils.setMeta(spellNode, "args", args); if (SpellGlow) { CompatibilityUtils.addGlow(itemStack); } } public static void updateSpellName(Messages messages, ItemStack itemStack, SpellTemplate spell, Wand wand, String activeMaterial) { String displayName; if (wand != null && !wand.isQuickCast()) { displayName = wand.getActiveWandName(spell); } else { displayName = getSpellDisplayName(messages, spell, MaterialBrush.parseMaterialKey(activeMaterial)); } CompatibilityUtils.setDisplayName(itemStack, displayName); } public static void updateBrushName(Messages messages, ItemStack itemStack, String materialKey, Wand wand) { updateBrushName(messages, itemStack, MaterialBrush.parseMaterialKey(materialKey), wand); } public static void updateBrushName(Messages messages, ItemStack itemStack, MaterialBrush brush, Wand wand) { String displayName; if (wand != null) { Spell activeSpell = wand.getActiveSpell(); if (activeSpell != null && activeSpell.usesBrush()) { displayName = wand.getActiveWandName(brush); } else { displayName = ChatColor.RED + brush.getName(messages); } } else { displayName = brush.getName(messages); } CompatibilityUtils.setDisplayName(itemStack, displayName); } public static void updateBrushItem(Messages messages, ItemStack itemStack, String materialKey, Wand wand) { updateBrushItem(messages, itemStack, MaterialBrush.parseMaterialKey(materialKey), wand); } public static void updateBrushItem(Messages messages, ItemStack itemStack, MaterialBrush brush, Wand wand) { updateBrushName(messages, itemStack, brush, wand); Object brushNode = CompatibilityUtils.createNode(itemStack, "brush"); CompatibilityUtils.setMeta(brushNode, "key", brush.getKey()); } public void updateHotbar() { if (mage == null) return; if (!isInventoryOpen()) return; Player player = mage.getPlayer(); if (player == null) return; if (!hasStoredInventory()) return; WandMode wandMode = getMode(); if (wandMode == WandMode.INVENTORY) { PlayerInventory inventory = player.getInventory(); updateHotbar(inventory); DeprecatedUtils.updateInventory(player); } } private boolean updateHotbar(PlayerInventory playerInventory) { if (getMode() != WandMode.INVENTORY) return false; Inventory hotbar = getHotbar(); if (hotbar == null) return false; // Make sure the wand is still in the held slot ItemStack currentItem = playerInventory.getItem(heldSlot); if (currentItem == null || !currentItem.getItemMeta().equals(item.getItemMeta())) { controller.getLogger().warning("Trying to update hotbar but the wand has gone missing"); return false; } // Set hotbar items from remaining list int targetOffset = 0; for (int hotbarSlot = 0; hotbarSlot < HOTBAR_INVENTORY_SIZE; hotbarSlot++) { if (hotbarSlot == heldSlot) { targetOffset = 1; } ItemStack hotbarItem = hotbar.getItem(hotbarSlot); updateInventoryName(hotbarItem, true); playerInventory.setItem(hotbarSlot + 
targetOffset, hotbarItem); } return true; } private void updateInventory() { if (mage == null) return; if (!isInventoryOpen()) return; Player player = mage.getPlayer(); if (player == null) return; WandMode wandMode = getMode(); if (wandMode == WandMode.INVENTORY) { if (!hasStoredInventory()) return; PlayerInventory inventory = player.getInventory(); if (!updateHotbar(inventory)) { for (int i = 0; i < HOTBAR_SIZE; i++) { if (i != inventory.getHeldItemSlot()) { inventory.setItem(i, null); } } } updateInventory(inventory); updateName(); } else if (wandMode == WandMode.CHEST || wandMode == WandMode.SKILLS) { Inventory inventory = getDisplayInventory(); inventory.clear(); updateInventory(inventory); } } private void updateInventory(Inventory targetInventory) { // Set inventory from current page, taking into account hotbar offset int currentOffset = getHotbarSize() > 0 ? HOTBAR_SIZE : 0; List<Inventory> inventories = this.inventories; if (openInventoryPage < inventories.size()) { Inventory inventory = inventories.get(openInventoryPage); ItemStack[] contents = inventory.getContents(); for (int i = 0; i < contents.length; i++) { ItemStack inventoryItem = contents[i]; updateInventoryName(inventoryItem, false); targetInventory.setItem(currentOffset, inventoryItem); currentOffset++; } } for (;currentOffset < targetInventory.getSize() && currentOffset < PLAYER_INVENTORY_SIZE; currentOffset++) { targetInventory.setItem(currentOffset, null); } } protected static void addSpellLore(Messages messages, SpellTemplate spell, List<String> lore, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand) { spell.addLore(messages, mage, wand, lore); } private String getInventoryTitle() { return getMessage("chest_inventory_title", "Wand"); } protected Inventory getOpenInventory() { while (openInventoryPage >= inventories.size()) { inventories.add(CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle())); } return inventories.get(openInventoryPage); } protected Inventory getDisplayInventory() { if (displayInventory == null || displayInventory.getSize() != getInventorySize()) { displayInventory = CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle()); } return displayInventory; } public void saveChestInventory() { if (displayInventory == null) return; Inventory openInventory = getOpenInventory(); Map<String, Integer> previousSlots = new HashMap<>(); Set<String> addedBack = new HashSet<>(); for (int i = 0; i < displayInventory.getSize(); i++) { ItemStack playerItem = displayInventory.getItem(i); String itemSpellKey = getSpell(playerItem); if (!updateSlot(i + openInventoryPage * getInventorySize(), playerItem)) { playerItem = new ItemStack(Material.AIR); displayInventory.setItem(i, playerItem); } else if (itemSpellKey != null) { addedBack.add(itemSpellKey); } // We don't want to clear items that were taken out, so save them to check later ItemStack current = openInventory.getItem(i); String spellKey = getSpell(current); if (spellKey != null) { previousSlots.put(spellKey, i); } openInventory.setItem(i, playerItem); } // Put back any items that were taken out for (Map.Entry<String, Integer> entry : previousSlots.entrySet()) { if (!addedBack.contains(entry.getKey())) { ItemStack current = openInventory.getItem(entry.getValue()); ItemStack itemStack = createSpellItem(entry.getKey(), "", false); if (current == null || current.getType() == Material.AIR) { openInventory.setItem(entry.getValue(), itemStack); } else { openInventory.addItem(itemStack); } } } } public void 
saveInventory() {
        if (mage == null) return;
        if (getMode() == WandMode.SKILLS) {
            saveChestInventory();
            return;
        }
        if (!isInventoryOpen()) return;
        if (mage.getPlayer() == null) return;
        if (getMode() != WandMode.INVENTORY) return;
        if (!hasStoredInventory()) return;

        // Work-around glitches that happen if you're dragging an item on death
        if (mage.isDead()) return;

        // Fill in the hotbar
        Player player = mage.getPlayer();
        PlayerInventory playerInventory = player.getInventory();
        Inventory hotbar = getHotbar();
        if (hotbar != null) {
            int saveOffset = 0;
            for (int i = 0; i < HOTBAR_SIZE; i++) {
                ItemStack playerItem = playerInventory.getItem(i);
                if (isWand(playerItem)) {
                    saveOffset = -1;
                    continue;
                }
                int hotbarOffset = i + saveOffset;
                if (hotbarOffset >= hotbar.getSize()) {
                    // This can happen if there is somehow no wand in the wand inventory.
                    break;
                }
                if (!updateSlot(i + saveOffset + currentHotbar * HOTBAR_INVENTORY_SIZE, playerItem)) {
                    playerItem = new ItemStack(Material.AIR);
                    playerInventory.setItem(i, playerItem);
                }
                hotbar.setItem(i + saveOffset, playerItem);
            }
        }

        // Fill in the active inventory page
        int hotbarOffset = getHotbarSize();
        Inventory openInventory = getOpenInventory();
        for (int i = 0; i < openInventory.getSize(); i++) {
            ItemStack playerItem = playerInventory.getItem(i + HOTBAR_SIZE);
            if (!updateSlot(i + hotbarOffset + openInventoryPage * getInventorySize(), playerItem)) {
                playerItem = new ItemStack(Material.AIR);
                playerInventory.setItem(i + HOTBAR_SIZE, playerItem);
            }
            openInventory.setItem(i, playerItem);
        }
    }

    protected boolean updateSlot(int slot, ItemStack item) {
        if (item == null || item.getType() == Material.AIR) return true;
        String spellKey = getSpell(item);
        if (spellKey != null) {
            SpellKey key = new SpellKey(spellKey);
            spellInventory.put(key.getBaseKey(), slot);
        } else {
            String brushKey = getBrush(item);
            if (brushKey != null) {
                brushInventory.put(brushKey, slot);
            } else if (mage != null) {
                // Must have been an item inserted directly into player's inventory?
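                // Hand it back to the mage instead of storing it in the wand, and report the slot as not updated.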
                mage.giveItem(item);
                return false;
            }
        }
        return true;
    }

    @Override
    public int enchant(int totalLevels, com.elmakers.mine.bukkit.api.magic.Mage mage, boolean addSpells) {
        return randomize(totalLevels, true, mage, addSpells);
    }

    @Override
    public int enchant(int totalLevels, com.elmakers.mine.bukkit.api.magic.Mage mage) {
        return randomize(totalLevels, true, mage, true);
    }

    @Override
    public int enchant(int totalLevels) {
        return randomize(totalLevels, true, null, true);
    }

    protected int randomize(int totalLevels, boolean additive, com.elmakers.mine.bukkit.api.magic.Mage enchanter, boolean addSpells) {
        if (enchanter == null && mage != null) {
            enchanter = mage;
        }

        if (maxEnchantCount > 0 && enchantCount >= maxEnchantCount) {
            if (enchanter != null && addSpells) {
                enchanter.sendMessage(getMessage("max_enchanted").replace("$wand", getName()));
            }
            return 0;
        }

        WandUpgradePath path = getPath();
        if (path == null) {
            if (enchanter != null && addSpells) {
                enchanter.sendMessage(getMessage("no_path").replace("$wand", getName()));
            }
            return 0;
        }

        int minLevel = path.getMinLevel();
        if (totalLevels < minLevel) {
            if (enchanter != null && addSpells) {
                String levelMessage = getMessage("need_more_levels");
                levelMessage = levelMessage.replace("$levels", Integer.toString(minLevel));
                enchanter.sendMessage(levelMessage);
            }
            return 0;
        }

        // Just a hard-coded sanity check
        int maxLevel = path.getMaxLevel();
        totalLevels = Math.min(totalLevels, maxLevel * 50);

        int addLevels = Math.min(totalLevels, maxLevel);
        int levels = 0;
        boolean modified = true;
        while (addLevels >= minLevel && modified) {
            boolean hasUpgrade = path.hasUpgrade();
            WandLevel level = path.getLevel(addLevels);

            if (!path.canEnchant(this) && (path.hasSpells() || path.hasMaterials())) {
                // Check for level up
                WandUpgradePath nextPath = path.getUpgrade();
                if (nextPath != null) {
                    if (path.checkUpgradeRequirements(this, addSpells ? enchanter : null)) {
                        path.upgrade(this, enchanter);
                    }
                    break;
                } else {
                    if (enchanter != null && addSpells) {
                        enchanter.sendMessage(getMessage("fully_enchanted").replace("$wand", getName()));
                    }
                    break;
                }
            }

            modified = level.randomizeWand(enchanter, this, additive, hasUpgrade, addSpells);
            totalLevels -= maxLevel;
            if (modified) {
                if (enchanter != null) {
                    path.enchanted(enchanter);
                }
                levels += addLevels;

                // Check for level up
                WandUpgradePath nextPath = path.getUpgrade();
                if (nextPath != null && path.checkUpgradeRequirements(this, null) && !path.canEnchant(this)) {
                    path.upgrade(this, enchanter);
                    path = nextPath;
                }
            } else if (path.canEnchant(this)) {
                if (enchanter != null && levels == 0 && addSpells) {
                    String message = getMessage("require_more_levels");
                    enchanter.sendMessage(message);
                }
            } else if (hasUpgrade) {
                if (path.checkUpgradeRequirements(this, addSpells ?
enchanter : null)) { path.upgrade(this, enchanter); levels += addLevels; } } else if (enchanter != null && addSpells) { enchanter.sendMessage(getMessage("fully_enchanted").replace("$wand", getName())); } addLevels = Math.min(totalLevels, maxLevel); additive = true; } if (levels > 0) { enchantCount++; setProperty("enchant_count", enchantCount); } saveState(); updateName(); updateLore(); return levels; } protected void randomize() { if (template != null && template.length() > 0) { ConfigurationSection wandConfig = controller.getWandTemplateConfiguration(template); if (wandConfig != null && wandConfig.contains("icon")) { String iconKey = wandConfig.getString("icon"); if (iconKey.contains(",")) { Random r = new Random(); String[] keys = StringUtils.split(iconKey, ','); iconKey = keys[r.nextInt(keys.length)]; } setIcon(ConfigurationUtils.toMaterialAndData(iconKey)); updateIcon(); playEffects("randomize"); } } } @Nullable public static Wand createWand(MagicController controller, String templateName) { if (controller == null) return null; Wand wand = null; try { wand = new Wand(controller, templateName); } catch (UnknownWandException ignore) { // the Wand constructor throws an exception on an unknown template } catch (Exception ex) { ex.printStackTrace(); } return wand; } @Nullable public static Wand createWand(MagicController controller, ItemStack itemStack) { if (controller == null) return null; Wand wand = null; try { wand = controller.getWand(InventoryUtils.makeReal(itemStack)); wand.saveState(); wand.updateName(); } catch (Exception ex) { ex.printStackTrace(); } return wand; } @Override public boolean add(com.elmakers.mine.bukkit.api.wand.Wand other) { if (other instanceof Wand) { return add((Wand)other); } return false; } public boolean add(Wand other) { return add(other, this.mage); } @Override public boolean add(com.elmakers.mine.bukkit.api.wand.Wand other, com.elmakers.mine.bukkit.api.magic.Mage mage) { if (other instanceof Wand) { return add((Wand)other, mage); } return false; } public boolean add(Wand other, com.elmakers.mine.bukkit.api.magic.Mage mage) { if (!isModifiable()) { // Only allow upgrading a modifiable wand via an upgrade item // and only if the paths match. 
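            // (An unmodifiable wand can still accept an upgrade item, but only when the item's path matches this wand's path.)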
            if (!other.isUpgrade() || other.path == null || path == null || other.path.isEmpty() || path.isEmpty() || !other.path.equals(path)) {
                return false;
            }
        }

        // Can't combine limited-use wands
        if (hasUses || other.hasUses) {
            return false;
        }
        if (isHeroes || other.isHeroes) {
            return false;
        }

        ConfigurationSection templateConfig = controller.getWandTemplateConfiguration(other.getTemplateKey());

        // Check for forced upgrades
        if (other.isForcedUpgrade()) {
            if (templateConfig == null) {
                return false;
            }
            templateConfig = ConfigurationUtils.cloneConfiguration(templateConfig);
            templateConfig.set("name", templateConfig.getString("upgrade_name"));
            templateConfig.set("description", templateConfig.getString("upgrade_description"));
            templateConfig.set("force", null);
            templateConfig.set("upgrade", null);
            templateConfig.set("icon", templateConfig.getString("upgrade_icon"));
            templateConfig.set("indestructible", null);
            templateConfig.set("upgrade_icon", null);
            configure(templateConfig);
            return true;
        }

        // Don't allow upgrades from an item on a different path
        if (other.isUpgrade() && other.path != null && !other.path.isEmpty() && (this.path == null || !this.path.equals(other.path))) {
            return false;
        }

        ConfigurationSection upgradeConfig = ConfigurationUtils.cloneConfiguration(other.getEffectiveConfiguration());
        upgradeConfig.set("id", null);
        upgradeConfig.set("indestructible", null);
        upgradeConfig.set("upgrade", null);
        upgradeConfig.set("icon", other.upgradeIcon == null ? null : other.upgradeIcon.getKey());
        upgradeConfig.set("upgrade_icon", null);
        upgradeConfig.set("template", other.upgradeTemplate);

        Messages messages = controller.getMessages();
        if (other.rename && templateConfig != null) {
            String newName = messages.get("wands." + other.template + ".name");
            newName = templateConfig.getString("name", newName);
            upgradeConfig.set("name", newName);
        } else {
            upgradeConfig.set("name", null);
        }
        if (other.renameDescription && templateConfig != null) {
            String newDescription = messages.get("wands." + other.template + ".description");
            newDescription = templateConfig.getString("description", newDescription);
            upgradeConfig.set("description", newDescription);
        } else {
            upgradeConfig.set("description", null);
        }
        return upgrade(upgradeConfig);
    }

    public boolean isForcedUpgrade() {
        return isUpgrade && forceUpgrade;
    }

    public boolean keepOnDeath() {
        return keep;
    }

    public static WandMode parseWandMode(String modeString, WandMode defaultValue) {
        if (modeString != null && !modeString.isEmpty()) {
            try {
                defaultValue = WandMode.valueOf(modeString.toUpperCase());
            } catch (Exception ignored) {
            }
        }
        return defaultValue;
    }

    public static WandAction parseWandAction(String actionString, WandAction defaultValue) {
        if (actionString != null && !actionString.isEmpty()) {
            try {
                defaultValue = WandAction.valueOf(actionString.toUpperCase());
            } catch (Exception ignored) {
            }
        }
        return defaultValue;
    }

    private void updateActiveMaterial() {
        if (mage == null) return;

        if (activeBrush == null) {
            mage.clearBuildingMaterial();
        } else {
            com.elmakers.mine.bukkit.api.block.MaterialBrush brush = mage.getBrush();
            brush.update(activeBrush);
        }
    }

    public void cycleActive(int direction) {
        Player player = mage != null ?
mage.getPlayer() : null; if (player != null && player.isSneaking()) { com.elmakers.mine.bukkit.api.spell.Spell activeSpell = getActiveSpell(); boolean cycleMaterials = false; if (activeSpell != null) { cycleMaterials = activeSpell.usesBrushSelection(); } if (cycleMaterials) { cycleMaterials(direction); } else { cycleSpells(direction); } } else { cycleSpells(direction); } } public void toggleInventory() { if (mage != null && mage.cancelSelection()) { mage.playSoundEffect(noActionSound); return; } Player player = mage == null ? null : mage.getPlayer(); boolean isSneaking = player != null && player.isSneaking(); Spell currentSpell = getActiveSpell(); if (getBrushMode() == WandMode.CHEST && brushSelectSpell != null && !brushSelectSpell.isEmpty() && isSneaking && currentSpell != null && currentSpell.usesBrushSelection()) { Spell brushSelect = mage.getSpell(brushSelectSpell); if (brushSelect != null) { brushSelect.cast(); return; } } if (!hasInventory) { if (activeSpell == null || activeSpell.length() == 0) { // Sanity check, so it'll switch to inventory next time updateHasInventory(); if (spells.size() > 0) { setActiveSpell(spells.iterator().next()); } } updateName(); return; } if (!isInventoryOpen()) { openInventory(); } else { closeInventory(); } } public void updateHasInventory() { int inventorySize = getSpells().size() + getBrushes().size(); hasInventory = inventorySize > 1 || (inventorySize == 1 && hasSpellProgression) || autoFill; } public void cycleInventory() { cycleInventory(1); } public void cycleInventory(int direction) { if (!hasInventory) { return; } if (isInventoryOpen()) { saveInventory(); int inventoryCount = inventories.size(); setOpenInventoryPage(inventoryCount == 0 ? 0 : (openInventoryPage + inventoryCount + direction) % inventoryCount); updateInventory(); if (mage != null && inventories.size() > 1) { if (!playPassiveEffects("cycle") && inventoryCycleSound != null) { mage.playSoundEffect(inventoryCycleSound); } DeprecatedUtils.updateInventory(mage.getPlayer()); } } } @Override public void cycleHotbar() { cycleHotbar(1); } public void cycleHotbar(int direction) { if (!hasInventory || getMode() != WandMode.INVENTORY) { return; } if (isInventoryOpen() && mage != null && hotbars.size() > 1) { saveInventory(); int hotbarCount = hotbars.size(); setCurrentHotbar(hotbarCount == 0 ? 
0 : (currentHotbar + hotbarCount + direction) % hotbarCount);
            updateHotbar();
            if (!playPassiveEffects("cycle") && inventoryCycleSound != null) {
                mage.playSoundEffect(inventoryCycleSound);
            }
            sendMessage("hotbar_changed");
            updateHotbarStatus();
            DeprecatedUtils.updateInventory(mage.getPlayer());
        }
    }

    public void openInventory() {
        if (mage == null) return;
        if (System.currentTimeMillis() < mage.getWandDisableTime()) return;

        WandMode wandMode = getMode();
        if (wandMode == WandMode.CHEST || wandMode == WandMode.SKILLS) {
            inventoryIsOpen = true;
            if (!playPassiveEffects("open") && inventoryOpenSound != null) {
                mage.playSoundEffect(inventoryOpenSound);
            }
            updateInventory();
            mage.getPlayer().openInventory(getDisplayInventory());
        } else if (wandMode == WandMode.INVENTORY) {
            if (hasStoredInventory()) return;
            if (storeInventory()) {
                inventoryIsOpen = true;
                showActiveIcon(true);
                if (!playPassiveEffects("open") && inventoryOpenSound != null) {
                    mage.playSoundEffect(inventoryOpenSound);
                }
                updateInventory();
                updateHotbarStatus();
            }
        }
    }

    @Override
    public void closeInventory() {
        closeInventory(true);
    }

    public void closeInventory(boolean closePlayerInventory) {
        if (!isInventoryOpen()) return;
        controller.disableItemSpawn();
        inventoryWasOpen = true;
        WandMode mode = getMode();
        try {
            saveInventory();
            updateSpellInventory();
            updateBrushInventory();
            inventoryIsOpen = false;
            if (mage != null) {
                if (!playPassiveEffects("close") && inventoryCloseSound != null) {
                    mage.playSoundEffect(inventoryCloseSound);
                }
                if (mode == WandMode.INVENTORY) {
                    restoreInventory();
                    showActiveIcon(false);
                } else if (closePlayerInventory) {
                    mage.getPlayer().closeInventory();
                }

                // Check for items the player might've glitched onto their body...
                PlayerInventory inventory = mage.getPlayer().getInventory();
                ItemStack testItem = inventory.getHelmet();
                if (isSpell(testItem) || isBrush(testItem)) {
                    inventory.setHelmet(new ItemStack(Material.AIR));
                    DeprecatedUtils.updateInventory(mage.getPlayer());
                }
                testItem = inventory.getBoots();
                if (isSpell(testItem) || isBrush(testItem)) {
                    inventory.setBoots(new ItemStack(Material.AIR));
                    DeprecatedUtils.updateInventory(mage.getPlayer());
                }
                testItem = inventory.getLeggings();
                if (isSpell(testItem) || isBrush(testItem)) {
                    inventory.setLeggings(new ItemStack(Material.AIR));
                    DeprecatedUtils.updateInventory(mage.getPlayer());
                }
                testItem = inventory.getChestplate();
                if (isSpell(testItem) || isBrush(testItem)) {
                    inventory.setChestplate(new ItemStack(Material.AIR));
                    DeprecatedUtils.updateInventory(mage.getPlayer());
                }
                // This is kind of a hack :(
                testItem = inventory.getItemInOffHand();
                if ((isSpell(testItem) && !isSkill(testItem)) || isBrush(testItem)) {
                    inventory.setItemInOffHand(new ItemStack(Material.AIR));
                    DeprecatedUtils.updateInventory(mage.getPlayer());
                }
            }
        } catch (Throwable ex) {
            restoreInventory();
        }

        if (mode == WandMode.INVENTORY && mage != null && closePlayerInventory) {
            try {
                mage.getPlayer().closeInventory();
            } catch (Throwable ex) {
                ex.printStackTrace();
            }
        }
        controller.enableItemSpawn();
        inventoryWasOpen = false;
    }

    @Override
    public boolean fill(Player player) {
        return fill(player, 0);
    }

    @Override
    public boolean fill(Player player, int maxLevel) {
        // This is for the editor, it saves using player logins and is *not*
        // directly related to mage ids. This has to use player id.
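        // When FILL_CREATOR is set, only spells created by this player are kept or added.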
String playerId = player.getUniqueId().toString(); closeInventory(); // Update the inventory to make sure we don't overwrite slots of current spells if (this.mage != null) { buildInventory(); } Collection<String> currentSpells = new ArrayList<>(getSpells()); for (String spellKey : currentSpells) { SpellTemplate spell = controller.getSpellTemplate(spellKey); boolean removeSpell = !spell.hasCastPermission(player); String creatorId = spell.getCreatorId(); removeSpell = removeSpell || (FILL_CREATOR && (creatorId == null || !playerId.equals(creatorId))); if (removeSpell) { removeSpell(spellKey); } } Collection<SpellTemplate> allSpells = controller.getPlugin().getSpellTemplates(); // Hack to prevent messaging Mage mage = this.mage; this.mage = null; for (SpellTemplate spell : allSpells) { String key = spell.getKey(); if (maxLevel > 0 && spell.getSpellKey().getLevel() > maxLevel) { continue; } if (key.startsWith("heroes*")) { continue; } String creatorId = spell.getCreatorId(); if (FILL_CREATOR && (creatorId == null || !playerId.equals(creatorId))) { continue; } if (spell.hasCastPermission(player) && spell.hasIcon() && !spell.isHidden()) { addSpell(key); } } this.mage = mage; updateSpellInventory(); updateBrushInventory(); if (this.mage != null) { buildInventory(); } if (!FILL_CREATOR) { if (autoFill) setProperty("fill", false); autoFill = false; } saveState(); return true; } protected void checkActiveMaterial() { if (activeBrush == null || activeBrush.length() == 0) { Set<String> materials = getBrushes(); if (materials.size() > 0) { activeBrush = materials.iterator().next(); } } } @Override public boolean addItem(ItemStack item) { if (isUpgrade) return false; if (isModifiable() && isSpell(item) && !isSkill(item)) { String spell = getSpell(item); SpellKey spellKey = new SpellKey(spell); Integer currentLevel = spellLevels.get(spellKey.getBaseKey()); if ((currentLevel == null || currentLevel < spellKey.getLevel()) && addSpell(spell)) { return true; } } else if (isModifiable() && isBrush(item)) { String materialKey = getBrush(item); Set<String> materials = getBrushes(); if (!materials.contains(materialKey) && addBrush(materialKey)) { return true; } } else if (isUpgrade(item)) { Wand wand = controller.getWand(item); return this.add(wand); } if (mage != null && !mage.isAtMaxSkillPoints() && controller.skillPointItemsEnabled()) { Integer sp = getSP(item); if (sp != null) { int amount = (int)Math.floor(mage.getEarnMultiplier() * sp * item.getAmount()); mage.addSkillPoints(amount); return true; } } return false; } protected void updateEffects() { updateEffects(mage); } public void updateEffects(Mage mage) { if (mage == null) return; Player player = mage.getPlayer(); if (player == null) return; // Update Bubble effects effects if (effectBubbles && effectColor != null) { Location potionEffectLocation = player.getLocation(); potionEffectLocation.setX(potionEffectLocation.getX() + random.nextDouble() - 0.5); potionEffectLocation.setY(potionEffectLocation.getY() + random.nextDouble() * player.getEyeHeight()); potionEffectLocation.setZ(potionEffectLocation.getZ() + random.nextDouble() - 0.5); EffectPlayer.displayParticle(Particle.SPELL_MOB, potionEffectLocation, 0, 0, 0, 0, 0, 1, effectColor.getColor(), null, (byte)0, 24); } Location location = mage.getLocation(); long now = System.currentTimeMillis(); boolean playEffects = !activeEffectsOnly || inventoryIsOpen || isInOffhand; if (playEffects && effectParticle != null && effectParticleInterval > 0 && effectParticleCount > 0) { boolean velocityCheck = true; if 
(effectParticleMinVelocity > 0) { double velocitySquared = effectParticleMinVelocity * effectParticleMinVelocity; Vector velocity = mage.getVelocity().clone(); velocity.setY(0); double speedSquared = velocity.lengthSquared(); velocityCheck = (speedSquared > velocitySquared); } if (velocityCheck && (lastParticleEffect == 0 || now > lastParticleEffect + effectParticleInterval)) { lastParticleEffect = now; Location effectLocation = player.getLocation(); Location eyeLocation = player.getEyeLocation(); effectLocation.setY(eyeLocation.getY() + effectParticleOffset); if (effectPlayer == null) { effectPlayer = new EffectRing(controller.getPlugin()); effectPlayer.setParticleCount(1); effectPlayer.setIterations(1); effectPlayer.setParticleOffset(0, 0, 0); } effectPlayer.setMaterial(location.getBlock().getRelative(BlockFace.DOWN)); if (effectParticleData == 0) { effectPlayer.setColor(getEffectColor()); } else { effectPlayer.setColor(null); } effectPlayer.setParticleType(effectParticle); effectPlayer.setParticleData(effectParticleData); effectPlayer.setSize(effectParticleCount); effectPlayer.setRadius((float)effectParticleRadius); effectPlayer.start(effectLocation, null); } } if (castSpell != null && castInterval > 0) { if (lastSpellCast == 0 || now > lastSpellCast + castInterval) { boolean velocityCheck = true; if (castMinVelocity > 0) { double velocitySquared = castMinVelocity * castMinVelocity; Vector velocity = mage.getVelocity(); if (castVelocityDirection != null) { velocity = velocity.clone().multiply(castVelocityDirection); // This is kind of a hack to make jump-detection work. if (castVelocityDirection.getY() < 0) { velocityCheck = velocity.getY() < 0; } else { velocityCheck = velocity.getY() > 0; } } if (velocityCheck) { double speedSquared = velocity.lengthSquared(); velocityCheck = (speedSquared > velocitySquared); } } if (velocityCheck) { lastSpellCast = now; Spell spell = mage.getSpell(castSpell); if (spell != null) { if (castParameters == null) { castParameters = new MemoryConfiguration(); } castParameters.set("passive", true); mage.setCostFree(true); mage.setQuiet(true); try { spell.cast(castParameters); } catch (Exception ex) { controller.getLogger().log(Level.WARNING, "Error casting aura spell " + spell.getKey(), ex); } mage.setCostFree(false); mage.setQuiet(false); } } } } if (playEffects && effectSound != null && controller.soundsEnabled() && effectSoundInterval > 0) { if (lastSoundEffect == 0 || now > lastSoundEffect + effectSoundInterval) { lastSoundEffect = now; effectSound.play(controller.getPlugin(), mage.getPlayer()); } } } protected void updateDurability() { int maxDurability = item.getType().getMaxDurability(); if (maxDurability > 0 && effectiveManaMax > 0) { int durability = (short)(getMana() * maxDurability / effectiveManaMax); durability = maxDurability - durability; if (durability >= maxDurability) { durability = maxDurability - 1; } else if (durability < 0) { durability = 0; } item.setDurability((short)durability); } } public boolean usesXPBar() { return (usesCurrency() && currencyMode.useXP()) || (usesMana() && manaMode.useXP()); } public boolean usesXPNumber() { return (usesCurrency() && currencyMode.useXPNumber()) || (usesMana() && manaMode.useXP()); } public boolean hasSpellProgression() { return hasSpellProgression; } public boolean usesXPDisplay() { return usesXPBar() || usesXPNumber(); } @Override public void updateMana() { Player player = mage == null ? 
null : mage.getPlayer(); if (player == null) return; float mana = getMana(); if (usesMana()) { if (manaMode.useGlow()) { if (mana == effectiveManaMax) { CompatibilityUtils.addGlow(item); } else { CompatibilityUtils.removeGlow(item); } } if (manaMode.useDurability()) { updateDurability(); } } if (usesXPDisplay()) { int playerLevel = player.getLevel(); float playerProgress = player.getExp(); if (usesMana() && manaMode.useXPNumber()) { playerLevel = (int) mana; } if (usesMana() && manaMode.useXPBar()) { playerProgress = Math.min(Math.max(0, mana / effectiveManaMax), 1); } if (usesCurrency() && currencyMode.useXPNumber()) { playerLevel = (int)Math.ceil(currencyDisplay.getBalance(mage, this)); } mage.sendExperience(playerProgress, playerLevel); } } @Override public boolean isInventoryOpen() { return mage != null && inventoryIsOpen; } // Somewhat hacky method to handle inventory close event knowing that this was a wand inventory that just closed. public boolean wasInventoryOpen() { return inventoryWasOpen; } @Override public void unbind() { if (!bound) return; com.elmakers.mine.bukkit.api.magic.Mage owningMage = this.mage; deactivate(); if (ownerId != null) { if (owningMage == null || !owningMage.getId().equals(ownerId)) { owningMage = controller.getRegisteredMage(ownerId); } if (owningMage != null) { owningMage.unbind(this); } ownerId = null; } bound = false; owner = null; setProperty("bound", false); setProperty("owner", null); setProperty("owner_id", null); saveState(); updateLore(); updateName(); } @Override public void bind() { if (bound) return; Mage holdingMage = mage; deactivate(); bound = true; setProperty("bound", true); saveState(); if (holdingMage != null) { holdingMage.checkWand(); } } @Override public void deactivate() { deactivate(true); } public void deactivate(boolean closePlayerInventory) { if (mage == null) return; // Play deactivate FX playPassiveEffects("deactivate"); // Cancel effects if (effectContext != null) { int cancelDelay = getInt("cancel_effects_delay", 0); if (cancelDelay == 0) { effectContext.cancelEffects(); } else { Plugin plugin = controller.getPlugin(); final WandEffectContext context = effectContext; plugin.getServer().getScheduler().runTaskLater(plugin, new Runnable() { @Override public void run() { context.cancelEffects(); } }, cancelDelay * 20 / 1000); } } Mage mage = this.mage; if (isInventoryOpen()) { closeInventory(closePlayerInventory); } showActiveIcon(false); storedInventory = null; if (usesXPNumber() || usesXPBar()) { mage.resetSentExperience(); } saveState(); mage.deactivateWand(this); this.mage = null; updateMaxMana(true); } @Nullable @Override public Spell getActiveSpell() { if (mage == null) return null; String activeSpellKey = getActiveSpellKey(); if (activeSpellKey == null || activeSpellKey.length() == 0) return null; return mage.getSpell(activeSpellKey); } @Nullable public Spell getAlternateSpell() { if (mage == null || alternateSpell == null || alternateSpell.length() == 0) return null; return mage.getSpell(alternateSpell); } @Nullable public Spell getAlternateSpell2() { if (mage == null || alternateSpell2 == null || alternateSpell2.length() == 0) return null; return mage.getSpell(alternateSpell2); } @Nullable @Override public SpellTemplate getBaseSpell(String spellName) { return getBaseSpell(new SpellKey(spellName)); } @Nullable public SpellTemplate getBaseSpell(SpellKey key) { if (!spells.contains(key.getBaseKey())) return null; SpellKey baseKey = new SpellKey(key.getBaseKey(), getSpellLevel(key.getBaseKey())); return 
controller.getSpellTemplate(baseKey.getKey()); } @Override public String getActiveSpellKey() { String activeSpellKey = activeSpell; Integer level = spellLevels.get(activeSpellKey); if (level != null) { activeSpellKey = new SpellKey(activeSpellKey, level).getKey(); } return activeSpellKey; } @Override public String getActiveBrushKey() { return activeBrush; } @Override public void damageDealt(double damage, Entity target) { if (manaPerDamage > 0) { int manaMax = getEffectiveManaMax(); float mana = getMana(); if (manaMax > 0 && mana < manaMax) { setMana(Math.min(manaMax, mana + (float)damage * manaPerDamage)); updateMana(); } } } public boolean alternateCast() { return cast(getAlternateSpell()); } public boolean alternateCast2() { return cast(getAlternateSpell2()); } @Override public boolean cast() { return cast(getActiveSpell(), null); } @Override public boolean cast(String[] parameters) { return cast(getActiveSpell(), parameters); } public boolean cast(Spell spell) { return cast(spell, null); } public boolean cast(Spell spell, String[] parameters) { if (spell != null) { Collection<String> castParameters = null; if (castOverrides != null && castOverrides.size() > 0) { castParameters = new ArrayList<>(); for (Map.Entry<String, String> entry : castOverrides.entrySet()) { String[] key = StringUtils.split(entry.getKey(), '.'); if (key.length == 0) continue; if (key.length == 2 && !key[0].equals("default") && !key[0].equals(spell.getSpellKey().getBaseKey()) && !key[0].equals(spell.getSpellKey().getKey())) { continue; } castParameters.add(key.length == 2 ? key[1] : key[0]); castParameters.add(entry.getValue()); } } if (parameters != null) { if (castParameters == null) { castParameters = new ArrayList<>(); } for (String parameter : parameters) { castParameters.add(parameter); } } if (spell.cast(castParameters == null ? null : castParameters.toArray(EMPTY_PARAMETERS))) { Color spellColor = spell.getColor(); use(); if (spellColor != null && this.effectColor != null) { this.effectColor = this.effectColor.mixColor(spellColor, effectColorSpellMixWeight); setProperty("effect_color", effectColor.toString()); // Note that we don't save this change. // The hope is that the wand will get saved at some point later // And we don't want to trigger NBT writes every spell cast. // And the effect color morphing isn't all that important if a few // casts get lost. } updateHotbarStatus(); return true; } } return false; } protected void use() { if (hasUses) { findItem(); ItemStack item = getItem(); if (item.getAmount() > 1) { item.setAmount(item.getAmount() - 1); } else { if (uses > 0) { uses--; } if (uses <= 0 && mage != null) { // If the wand is not currently active it will be destroyed on next activate Player player = mage.getPlayer(); deactivate(); PlayerInventory playerInventory = player.getInventory(); if (item.getAmount() > 1) { item.setAmount(item.getAmount() - 1); } else { if (isInOffhand) { playerInventory.setItemInOffHand(new ItemStack(Material.AIR, 1)); } else { playerInventory.setItemInMainHand(new ItemStack(Material.AIR, 1)); } } DeprecatedUtils.updateInventory(player); } setProperty("uses", uses); saveState(); updateName(); updateLore(); } } } // Taken from NMS HumanEntity public static int getExpToLevel(int expLevel) { return expLevel >= 30 ? 112 + (expLevel - 30) * 9 : (expLevel >= 15 ? 
37 + (expLevel - 15) * 5 : 7 + expLevel * 2); } public static int getExperience(int expLevel, float expProgress) { int xp = 0; for (int level = 0; level < expLevel; level++) { xp += Wand.getExpToLevel(level); } return xp + (int) (expProgress * Wand.getExpToLevel(expLevel)); } protected void updateHotbarStatus() { Player player = mage == null ? null : mage.getPlayer(); if (player != null && LiveHotbar && getMode() == WandMode.INVENTORY && isInventoryOpen()) { mage.updateHotbarStatus(); } } @Override public boolean tickMana() { if (isHeroes) { HeroesManager heroes = controller.getHeroes(); if (heroes != null && mage != null && mage.isPlayer()) { Player player = mage.getPlayer(); effectiveManaMax = heroes.getMaxMana(player); effectiveManaRegeneration = heroes.getManaRegen(player); setManaMax(effectiveManaMax); setManaRegeneration(effectiveManaRegeneration); setMana(heroes.getMana(player)); return true; } return false; } return super.tickMana(); } @Override public void tick() { if (mage == null) return; Player player = mage.getPlayer(); if (player == null) return; super.tick(); if (usesMana() && !isInOffhand) { updateMana(); } if (player.isBlocking() && blockMageCooldown > 0) { mage.setRemainingCooldown(blockMageCooldown); } // Update hotbar glow if (!isInOffhand) { updateHotbarStatus(); } if (!passive) { updateEffects(); } } @Override public void armorUpdated() { updateMaxMana(true); } protected void updateMaxMana(boolean updateLore) { if (isHeroes) return; if (!hasOwnMana() && mageClass != null) { if (mageClass.updateMaxMana(mage) && updateLore) { updateLore(); } effectiveManaMax = mageClass.getEffectiveManaMax(); effectiveManaRegeneration = mageClass.getEffectiveManaRegeneration(); } else if (super.updateMaxMana(mage) && updateLore) { updateLore(); } } public void cycleSpells(int direction) { ArrayList<String> spells = new ArrayList<>(this.spells); if (spells.size() == 0) return; if (activeSpell == null) { setActiveSpell(spells.get(0)); return; } int spellIndex = 0; for (int i = 0; i < spells.size(); i++) { if (spells.get(i).equals(activeSpell)) { spellIndex = i; break; } } spellIndex = (spellIndex + direction) % spells.size(); setActiveSpell(spells.get(spellIndex)); } public void cycleMaterials(int direction) { Set<String> materialsSet = getBrushes(); ArrayList<String> materials = new ArrayList<>(materialsSet); if (materials.size() == 0) return; if (activeBrush == null) { setActiveBrush(StringUtils.split(materials.get(0), '@')[0]); return; } int materialIndex = 0; for (int i = 0; i < materials.size(); i++) { if (StringUtils.split(materials.get(i),'@')[0].equals(activeBrush)) { materialIndex = i; break; } } materialIndex = (materialIndex + direction) % materials.size(); setActiveBrush(StringUtils.split(materials.get(materialIndex), '@')[0]); } @Nullable public Mage getActiveMage() { // TODO: Duplicate of #getMage() return mage; } public void setActiveMage(com.elmakers.mine.bukkit.api.magic.Mage mage) { if (mage instanceof Mage) { this.mage = (Mage)mage; armorUpdated(); } } @Nullable @Override public Color getEffectColor() { return effectColor == null ? null : effectColor.getColor(); } public Particle getEffectParticle() { return effectParticle; } @Nullable @Override public String getEffectParticleName() { return effectParticle == null ? 
null : effectParticle.name(); } @Nullable public Inventory getHotbar() { if (this.hotbars.size() == 0) return null; if (currentHotbar < 0 || currentHotbar >= this.hotbars.size()) { setCurrentHotbar(0); } return this.hotbars.get(currentHotbar); } public int getHotbarCount() { if (getMode() != WandMode.INVENTORY) return 0; return hotbars.size(); } public List<Inventory> getHotbars() { return hotbars; } @Override public boolean isQuickCastDisabled() { return quickCastDisabled; } public boolean isManualQuickCastDisabled() { return manualQuickCastDisabled; } @Override public boolean isQuickCast() { return quickCast; } public WandMode getMode() { return mode; } public WandMode getBrushMode() { return brushMode; } public void setMode(WandMode mode) { this.mode = mode; } public void setBrushMode(WandMode mode) { this.brushMode = mode; } @Override public boolean showCastMessages() { return quietLevel == 0; } @Override public boolean showMessages() { return quietLevel < 2; } public boolean isStealth() { return quietLevel > 2; } @Override public void setPath(String path) { String oldPath = this.path; this.path = path; setProperty("path", path); // Handle the case of a path upgrade meaning there are suddenly more spells or brushes available boolean updateInventory = limitBrushesToPath || limitSpellsToPath; if (!oldPath.equals(path) && updateInventory) { closeInventory(); if (limitSpellsToPath) { loadSpells(); } if (limitBrushesToPath) { loadBrushes(); } buildInventory(); } } /* * Public API Implementation */ @Override public boolean isLost(com.elmakers.mine.bukkit.api.wand.LostWand lostWand) { return this.id != null && this.id.equals(lostWand.getId()); } @Override public LostWand makeLost(Location location) { checkId(); saveState(); return new LostWand(this, location); } protected void showActiveIcon(boolean show) { if (this.icon == null || this.inactiveIcon == null || this.inactiveIcon.getMaterial() == Material.AIR || this.inactiveIcon.getMaterial() == null) return; if (this.icon.getMaterial() == Material.AIR || this.icon.getMaterial() == null) { this.icon.setMaterial(DefaultWandMaterial); } if (show) { if (inactiveIconDelay > 0) { Plugin plugin = controller.getPlugin(); plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { @Override public void run() { findItem(); icon.applyToItem(item); } }, inactiveIconDelay * 20 / 1000); } else { findItem(); icon.applyToItem(item); } } else { findItem(); inactiveIcon.applyToItem(this.item); } } public boolean activateOffhand(Mage mage) { return activate(mage, true); } @Override @Deprecated public void activate(com.elmakers.mine.bukkit.api.magic.Mage mage) { if (mage instanceof Mage) { activate((Mage)mage); } } public boolean activate(Mage mage) { return activate(mage, false); } public boolean activate(Mage mage, boolean offhand) { if (mage == null) return false; Player player = mage.getPlayer(); if (player == null) return false; if (!controller.hasWandPermission(player, this)) return false; InventoryView openInventory = player.getOpenInventory(); InventoryType inventoryType = openInventory.getType(); if (inventoryType == InventoryType.ENCHANTING || inventoryType == InventoryType.ANVIL) return false; if (hasUses && uses <= 0) { if (offhand) { player.getInventory().setItemInOffHand(new ItemStack(Material.AIR, 1)); } else { player.getInventory().setItemInMainHand(new ItemStack(Material.AIR, 1)); } return false; } if (!canUse(player)) { mage.sendMessage(getMessage("bound").replace("$name", getOwner())); return false; } if (this.isUpgrade) 
{ controller.getLogger().warning("Activated an upgrade item- this shouldn't happen"); return false; } WandPreActivateEvent preActivateEvent = new WandPreActivateEvent(mage, this); Bukkit.getPluginManager().callEvent(preActivateEvent); if (preActivateEvent.isCancelled()) { return false; } boolean needsSave = false; if (hasId) { needsSave = this.checkId() || needsSave; } else { setProperty("id", null); } this.mage = mage; this.isInOffhand = offhand; this.heldSlot = offhand ? OFFHAND_SLOT : player.getInventory().getHeldItemSlot(); if (mageClassKeys != null && !mageClassKeys.isEmpty()) { MageClass mageClass = null; for (String mageClassKey : mageClassKeys) { mageClass = mage.getClass(mageClassKey); if (mageClass != null) break; } if (mageClass == null) { Integer lastSlot = mage.getLastActivatedSlot(); if (!offhand && (lastSlot == null || lastSlot != player.getInventory().getHeldItemSlot())) { mage.setLastActivatedSlot(player.getInventory().getHeldItemSlot()); mage.sendMessage(controller.getMessages().get("mage.no_class").replace("$name", getName())); } return false; } setMageClass(mageClass); if (!offhand) { mage.setActiveClass(mageClass.getKey()); } } MageParameters wrapped = new MageParameters(mage); wrapped.wrap(configuration); load(wrapped); // This double-load here is not really ideal. // Seems hard to prevent without merging Wand construction and activation, though. loadProperties(); mage.setLastActivatedSlot(player.getInventory().getHeldItemSlot()); // Check for replacement template String replacementTemplate = getString("replace_on_activate", ""); if (!replacementTemplate.isEmpty() && !replacementTemplate.equals(template)) { playEffects("replace"); setTemplate(replacementTemplate); loadProperties(); saveState(); return activate(mage, offhand); } // Since these wands can't be opened we will just show them as open when held // We have to delay this 1 tick so it happens after the Mage has accepted the Wand if ((getMode() != WandMode.INVENTORY || offhand) && controller.isInitialized()) { Plugin plugin = controller.getPlugin(); plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { @Override public void run() { showActiveIcon(true); playPassiveEffects("open"); } }, 1); } // Check for an empty wand and auto-fill if (!isUpgrade && (controller.fillWands() || autoFill)) { fill(mage.getPlayer(), controller.getMaxWandFillLevel()); needsSave = true; } if (isHeroes) { HeroesManager heroes = controller.getHeroes(); if (heroes != null) { Set<String> skills = heroes.getSkills(player); Collection<String> currentSpells = new ArrayList<>(getSpells()); for (String spellKey : currentSpells) { if (spellKey.startsWith("heroes*") && !skills.contains(spellKey.substring(7))) { removeSpell(spellKey); } } // Hack to prevent messaging this.mage = null; for (String skillKey : skills) { String heroesKey = "heroes*" + skillKey; if (!spells.contains(heroesKey)) { addSpell(heroesKey); } } this.mage = mage; } } // Check for auto-organize if (autoOrganize && !isUpgrade) { organizeInventory(mage); needsSave = true; } // Check for auto-alphabetize if (autoAlphabetize && !isUpgrade) { alphabetizeInventory(); needsSave = true; } boolean forceUpdate = false; if (checkInventoryForUpgrades()) { forceUpdate = true; needsSave = true; } // Check for auto-bind if (bound) { String mageName = ChatColor.stripColor(mage.getPlayer().getDisplayName()); String mageId = mage.getId(); boolean ownerRenamed = owner != null && ownerId != null && ownerId.equals(mageId) && !owner.equals(mageName); if (ownerId == null 
|| ownerId.length() == 0 || owner == null || ownerRenamed) { takeOwnership(mage.getPlayer()); needsSave = true; } } // Check for randomized wands if (randomizeOnActivate) { randomize(); randomizeOnActivate = false; forceUpdate = true; needsSave = true; } // Don't build the inventory until activated so we can take Mage boosts into account if (offhand) { mage.setOffhandWand(this); } else { mage.setActiveWand(this); } buildInventory(); updateMaxMana(false); tick(); if (!isInOffhand) { updateMana(); } checkActiveMaterial(); if (needsSave) { saveState(); } updateActiveMaterial(); updateName(); updateLore(); // Play activate FX playPassiveEffects("activate"); lastSoundEffect = 0; lastParticleEffect = 0; lastSpellCast = 0; if (forceUpdate) { DeprecatedUtils.updateInventory(player); } return true; } public boolean checkInventoryForUpgrades() { boolean updated = false; Player player = mage == null ? null : mage.getPlayer(); if (player == null || mage.hasStoredInventory()) return false; // Check for spell or other special icons in the player's inventory Inventory inventory = player.getInventory(); ItemStack[] items = inventory.getContents(); for (int i = 0; i < items.length; i++) { ItemStack item = items[i]; if (addItem(item)) { inventory.setItem(i, null); updated = true; } } return updated; } private void setOpenInventoryPage(int page) { this.openInventoryPage = page; this.setProperty("page", page); } @Override public boolean organizeInventory() { if (mage != null) { return organizeInventory(mage); } return false; } @Override public boolean organizeInventory(com.elmakers.mine.bukkit.api.magic.Mage mage) { WandOrganizer organizer = new WandOrganizer(this, mage); closeInventory(); organizer.organize(); setOpenInventoryPage(0); setCurrentHotbar(currentHotbar); if (autoOrganize) setProperty("organize", false); autoOrganize = false; updateSpellInventory(); updateBrushInventory(); if (this.mage != null) { buildInventory(); } return true; } @Override public boolean alphabetizeInventory() { WandOrganizer organizer = new WandOrganizer(this); closeInventory(); organizer.alphabetize(); setOpenInventoryPage(0); setCurrentHotbar(0); if (autoAlphabetize) setProperty("alphabetize", false); autoAlphabetize = false; updateSpellInventory(); updateBrushInventory(); if (mage != null) { buildInventory(); } return true; } @Override public com.elmakers.mine.bukkit.api.wand.Wand duplicate() { ItemStack newItem = InventoryUtils.getCopy(item); Wand newWand = controller.getWand(newItem); newWand.saveState(); return newWand; } @Override @Deprecated public boolean configure(Map<String, Object> properties) { Map<Object, Object> convertedProperties = new HashMap<>(properties); configure(ConfigurationUtils.toConfigurationSection(convertedProperties)); return true; } @Override public void updated() { if (suspendUpdate) return; loadProperties(); if (mage != null) { buildInventory(); if (isInventoryOpen()) { updateInventory(); } } saveState(); updateMaxMana(false); updateName(); updateLore(); } @Override public boolean isLocked() { return this.locked; } @Override public boolean upgradesAllowed() { return !this.locked || this.lockedAllowUpgrades; } @Override public void unlock() { locked = false; setProperty("locked", false); } public boolean isPassive() { return passive; } @Override public boolean canUse(Player player) { if (!bound || ownerId == null || ownerId.length() == 0) return true; if (controller.hasPermission(player, "Magic.wand.override_bind", false)) return true; String playerId = 
controller.getMageIdentifier().fromEntity(player); if (ownerId.equalsIgnoreCase(playerId)) { return true; } // Fall back to checking the UUID rather than the mage ID // This can be removed when all AMC wands have been migrated return ownerId.equals(player.getUniqueId().toString()); } @Override public boolean addSpell(String spellName) { if (!isModifiable()) return false; return forceAddSpell(spellName); } @Override public boolean forceAddSpell(String spellName) { SpellTemplate template = controller.getSpellTemplate(spellName); if (template == null) { return false; } SpellKey spellKey = template.getSpellKey(); if (limitSpellsToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsSpell(spellKey.getBaseKey())) return false; } suspendUpdate = true; if (!super.addSpell(spellName)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); ItemStack spellItem = createSpellItem(spellKey.getKey()); if (spellItem == null) { return false; } int level = spellKey.getLevel(); int inventoryCount = inventories.size(); int spellCount = spells.size(); // Look for existing spells for spell upgrades Integer inventorySlot = spellInventory.get(spellKey.getBaseKey()); clearSlot(inventorySlot); setSpellLevel(spellKey.getBaseKey(), level); spells.add(spellKey.getBaseKey()); if (activeSpell == null || activeSpell.isEmpty()) { setActiveSpell(spellKey.getBaseKey()); } addToInventory(spellItem, inventorySlot); checkSpellLevelsAndInventory(); updateInventory(); updateHasInventory(); saveState(); updateLore(); if (mage != null) { if (spells.size() != spellCount) { if (spellCount == 0) { if (leftClickAction == WandAction.CAST) { String message = getMessage("spell_instructions", "").replace("$wand", getName()); mage.sendMessage(message.replace("$spell", template.getName())); } } else if (spellCount == 1) { String controlKey = getControlKey(WandAction.TOGGLE); String inventoryMessage = null; switch (getMode()) { case INVENTORY: inventoryMessage = "inventory_instructions"; break; case CHEST: inventoryMessage = "chest_instructions"; break; case SKILLS: inventoryMessage = "skills_instructions"; break; case CYCLE: inventoryMessage = "cycle_instructions"; if (controlKey == null) { controlKey = getControlKey(WandAction.CYCLE); } break; case CAST: case NONE: // Ignore break; } if (controlKey != null && inventoryMessage != null) { controlKey = controller.getMessages().get("controls." + controlKey); mage.sendMessage(getMessage(inventoryMessage, "") .replace("$wand", getName()).replace("$toggle", controlKey).replace("$cycle", controlKey)); } } if (inventoryCount == 1 && inventories.size() > 1) { mage.sendMessage(getMessage("page_instructions", "").replace("$wand", getName())); } } } return true; } /** * Covers the special case of a wand having spell levels and inventory slots that came from configs, * but now we've modified the spells list and need to figure out if we also need to pesist the levels and * slots separately. * * <p>This should all be moved to CasterProperties at some point to handle the same sort of issues with mage class * configs. 
*/ private void checkSpellLevelsAndInventory() { if (!spellLevels.isEmpty()) { MagicProperties storage = getStorage("spell_levels"); if (storage == null || storage == this) { if (!configuration.contains("spell_levels")) { configuration.set("spell_levels", spellLevels); } } } if (!spellInventory.isEmpty()) { MagicProperties storage = getStorage("spell_inventory"); if (storage == null || storage == this) { if (!configuration.contains("spell_inventory")) { configuration.set("spell_inventory", spellInventory); } } } } private void clearSlot(Integer slot) { if (slot != null) { Inventory inventory = getInventory(slot); slot = getInventorySlot(slot); inventory.setItem(slot, null); } } @Override public String getMessage(String messageKey, String defaultValue) { String message = super.getMessage(messageKey, defaultValue); // Some special-casing here, not sure how to avoid. if (messageKey.equals("hotbar_count_usage")) { String controlKey = getControlKey(WandAction.CYCLE_HOTBAR); if (controlKey != null) { controlKey = controller.getMessages().get("controls." + controlKey); message = message.replace("$cycle_hotbar", controlKey); } else { return ""; } } return message; } @Override protected String getMessageKey(String key) { String wandKey = "wands." + template + "." + key; if (template != null && !template.isEmpty() && controller.getMessages().containsKey(wandKey)) { return wandKey; } return "wand." + key; } @Override protected String parameterizeMessage(String message) { return message.replace("$wand", getName()); } @Override public boolean hasBrush(String materialKey) { if (limitBrushesToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsBrush(materialKey)) return false; } return getBrushes().contains(materialKey); } @Override public boolean hasSpell(String spellName) { return hasSpell(new SpellKey(spellName)); } @Override public boolean hasSpell(SpellKey spellKey) { if (!spells.contains(spellKey.getBaseKey())) return false; if (limitSpellsToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsSpell(spellKey.getBaseKey())) return false; } int level = getSpellLevel(spellKey.getBaseKey()); return (level >= spellKey.getLevel()); } @Override public boolean addBrush(String materialKey) { if (!isModifiable()) return false; if (limitBrushesToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsBrush(materialKey)) return false; } suspendUpdate = true; if (!super.addBrush(materialKey)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); ItemStack itemStack = createBrushIcon(materialKey); if (itemStack == null) return false; int inventoryCount = inventories.size(); int brushCount = brushes.size(); brushInventory.put(materialKey, null); brushes.add(materialKey); addToInventory(itemStack); if (activeBrush == null || activeBrush.length() == 0) { activateBrush(materialKey); } else { updateInventory(); } updateHasInventory(); saveState(); updateLore(); if (mage != null) { if (brushCount == 0) { String controlKey = getControlKey(WandAction.TOGGLE); if (controlKey != null) { controlKey = controller.getMessages().get("controls." 
+ controlKey); mage.sendMessage(getMessage("brush_instructions") .replace("$wand", getName()).replace("$toggle", controlKey)); } } if (inventoryCount == 1 && inventories.size() > 1) { mage.sendMessage(getMessage("page_instructions").replace("$wand", getName())); } } return true; } @Override public void setActiveBrush(String materialKey) { activateBrush(materialKey); if (materialKey == null || mage == null) { return; } com.elmakers.mine.bukkit.api.block.MaterialBrush brush = mage.getBrush(); if (brush == null) { return; } boolean eraseWasActive = brush.isEraseModifierActive(); brush.activate(mage.getLocation(), materialKey); BrushMode mode = brush.getMode(); if (mode == BrushMode.CLONE) { mage.sendMessage(getMessage("clone_material_activated")); } else if (mode == BrushMode.REPLICATE) { mage.sendMessage(getMessage("replicate_material_activated")); } if (!eraseWasActive && brush.isEraseModifierActive()) { mage.sendMessage(getMessage("erase_modifier_activated")); } } public void setActiveBrush(ItemStack itemStack) { if (!isBrush(itemStack)) return; setActiveBrush(getBrush(itemStack)); } public void activateBrush(String materialKey) { this.activeBrush = materialKey; setProperty("active_brush", this.activeBrush); saveState(); updateActiveMaterial(); updateName(); updateHotbar(); } @Override public void setActiveSpell(String activeSpell) { if (activeSpell != null) { SpellKey spellKey = new SpellKey(activeSpell); this.activeSpell = spellKey.getBaseKey(); } else { this.activeSpell = null; } checkActiveSpell(); setProperty("active_spell", this.activeSpell); saveState(); updateName(); } protected void checkActiveSpell() { // Support wands with just an active spell and no spells list if (activeSpell != null && !spells.isEmpty() && !spells.contains(activeSpell)) { activeSpell = null; } } @Override public boolean removeBrush(String materialKey) { if (!isModifiable() || materialKey == null) return false; if (limitBrushesToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsBrush(materialKey)) return false; } suspendUpdate = true; if (!super.removeBrush(materialKey)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); if (materialKey.equals(activeBrush)) { activeBrush = null; } clearSlot(brushInventory.get(materialKey)); brushInventory.remove(materialKey); boolean found = brushes.remove(materialKey); if (activeBrush == null && brushes.size() > 0) { activeBrush = brushes.iterator().next(); } updateActiveMaterial(); updateInventory(); updateBrushInventory(); saveState(); updateName(); updateLore(); return found; } @Override public boolean removeSpell(String spellName) { if (!isModifiable()) return false; SpellKey spellKey = new SpellKey(spellName); if (limitSpellsToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsSpell(spellKey.getBaseKey())) return false; } suspendUpdate = true; if (!super.removeSpell(spellName)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); if (activeSpell != null) { SpellKey activeKey = new SpellKey(activeSpell); if (spellKey.getBaseKey().equals(activeKey.getBaseKey())) { setActiveSpell(null); } } clearSlot(spellInventory.get(spellKey.getBaseKey())); spells.remove(spellKey.getBaseKey()); spellLevels.remove(spellKey.getBaseKey()); spellInventory.remove(spellKey.getBaseKey()); if (activeSpell == null && spells.size() > 0) { setActiveSpell(spells.iterator().next()); } checkSpellLevelsAndInventory(); updateInventory(); updateHasInventory(); updateSpellInventory(); saveState();
updateName(); updateLore(); return true; } @Override public Map<String, String> getOverrides() { return castOverrides == null ? new HashMap<>() : new HashMap<>(castOverrides); } @Override public void setOverrides(Map<String, String> overrides) { if (overrides == null) { this.castOverrides = null; } else { this.castOverrides = new HashMap<>(overrides); } updateOverrides(); } @Override public void removeOverride(String key) { if (castOverrides != null) { castOverrides.remove(key); updateOverrides(); } } @Override public void setOverride(String key, String value) { if (castOverrides == null) { castOverrides = new HashMap<>(); } if (value == null || value.length() == 0) { castOverrides.remove(key); } else { castOverrides.put(key, value); } updateOverrides(); } @Override public boolean addOverride(String key, String value) { if (castOverrides == null) { castOverrides = new HashMap<>(); } boolean modified = false; if (value == null || value.length() == 0) { modified = castOverrides.containsKey(key); castOverrides.remove(key); } else { String current = castOverrides.get(key); modified = current == null || !current.equals(value); castOverrides.put(key, value); } if (modified) { updateOverrides(); } return modified; } protected void updateOverrides() { if (castOverrides != null && !castOverrides.isEmpty()) { setProperty("overrides", castOverrides); } else { setProperty("overrides", null); } } public boolean hasStoredInventory() { return storedInventory != null; } public Inventory getStoredInventory() { return storedInventory; } public boolean addToStoredInventory(ItemStack item) { if (storedInventory == null) { return false; } HashMap<Integer, ItemStack> remainder = storedInventory.addItem(item); return remainder.size() == 0; } public void setHeldSlot(int slot) { this.heldSlot = slot; } public boolean storeInventory() { if (storedInventory != null) { if (mage != null) { mage.sendMessage("Your wand contains a previously stored inventory and will not activate, let go of it to clear."); } controller.getLogger().warning("Tried to store an inventory with one already present: " + (mage == null ? "?" 
: mage.getName())); return false; } Player player = mage.getPlayer(); if (player == null) { return false; } PlayerInventory inventory = player.getInventory(); storedInventory = CompatibilityUtils.createInventory(null, PLAYER_INVENTORY_SIZE, "Stored Inventory"); for (int i = 0; i < PLAYER_INVENTORY_SIZE; i++) { ItemStack item = inventory.getItem(i); storedInventory.setItem(i, item); if (i != heldSlot) { inventory.setItem(i, null); } } return true; } public boolean restoreInventory() { if (storedInventory == null) { return false; } Player player = mage.getPlayer(); if (player == null) { return false; } PlayerInventory inventory = player.getInventory(); for (int i = 0; i < storedInventory.getSize(); i++) { if (i != heldSlot) { inventory.setItem(i, storedInventory.getItem(i)); } } storedInventory = null; inventory.setHeldItemSlot(heldSlot); return true; } @Override @Deprecated public boolean isSoul() { return false; } public static boolean isBound(ItemStack item) { Object wandSection = InventoryUtils.getNode(item, WAND_KEY); if (wandSection == null) return false; String boundValue = InventoryUtils.getMetaString(wandSection, "owner_id"); return boundValue != null; } @Override public boolean isBound() { return bound; } @Nullable @Override public SpellTemplate getSpellTemplate(String spellKey) { SpellKey key = new SpellKey(spellKey); spellKey = key.getBaseKey(); if (!spells.contains(spellKey)) return null; Integer level = spellLevels.get(spellKey); if (level != null) { spellKey = new SpellKey(spellKey, level).getKey(); } return controller.getSpellTemplate(spellKey); } private void setSpellLevel(String spellKey, int level) { if (level <= 1) { spellLevels.remove(spellKey); } else { spellLevels.put(spellKey, level); } } @Override public int getSpellLevel(String spellKey) { Integer level = spellLevels.get(spellKey); return level == null ? 1 : level; } @Override public MageController getController() { return controller; } protected Map<String, Integer> getSpellInventory() { return new HashMap<>(spellInventory); } protected Map<String, Integer> getBrushInventory() { return new HashMap<>(brushInventory); } public Map<PotionEffectType, Integer> getPotionEffects() { return potionEffects; } @Override public float getHealthRegeneration() { Integer level = potionEffects.get(PotionEffectType.REGENERATION); return level != null && level > 0 ? (float)level : 0; } @Override public float getHungerRegeneration() { Integer level = potionEffects.get(PotionEffectType.SATURATION); return level != null && level > 0 ? 
(float)level : 0; } @Nullable @Override public WandTemplate getTemplate() { if (template == null || template.isEmpty()) return null; return controller.getWandTemplate(template); } public boolean playPassiveEffects(String effects) { WandTemplate wandTemplate = getTemplate(); if (wandTemplate != null && mage != null) { boolean offhandActive = mage.setOffhandActive(isInOffhand); boolean result = false; try { result = wandTemplate.playEffects(this, effects); } catch (Exception ex) { result = false; controller.getLogger().log(Level.WARNING, "Error playing effects " + effects + " from wand " + template, ex); } mage.setOffhandActive(offhandActive); return result; } return false; } @Override public boolean playEffects(String effects) { if (activeEffectsOnly && !inventoryIsOpen) { return false; } return playPassiveEffects(effects); } @Override public WandAction getDropAction() { return dropAction; } @Override public WandAction getRightClickAction() { return rightClickAction; } @Override public WandAction getLeftClickAction() { return leftClickAction; } @Override public WandAction getSwapAction() { return swapAction; } @Override public boolean performAction(WandAction action) { WandMode mode = getMode(); switch (action) { case CAST: cast(); break; case ALT_CAST: alternateCast(); break; case ALT_CAST2: alternateCast2(); break; case TOGGLE: if (mode == WandMode.CYCLE) { cycleActive(1); return true; } if (mode != WandMode.CHEST && mode != WandMode.INVENTORY && mode != WandMode.SKILLS) return false; toggleInventory(); break; case CYCLE: cycleActive(1); break; case CYCLE_REVERSE: cycleActive(-1); break; case CYCLE_HOTBAR: if (mode != WandMode.INVENTORY || !isInventoryOpen()) return false; if (getHotbarCount() > 1) { cycleHotbar(1); } else { closeInventory(); } break; case CYCLE_HOTBAR_REVERSE: if (mode != WandMode.INVENTORY) return false; if (getHotbarCount() > 1) { cycleHotbar(-1); } else if (isInventoryOpen()) { closeInventory(); } else { return false; } break; default: return false; } return true; } @Override public boolean checkAndUpgrade(boolean quiet) { WandUpgradePath path = getPath(); WandUpgradePath nextPath = path != null ? path.getUpgrade() : null; if (nextPath == null) { return true; } if (canProgress()) { return true; } if (!path.checkUpgradeRequirements(this, quiet ? null : mage)) { return false; } path.upgrade(this, mage); return true; } @Override public boolean hasUpgrade() { WandUpgradePath path = getPath(); return path != null && path.hasUpgrade(); } @Override public boolean checkUpgrade(boolean quiet) { WandUpgradePath path = getPath(); return path == null || !path.hasUpgrade() ? false : path.checkUpgradeRequirements(this, quiet ? null : mage); } @Override @Deprecated public boolean upgrade(Map<String, Object> properties) { Map<Object, Object> convertedProperties = new HashMap<>(properties); return upgrade(ConfigurationUtils.toConfigurationSection(convertedProperties)); } @Override public boolean upgrade(boolean quiet) { WandUpgradePath path = getPath(); if (path == null) return false; path.upgrade(this, quiet ? 
null : mage); return true; } @Override public boolean isBlocked(double angle) { if (mage == null) return false; if (blockChance == 0) return false; if (blockFOV > 0 && angle > blockFOV) return false; long lastBlock = mage.getLastBlockTime(); if (blockCooldown > 0 && lastBlock > 0 && lastBlock + blockCooldown > System.currentTimeMillis()) return false; boolean isBlocked = Math.random() <= blockChance; if (isBlocked) { playEffects("spell_blocked"); mage.setLastBlockTime(System.currentTimeMillis()); } return isBlocked; } @Override public boolean isReflected(double angle) { if (mage == null) return false; if (blockReflectChance == 0) return false; if (blockFOV > 0 && angle > blockFOV) return false; long lastBlock = mage.getLastBlockTime(); if (blockCooldown > 0 && lastBlock > 0 && lastBlock + blockCooldown > System.currentTimeMillis()) return false; boolean isReflected = Math.random() <= blockReflectChance; if (isReflected) { playEffects("spell_reflected"); if (mage != null) mage.setLastBlockTime(System.currentTimeMillis()); } return isReflected; } @Nullable @Override public Location getLocation() { if (mage == null) { return null; } Location wandLocation = mage.getEyeLocation(); wandLocation = mage.getOffsetLocation(wandLocation, isInOffhand, castLocation == null ? DEFAULT_CAST_OFFSET : castLocation); return wandLocation; } @Nullable @Override public Mage getMage() { return mage; } @Override public @Nullable MageClass getMageClass() { return mageClass; } @Override public @Nullable String getMageClassKey() { if (mageClass != null) { return mageClass.getKey(); } return mageClassKeys == null || mageClassKeys.isEmpty() ? null : mageClassKeys.get(0); } public void setCurrentHotbar(int hotbar) { this.currentHotbar = hotbar; setProperty("hotbar", currentHotbar); } public int getInventorySize() { WandMode mode = getMode(); if (mode == WandMode.CHEST || mode == WandMode.SKILLS) { return 9 * inventoryRows; } return INVENTORY_SIZE; } public boolean usesCurrency() { if (currencyDisplay == null || !hasSpellProgression || earnMultiplier <= 0 || !currencyDisplay.isValid()) return false; if (currencyDisplay.getKey().equals("sp") && !controller.isSPEarnEnabled()) return false; return true; } public boolean usesCurrency(String type) { return usesCurrency() && currencyDisplay.getKey().equals(type); } public boolean usesSP() { return controller.isSPEarnEnabled() && usesCurrency("sp"); } @Override public int getHeldSlot() { return heldSlot; } @Nullable @Override protected BaseMagicConfigurable getStorage(MagicPropertyType propertyType) { switch (propertyType) { case WAND: return this; case SUBCLASS: return mageClass; case CLASS: if (mageClass == null) { if (mage == null) { controller.getLogger().warning("Something is trying to modify a wand when it's not held, this may not work out"); } else { controller.getLogger().warning("Something is trying to modify a wand that has no class, this may not work out"); } Thread.dumpStack(); } return mageClass == null ? null : mageClass.getRoot(); case MAGE: if (mage == null) { controller.getLogger().warning("Something is trying to modify a wand when it's not held, this may not work out"); Thread.dumpStack(); } return mage == null ? null : mage.getProperties(); } return null; } @Override public boolean isPlayer() { return mage == null ? false : mage.isPlayer(); } @Nullable @Override public Player getPlayer() { return mage == null ? 
null : mage.getPlayer(); } @Override @Nonnull public WandEffectContext getEffectContext() { if (effectContext == null || (effectContext.getMage() != mage)) { // Lazy load or mage has changed effectContext = new WandEffectContext(mage, this); } return verifyNotNull(effectContext); } @Override public Wand getWand() { return this; } @Override public boolean isInOffhand() { return isInOffhand; } }
Magic/src/main/java/com/elmakers/mine/bukkit/wand/Wand.java
package com.elmakers.mine.bukkit.wand; import static com.google.common.base.Verify.verifyNotNull; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Random; import java.util.Set; import java.util.UUID; import java.util.logging.Level; import javax.annotation.Nonnull; import javax.annotation.Nullable; import org.apache.commons.lang.StringUtils; import org.bukkit.Bukkit; import org.bukkit.ChatColor; import org.bukkit.Color; import org.bukkit.Location; import org.bukkit.Material; import org.bukkit.Particle; import org.bukkit.block.BlockFace; import org.bukkit.command.CommandSender; import org.bukkit.configuration.ConfigurationSection; import org.bukkit.configuration.MemoryConfiguration; import org.bukkit.entity.Entity; import org.bukkit.entity.Player; import org.bukkit.event.inventory.InventoryType; import org.bukkit.inventory.Inventory; import org.bukkit.inventory.InventoryView; import org.bukkit.inventory.ItemStack; import org.bukkit.inventory.PlayerInventory; import org.bukkit.plugin.Plugin; import org.bukkit.potion.PotionEffectType; import org.bukkit.util.Vector; import com.elmakers.mine.bukkit.api.block.BrushMode; import com.elmakers.mine.bukkit.api.economy.Currency; import com.elmakers.mine.bukkit.api.event.WandPreActivateEvent; import com.elmakers.mine.bukkit.api.item.ItemData; import com.elmakers.mine.bukkit.api.magic.MageClassTemplate; import com.elmakers.mine.bukkit.api.magic.MageController; import com.elmakers.mine.bukkit.api.magic.MagicProperties; import com.elmakers.mine.bukkit.api.magic.MaterialSet; import com.elmakers.mine.bukkit.api.magic.Messages; import com.elmakers.mine.bukkit.api.spell.CostReducer; import com.elmakers.mine.bukkit.api.spell.Spell; import com.elmakers.mine.bukkit.api.spell.SpellKey; import com.elmakers.mine.bukkit.api.spell.SpellTemplate; import com.elmakers.mine.bukkit.api.wand.WandAction; import com.elmakers.mine.bukkit.block.MaterialAndData; import com.elmakers.mine.bukkit.block.MaterialBrush; import com.elmakers.mine.bukkit.effect.EffectPlayer; import com.elmakers.mine.bukkit.effect.SoundEffect; import com.elmakers.mine.bukkit.effect.WandEffectContext; import com.elmakers.mine.bukkit.effect.builtin.EffectRing; import com.elmakers.mine.bukkit.heroes.HeroesManager; import com.elmakers.mine.bukkit.magic.BaseMagicConfigurable; import com.elmakers.mine.bukkit.magic.Mage; import com.elmakers.mine.bukkit.magic.MageClass; import com.elmakers.mine.bukkit.magic.MageParameters; import com.elmakers.mine.bukkit.magic.MagicAttribute; import com.elmakers.mine.bukkit.magic.MagicController; import com.elmakers.mine.bukkit.magic.MagicPropertyType; import com.elmakers.mine.bukkit.utility.ColorHD; import com.elmakers.mine.bukkit.utility.CompatibilityUtils; import com.elmakers.mine.bukkit.utility.ConfigurationUtils; import com.elmakers.mine.bukkit.utility.DeprecatedUtils; import com.elmakers.mine.bukkit.utility.InventoryUtils; import com.elmakers.mine.bukkit.utility.NMSUtils; import com.google.common.base.Preconditions; public class Wand extends WandProperties implements CostReducer, com.elmakers.mine.bukkit.api.wand.Wand { public static final int OFFHAND_SLOT = 40; public static final int INVENTORY_SIZE = 27; public static final int PLAYER_INVENTORY_SIZE = 36; public static final int INVENTORY_ORGANIZE_BUFFER = 4; public static final int HOTBAR_SIZE = 9; public static final int HOTBAR_INVENTORY_SIZE = 
HOTBAR_SIZE - 1; public static final float DEFAULT_SPELL_COLOR_MIX_WEIGHT = 0.0001f; public static boolean FILL_CREATOR = false; public static Vector DEFAULT_CAST_OFFSET = new Vector(0, 0, 0.5); public static String DEFAULT_WAND_TEMPLATE = "default"; private static final String[] EMPTY_PARAMETERS = new String[0]; private static final Random random = new Random(); /** * The item as it appears in the inventory of the player. */ protected @Nullable ItemStack item; /** * The currently active mage. * * <p>Is only set when the wand is active or when the wand is * used for off-hand casting. */ protected @Nullable Mage mage; protected @Nullable WandEffectContext effectContext; // Cached state private String id = ""; private List<Inventory> hotbars; private List<Inventory> inventories; private Map<String, Integer> spellInventory = new HashMap<>(); private Set<String> spells = new LinkedHashSet<>(); private Map<String, Integer> spellLevels = new HashMap<>(); private Map<String, Integer> brushInventory = new HashMap<>(); private Set<String> brushes = new LinkedHashSet<>(); private String activeSpell = ""; private String alternateSpell = ""; private String alternateSpell2 = ""; private String activeBrush = ""; protected String wandName = ""; protected String description = ""; private String owner = ""; private String ownerId = ""; private String template = ""; private String path = ""; private List<String> mageClassKeys = null; private boolean superProtected = false; private boolean superPowered = false; private boolean glow = false; private boolean bound = false; private boolean indestructible = false; private boolean undroppable = false; private boolean keep = false; private boolean passive = false; private boolean autoOrganize = false; private boolean autoAlphabetize = false; private boolean autoFill = false; private boolean isUpgrade = false; private boolean randomizeOnActivate = true; private boolean rename = false; private boolean renameDescription = false; private boolean quickCast = false; private boolean quickCastDisabled = false; private boolean manualQuickCastDisabled = false; private boolean isInOffhand = false; private boolean hasId = false; private boolean suspendUpdate = false; private int inventoryRows = 1; private Vector castLocation; private WandAction leftClickAction = WandAction.NONE; private WandAction rightClickAction = WandAction.NONE; private WandAction dropAction = WandAction.NONE; private WandAction swapAction = WandAction.NONE; private MaterialAndData icon = null; private MaterialAndData upgradeIcon = null; private MaterialAndData inactiveIcon = null; private int inactiveIconDelay = 0; private String upgradeTemplate = null; protected float consumeReduction = 0; protected float cooldownReduction = 0; protected float costReduction = 0; protected Map<String, Double> protection; private float power = 0; private float earnMultiplier = 1; private float blockFOV = 0; private float blockChance = 0; private float blockReflectChance = 0; private int blockMageCooldown = 0; private int blockCooldown = 0; private int maxEnchantCount = 0; private int enchantCount = 0; private boolean hasInventory = false; private boolean locked = false; private boolean lockedAllowUpgrades = false; private boolean forceUpgrade = false; private boolean isHeroes = false; private int uses = 0; private boolean hasUses = false; private boolean isSingleUse = false; private boolean limitSpellsToPath = false; private boolean limitBrushesToPath = false; private Currency currencyDisplay = null; private float 
manaPerDamage = 0; private ColorHD effectColor = null; private float effectColorSpellMixWeight = DEFAULT_SPELL_COLOR_MIX_WEIGHT; private Particle effectParticle = null; private float effectParticleData = 0; private int effectParticleCount = 0; private int effectParticleInterval = 0; private double effectParticleMinVelocity = 0; private double effectParticleRadius = 0; private double effectParticleOffset = 0; private boolean effectBubbles = false; private boolean activeEffectsOnly = false; private EffectRing effectPlayer = null; private int castInterval = 0; private double castMinVelocity = 0; private Vector castVelocityDirection = null; private String castSpell = null; private ConfigurationSection castParameters = null; private Map<PotionEffectType, Integer> potionEffects = new HashMap<>(); private SoundEffect effectSound = null; private int effectSoundInterval = 0; private int quietLevel = 0; private Map<String, String> castOverrides = null; // Transient state private boolean hasSpellProgression = false; private long lastSoundEffect; private long lastParticleEffect; private long lastSpellCast; // Inventory functionality private WandMode mode = null; private WandMode brushMode = null; private int openInventoryPage = 0; private boolean inventoryIsOpen = false; private boolean inventoryWasOpen = false; private Inventory displayInventory = null; private int currentHotbar = 0; public static WandManaMode manaMode = WandManaMode.BAR; public static WandManaMode currencyMode = WandManaMode.NUMBER; public static boolean regenWhileInactive = true; public static Material DefaultUpgradeMaterial = Material.NETHER_STAR; public static Material DefaultWandMaterial = Material.BLAZE_ROD; public static Material EnchantableWandMaterial = null; public static boolean SpellGlow = false; public static boolean BrushGlow = false; public static boolean BrushItemGlow = true; public static boolean LiveHotbar = true; public static boolean LiveHotbarSkills = false; public static boolean LiveHotbarCooldown = true; public static boolean Unbreakable = false; public static boolean Unstashable = true; public static SoundEffect inventoryOpenSound = null; public static SoundEffect inventoryCloseSound = null; public static SoundEffect inventoryCycleSound = null; public static SoundEffect noActionSound = null; public static SoundEffect itemPickupSound = null; public static String WAND_KEY = "wand"; public static String UPGRADE_KEY = "wand_upgrade"; public static String WAND_SELF_DESTRUCT_KEY = null; public static byte HIDE_FLAGS = 63; public static String brushSelectSpell = ""; private Inventory storedInventory = null; private int heldSlot = 0; public Wand(MagicController controller) { super(controller); hotbars = new ArrayList<>(); inventories = new ArrayList<>(); } /** * @deprecated Use {@link MagicController#getWand(ItemStack)}. */ @Deprecated public Wand(MagicController controller, ItemStack itemStack) { this(controller); Preconditions.checkNotNull(itemStack); if (itemStack.getType() == Material.AIR) { itemStack.setType(DefaultWandMaterial); } this.icon = new MaterialAndData(itemStack); item = itemStack; boolean needsSave = false; boolean isWand = isWand(item); boolean isUpgradeItem = isUpgrade(item); if (isWand || isUpgradeItem) { ConfigurationSection wandConfig = itemToConfig(item, new MemoryConfiguration()); // Check for template migration WandTemplate wandTemplate = controller.getWandTemplate(wandConfig.getString("template")); WandTemplate migrateTemplate = wandTemplate == null ? 
null : wandTemplate.getMigrateTemplate(); if (migrateTemplate != null) { wandConfig.set("template", migrateTemplate.getKey()); } // Check for wand data migration int version = wandConfig.getInt("version", 0); if (version < CURRENT_VERSION) { // Migration will be handled by CasterProperties, this is just here // So that we save the data after to avoid re-migrating. needsSave = true; } randomizeOnActivate = !wandConfig.contains("icon"); load(wandConfig); } else { updateIcon(); needsSave = true; } loadProperties(); // Migrate old upgrade items if ((isUpgrade || isUpgradeItem) && isWand) { needsSave = true; InventoryUtils.removeMeta(item, WAND_KEY); } if (needsSave) { saveState(); updateName(); updateLore(); } } public Wand(MagicController controller, ConfigurationSection config) { this(controller, DefaultWandMaterial, (short)0); load(config); loadProperties(); updateName(); updateLore(); saveState(); } protected Wand(MagicController controller, String templateName) throws UnknownWandException { this(controller); // Default to "default" wand if (templateName == null || templateName.length() == 0) { templateName = DEFAULT_WAND_TEMPLATE; } // Check for randomized/pre-enchanted wands int level = 0; if (templateName.contains("(")) { String levelString = templateName.substring(templateName.indexOf('(') + 1, templateName.length() - 1); try { level = Integer.parseInt(levelString); } catch (Exception ex) { throw new IllegalArgumentException(ex); } templateName = templateName.substring(0, templateName.indexOf('(')); } WandTemplate template = controller.getWandTemplate(templateName); if (template == null) { throw new UnknownWandException(templateName); } WandTemplate migrateTemplate = template.getMigrateTemplate(); if (migrateTemplate != null) { template = migrateTemplate; templateName = migrateTemplate.getKey(); } setTemplate(templateName); setProperty("version", CURRENT_VERSION); ConfigurationSection templateConfig = template.getConfiguration(); if (templateConfig == null) { throw new UnknownWandException(templateName); } // Load all properties loadProperties(); // Enchant, if an enchanting level was provided if (level > 0) { // Account for randomized locked wands boolean wasLocked = locked; locked = false; randomize(level, false, null, true); locked = wasLocked; } // Don't randomize now if set to randomize later // Otherwise, do this here so the description updates if (!randomizeOnActivate) { randomize(); } updateName(); updateLore(); saveState(); } public Wand(MagicController controller, Material icon, short iconData) { // This will make the Bukkit ItemStack into a real ItemStack with NBT data. this(controller, InventoryUtils.makeReal(new ItemStack(icon, 1, iconData))); saveState(); updateName(); } @Override @SuppressWarnings("unchecked") protected void migrate(int version, ConfigurationSection wandConfig) { // First migration, clean out wand data that matches template // We've done this twice now, the second time to handle removing hard-coded defaults that // were not present in the template configs. 
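        // Migration steps, applied in order below:
        //   version <= 1: strip wand properties that just duplicate the template config
        //   version <= 3: strip the icon if it matches the template icon
        //   version <= 4: collapse leveled spell keys in the spell inventory down to base keys
        //   version <= 5: move "attributes" to "item_attributes"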
if (version <= 1) { ConfigurationSection templateConfig = controller.getWandTemplateConfiguration(wandConfig.getString("template")); if (templateConfig != null) { // This is an unfortunate special case for wands waiting to be randomized String randomizeIcon = templateConfig.getString("randomize_icon"); String currentIcon = wandConfig.getString("icon"); if (randomizeIcon != null && currentIcon != null && randomizeIcon.equals(currentIcon)) { wandConfig.set("icon", null); } // This was a potentially leftover property from randomized wands we can ditch wandConfig.set("randomize", null); Set<String> keys = templateConfig.getKeys(false); for (String key : keys) { Object templateData = templateConfig.get(key); Object wandData = wandConfig.get(key); if (wandData == null) continue; String templateString = templateData.toString(); String wandString = wandData.toString(); if (templateData instanceof List) { templateString = templateString.substring(1, templateString.length() - 1); templateString = templateString.replace(", ", ","); templateData = templateString; } if (wandString.equalsIgnoreCase(templateString)) { wandConfig.set(key, null); continue; } try { double numericValue = Double.parseDouble(wandString); double numericTemplate = Double.parseDouble(templateString); if (numericValue == numericTemplate) { wandConfig.set(key, null); continue; } } catch (NumberFormatException ignored) { } if (wandData.equals(templateData)) { wandConfig.set(key, null); } } } } // Remove icon if matches template if (version <= 3) { ConfigurationSection templateConfig = controller.getWandTemplateConfiguration(wandConfig.getString("template")); String templateIcon = templateConfig == null ? null : templateConfig.getString("icon"); if (templateIcon != null && templateIcon.equals(wandConfig.getString("icon", ""))) { wandConfig.set("icon", null); } } // Migration: remove level from spell inventory if (version <= 4) { Object spellInventoryRaw = wandConfig.get("spell_inventory"); if (spellInventoryRaw != null) { Map<String, ? extends Object> spellInventory = null; Map<String, Integer> newSpellInventory = new HashMap<>(); if (spellInventoryRaw instanceof Map) { spellInventory = (Map<String, ? extends Object>)spellInventoryRaw; } else if (spellInventoryRaw instanceof ConfigurationSection) { spellInventory = NMSUtils.getMap((ConfigurationSection)spellInventoryRaw); } if (spellInventory != null) { for (Map.Entry<String, ? extends Object> spellEntry : spellInventory.entrySet()) { Object slot = spellEntry.getValue(); if (slot != null && slot instanceof Integer) { SpellKey spellKey = new SpellKey(spellEntry.getKey()); // Prefer to use the base spell if present since that is what we'd be // using on load. 
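                            // e.g. a leveled entry such as "missile|2" gets stored under "missile",
                            // keeping whatever slot the base key already had.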
Object testSlot = spellInventory.get(spellKey.getBaseKey()); if (testSlot != null) { slot = testSlot; } newSpellInventory.put(spellKey.getBaseKey(), (Integer)slot); } } wandConfig.set("spell_inventory", newSpellInventory); } } } // Migration: move attributes to item_attributes if (version <= 5) { ConfigurationSection attributes = wandConfig.getConfigurationSection("attributes"); wandConfig.set("attributes", null); wandConfig.set("item_attributes", attributes); } super.migrate(version, wandConfig); } @Override public void load(ConfigurationSection configuration) { if (configuration != null) { setTemplate(configuration.getString("template")); } super.load(configuration); } protected void updateHotbarCount() { int hotbarCount = Math.max(1, getInt("hotbar_count", 1)); if (hotbarCount != hotbars.size()) { if (isInventoryOpen()) { closeInventory(); } hotbars.clear(); while (hotbars.size() < hotbarCount) { hotbars.add(CompatibilityUtils.createInventory(null, HOTBAR_INVENTORY_SIZE, getInventoryTitle())); } while (hotbars.size() > hotbarCount) { hotbars.remove(0); } } } @Override public void unenchant() { controller.cleanItem(item); clear(); } public void updateItemIcon() { setIcon(icon); } protected void updateIcon() { if (icon != null && icon.getMaterial() != null && icon.getMaterial() != Material.AIR) { String iconKey = icon.getKey(); if (iconKey != null && iconKey.isEmpty()) { iconKey = null; } WandTemplate template = getTemplate(); String templateIcon = template != null ? template.getProperty("icon", "") : null; if (templateIcon == null || !templateIcon.equals(iconKey)) { setProperty("icon", iconKey); } } } @Override public void setInactiveIcon(com.elmakers.mine.bukkit.api.block.MaterialAndData materialData) { if (materialData == null) { inactiveIcon = null; } else if (materialData instanceof MaterialAndData) { inactiveIcon = ((MaterialAndData)materialData); } else { inactiveIcon = new MaterialAndData(materialData); } String inactiveIconKey = null; if (inactiveIcon != null && inactiveIcon.getMaterial() != null && inactiveIcon.getMaterial() != Material.AIR) { inactiveIconKey = inactiveIcon.getKey(); if (inactiveIconKey != null && inactiveIconKey.isEmpty()) { inactiveIconKey = null; } } setProperty("inactive_icon", inactiveIconKey); updateItemIcon(); } public void setIcon(Material material, byte data) { setIcon(material == null ? 
null : new MaterialAndData(material, data)); updateIcon(); } @Override public void setIcon(com.elmakers.mine.bukkit.api.block.MaterialAndData materialData) { if (materialData instanceof MaterialAndData) { setIcon((MaterialAndData)materialData); } else { setIcon(new MaterialAndData(materialData)); } updateIcon(); } public void setIcon(MaterialAndData materialData) { if (materialData == null || !materialData.isValid()) return; if (materialData.getMaterial() == Material.AIR || materialData.getMaterial() == null) { materialData.setMaterial(DefaultWandMaterial); } icon = materialData; if (item == null) { item = InventoryUtils.makeReal(this.icon.getItemStack(1)); } Short durability = null; if (!indestructible && !isUpgrade && icon.getMaterial().getMaxDurability() > 0) { durability = item.getDurability(); } try { if (inactiveIcon == null || (mage != null && getMode() == WandMode.INVENTORY && isInventoryOpen())) { icon.applyToItem(item); } else { inactiveIcon.applyToItem(item); } } catch (Exception ex) { controller.getLogger().log(Level.WARNING, "Unable to apply wand icon", ex); item.setType(DefaultWandMaterial); } if (durability != null) { item.setDurability(durability); } // Make indestructible // The isUpgrade checks here and above are for using custom icons in 1.9, this is a bit hacky. if ((indestructible || Unbreakable || isUpgrade) && !manaMode.useDurability()) { CompatibilityUtils.makeUnbreakable(item); } else { CompatibilityUtils.removeUnbreakable(item); } CompatibilityUtils.hideFlags(item, getProperty("hide_flags", HIDE_FLAGS)); } @Override public void makeUpgrade() { if (!isUpgrade) { isUpgrade = true; String oldName = wandName; String newName = getMessage("upgrade_name"); newName = newName.replace("$name", oldName); String newDescription = controller.getMessages().get("wand.upgrade_default_description"); if (template != null && template.length() > 0) { newDescription = controller.getMessages().get("wands." + template + ".upgrade_description", description); } setIcon(DefaultUpgradeMaterial, (byte) 0); setName(newName); setDescription(newDescription); InventoryUtils.removeMeta(item, WAND_KEY); saveState(); updateName(true); updateLore(); } } public void newId() { id = UUID.randomUUID().toString(); setProperty("id", id); } public boolean checkId() { if (id == null || id.length() == 0) { newId(); return true; } return false; } @Override public String getId() { return id; } public boolean isModifiable() { return !locked; } @Override public boolean isIndestructible() { return indestructible; } @Override public boolean isUndroppable() { return undroppable; } public boolean isUpgrade() { return isUpgrade; } public static boolean isUpgrade(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, UPGRADE_KEY); } @Override public boolean usesMana() { if (isCostFree()) return false; return getManaMax() > 0 || (isHeroes && mage != null); } @Override public void removeMana(float amount) { if (isHeroes && mage != null) { HeroesManager heroes = controller.getHeroes(); if (heroes != null) { heroes.removeMana(mage.getPlayer(), (int)Math.ceil(amount)); } } super.removeMana(amount); updateMana(); } @Override public float getCostReduction() { if (mage != null) { float reduction = mage.getCostReduction(); return passive ? reduction : stackPassiveProperty(reduction, costReduction * controller.getMaxCostReduction()); } return costReduction; } @Override public float getCooldownReduction() { if (mage != null) { float reduction = mage.getCooldownReduction(); return passive ? 
reduction : stackPassiveProperty(reduction, cooldownReduction * controller.getMaxCooldownReduction()); } return cooldownReduction; } @Override public float getConsumeReduction() { if (mage != null) { float reduction = mage.getConsumeReduction(); return passive ? reduction : stackPassiveProperty(reduction, consumeReduction); } return consumeReduction; } @Override public float getCostScale() { return 1; } @Override public boolean hasInventory() { return hasInventory; } @Override public float getPower() { return power; } @Override public boolean isSuperProtected() { return superProtected; } @Override public boolean isSuperPowered() { return superPowered; } @Override public boolean isConsumeFree() { return consumeReduction >= 1; } @Override public boolean isCooldownFree() { return cooldownReduction > 1; } @Override public String getName() { return ChatColor.translateAlternateColorCodes('&', wandName); } public String getDescription() { return description; } public String getOwner() { return owner == null ? "" : owner; } public String getOwnerId() { return ownerId; } @Override public long getWorth() { long worth = 0; // TODO: Item properties, brushes, etc Set<String> spells = getSpells(); for (String spellKey : spells) { SpellTemplate spell = controller.getSpellTemplate(spellKey); if (spell != null) { worth = (long)(worth + spell.getWorth()); } } return worth; } @Override public void setName(String name) { wandName = ChatColor.stripColor(name); setProperty("name", wandName); updateName(); } public void setTemplate(String templateName) { this.template = templateName; WandTemplate wandTemplate = controller.getWandTemplate(templateName); if (wandTemplate != null) { setWandTemplate(wandTemplate); } setProperty("template", template); } @Override public String getTemplateKey() { return this.template; } @Override public boolean hasTag(String tag) { WandTemplate template = getTemplate(); return template != null && template.hasTag(tag); } @Override public WandUpgradePath getPath() { String pathKey = path; if (pathKey == null || pathKey.length() == 0) { pathKey = controller.getDefaultWandPath(); } return WandUpgradePath.getPath(pathKey); } public boolean hasPath() { return path != null && path.length() > 0; } @Override public void setDescription(String description) { this.description = description; setProperty("description", description); updateLore(); } public boolean tryToOwn(Player player) { if (ownerId == null || ownerId.length() == 0) { takeOwnership(player); return true; } return false; } public void takeOwnership(Player player) { Mage mage = this.mage; if (mage == null) { mage = controller.getMage(player); } if ((ownerId == null || ownerId.length() == 0) && quietLevel < 2) { mage.sendMessage(getMessage("bound_instructions", "").replace("$wand", getName())); String spellKey = getActiveSpellKey(); SpellTemplate spellTemplate = spellKey != null && !spellKey.isEmpty() ? controller.getSpellTemplate(spellKey) : null; if (spellTemplate != null) { String message = getMessage("spell_instructions", "").replace("$wand", getName()); mage.sendMessage(message.replace("$spell", spellTemplate.getName())); } if (spells.size() > 1) { String controlKey = getControlKey(WandAction.TOGGLE); if (controlKey != null) { controlKey = controller.getMessages().get("controls." 
+ controlKey); mage.sendMessage(getMessage("inventory_instructions", "") .replace("$wand", getName()).replace("$toggle", controlKey)); } } com.elmakers.mine.bukkit.api.wand.WandUpgradePath path = getPath(); if (path != null) { String message = getMessage("enchant_instructions", "").replace("$wand", getName()); mage.sendMessage(message); } } owner = ChatColor.stripColor(player.getDisplayName()); ownerId = mage.getId(); setProperty("owner", owner); setProperty("owner_id", ownerId); updateLore(); saveState(); } @Nullable public String getControlKey(WandAction action) { String controlKey = null; if (rightClickAction == action) { controlKey = "right_click"; } else if (dropAction == action) { controlKey = "drop"; } else if (leftClickAction == action) { controlKey = "left_click"; } else if (swapAction == action) { controlKey = "swap"; } return controlKey; } @Nullable @Override public ItemStack getItem() { return item; } public void setItem(ItemStack item) { this.item = item; } @Override public com.elmakers.mine.bukkit.api.block.MaterialAndData getIcon() { return icon; } @Override public com.elmakers.mine.bukkit.api.block.MaterialAndData getInactiveIcon() { return inactiveIcon; } protected List<Inventory> getAllInventories() { int hotbarCount = getHotbarCount(); List<Inventory> allInventories = new ArrayList<>(inventories.size() + hotbarCount); if (hotbarCount > 0) { allInventories.addAll(hotbars); } allInventories.addAll(inventories); return allInventories; } @Override public Set<String> getBaseSpells() { return spells; } @Override protected @Nonnull Map<String, Integer> getSpellLevels() { return spellLevels; } @Override public Set<String> getSpells() { Set<String> spellSet = new HashSet<>(); for (String key : spells) { Integer level = spellLevels.get(key); if (level != null) { spellSet.add(new SpellKey(key, level).getKey()); } else { spellSet.add(key); } } return spellSet; } @Override public Set<String> getBrushes() { return brushes; } @Nullable protected Integer parseSlot(String[] pieces) { Integer slot = null; if (pieces.length > 1) { try { slot = Integer.parseInt(pieces[1]); } catch (Exception ex) { slot = null; } if (slot != null && slot < 0) { slot = null; } } return slot; } protected void addToInventory(ItemStack itemStack, Integer slot) { if (slot == null) { addToInventory(itemStack); return; } Inventory inventory = getInventory(slot); slot = getInventorySlot(slot); ItemStack existing = inventory.getItem(slot); inventory.setItem(slot, itemStack); if (existing != null && existing.getType() != Material.AIR) { addToInventory(existing); } } public void addToInventory(ItemStack itemStack) { if (itemStack == null || itemStack.getType() == Material.AIR) { return; } if (getBrushMode() != WandMode.INVENTORY && isBrush(itemStack)) { String brushKey = getBrush(itemStack); if (!MaterialBrush.isSpecialMaterialKey(brushKey) || MaterialBrush.isSchematic(brushKey)) { return; } } List<Inventory> checkInventories = getAllInventories(); boolean added = false; WandMode mode = getMode(); int fullSlot = 0; for (Inventory inventory : checkInventories) { int inventorySize = inventory.getSize(); Integer slot = null; int freeSpace = 0; for (int i = 0; i < inventorySize && freeSpace < INVENTORY_ORGANIZE_BUFFER; i++) { ItemStack existing = inventory.getItem(i); if (InventoryUtils.isEmpty(existing)) { if (slot == null) { slot = i; } freeSpace++; } } // Don't leave free space in hotbars if (slot != null && (freeSpace >= INVENTORY_ORGANIZE_BUFFER || inventorySize == HOTBAR_INVENTORY_SIZE || mode == WandMode.CHEST)) { 
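            // Use this inventory page if it has spare room, or if it is a hotbar or a
            // chest-mode page, where no organizational buffer space is reserved.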
added = true; inventory.setItem(slot, itemStack); fullSlot += slot; break; } fullSlot += inventory.getSize(); } if (!added) { fullSlot = getHotbarSize() + getInventorySize() * inventories.size(); Inventory newInventory = CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle()); newInventory.addItem(itemStack); inventories.add(newInventory); } updateSlot(fullSlot, itemStack); } protected @Nonnull Inventory getInventoryByIndex(int inventoryIndex) { // Auto create while (inventoryIndex >= inventories.size()) { inventories.add(CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle())); } return inventories.get(inventoryIndex); } protected int getHotbarSize() { if (getMode() != WandMode.INVENTORY) return 0; return hotbars.size() * HOTBAR_INVENTORY_SIZE; } protected @Nonnull Inventory getInventory(int slot) { int hotbarSize = getHotbarSize(); if (slot < hotbarSize) { return hotbars.get(slot / HOTBAR_INVENTORY_SIZE); } int inventoryIndex = (slot - hotbarSize) / getInventorySize(); return getInventoryByIndex(inventoryIndex); } protected int getInventorySlot(int slot) { int hotbarSize = getHotbarSize(); if (slot < hotbarSize) { return slot % HOTBAR_INVENTORY_SIZE; } return ((slot - hotbarSize) % getInventorySize()); } protected void buildInventory() { // Force an update of the display inventory since chest mode is a different size displayInventory = null; updateHotbarCount(); for (Inventory hotbar : hotbars) { hotbar.clear(); } inventories.clear(); List<ItemStack> unsorted = new ArrayList<>(); for (String key : spells) { int spellLevel = getSpellLevel(key); SpellKey spellKey = new SpellKey(key, spellLevel); SpellTemplate spell = mage == null ? controller.getSpellTemplate(spellKey.getKey()) : mage.getSpell(spellKey.getKey()); ItemStack itemStack = createSpellItem(spellKey.getKey(), "", false); if (itemStack != null) { Integer slot = spellInventory.get(spell.getSpellKey().getBaseKey()); if (slot == null) { unsorted.add(itemStack); } else { addToInventory(itemStack, slot); } } } WandMode brushMode = getBrushMode(); for (String brushKey : brushes) { boolean addToInventory = brushMode == WandMode.INVENTORY || (MaterialBrush.isSpecialMaterialKey(brushKey) && !MaterialBrush.isSchematic(brushKey)); if (addToInventory) { ItemStack itemStack = createBrushIcon(brushKey); if (itemStack == null) { controller.getPlugin().getLogger().warning("Unable to create brush icon for key " + brushKey); continue; } Integer slot = brushInventory.get(brushKey); if (activeBrush == null || activeBrush.length() == 0) activeBrush = brushKey; addToInventory(itemStack, slot); } } for (ItemStack unsortedItem : unsorted) { addToInventory(unsortedItem); } updateHasInventory(); if (openInventoryPage >= inventories.size() && openInventoryPage != 0 && hasInventory) { setOpenInventoryPage(0); } } protected void parseSpells(String spellString) { // Support YML-List-As-String format // Maybe don't need this anymore since loading lists is now a separate path spellString = spellString.replaceAll("[\\]\\[]", ""); String[] spellNames = StringUtils.split(spellString, ','); loadSpells(Arrays.asList(spellNames)); } protected void clearSpells() { spellLevels.clear(); spells.clear(); } protected void loadSpells(Collection<String> spellKeys) { clearSpells(); WandUpgradePath path = getPath(); for (String spellName : spellKeys) { String[] pieces = StringUtils.split(spellName, '@'); Integer slot = parseSlot(pieces); // Handle aliases and upgrades smoothly String loadedKey = pieces[0].trim(); SpellKey 
spellKey = new SpellKey(loadedKey); SpellTemplate spell = controller.getSpellTemplate(loadedKey); if (limitSpellsToPath && path != null && !path.containsSpell(spellKey.getBaseKey())) continue; // Downgrade spells if higher levels have gone missing while (spell == null && spellKey.getLevel() > 0) { spellKey = new SpellKey(spellKey.getBaseKey(), spellKey.getLevel() - 1); spell = controller.getSpellTemplate(spellKey.getKey()); } if (spell != null) { spellKey = spell.getSpellKey(); Integer currentLevel = spellLevels.get(spellKey.getBaseKey()); if (spellKey.getLevel() > 1 && (currentLevel == null || currentLevel < spellKey.getLevel())) { setSpellLevel(spellKey.getBaseKey(), spellKey.getLevel()); } if (slot != null) { spellInventory.put(spellKey.getBaseKey(), slot); } spells.add(spellKey.getBaseKey()); if (activeSpell == null || activeSpell.length() == 0) { activeSpell = spellKey.getBaseKey(); } } } } private void loadSpells() { Object wandSpells = getObject("spells"); if (wandSpells != null) { if (wandSpells instanceof String) { parseSpells((String)wandSpells); } else if (wandSpells instanceof Collection) { @SuppressWarnings("unchecked") Collection<String> spellList = (Collection<String>)wandSpells; loadSpells(spellList); } else { clearSpells(); } } else { clearSpells(); } } protected void parseBrushes(String brushString) { // Support YML-List-As-String format // Maybe don't need this anymore since loading lists is now a separate path brushString = brushString.replaceAll("[\\]\\[]", ""); String[] brushNames = StringUtils.split(brushString, ','); loadBrushes(Arrays.asList(brushNames)); } protected void clearBrushes() { brushes.clear(); } protected void loadBrushes(Collection<String> brushKeys) { WandUpgradePath path = getPath(); clearBrushes(); for (String materialName : brushKeys) { String[] pieces = StringUtils.split(materialName, '@'); Integer slot = parseSlot(pieces); String materialKey = pieces[0].trim(); if (limitBrushesToPath && path != null && !path.containsBrush(materialKey)) continue; if (slot != null) { brushInventory.put(materialKey, slot); } brushes.add(materialKey); } } private void loadBrushes() { Object wandBrushes = getObject("brushes", getObject("materials")); if (wandBrushes != null) { if (wandBrushes instanceof String) { parseBrushes((String)wandBrushes); } else if (wandBrushes instanceof Collection) { @SuppressWarnings("unchecked") Collection<String> brushList = (Collection<String>)wandBrushes; loadBrushes(brushList); } else { clearBrushes(); } } else { clearBrushes(); } } protected void loadBrushInventory(Map<String, ? extends Object> inventory) { if (inventory == null) return; WandUpgradePath path = getPath(); for (Map.Entry<String, ?> brushEntry : inventory.entrySet()) { Object slot = brushEntry.getValue(); String brushKey = brushEntry.getKey(); if (limitBrushesToPath && path != null && !path.containsBrush(brushKey)) continue; if (slot != null && slot instanceof Integer) { brushInventory.put(brushKey, (Integer)slot); } } } protected void loadSpellInventory(Map<String, ? extends Object> inventory) { if (inventory == null) return; WandUpgradePath path = getPath(); for (Map.Entry<String, ? extends Object> spellEntry : inventory.entrySet()) { String spellKey = spellEntry.getKey(); if (limitSpellsToPath && path != null && !path.containsSpell(spellKey)) continue; Object slot = spellEntry.getValue(); if (slot != null && slot instanceof Integer) { spellInventory.put(spellKey, (Integer)slot); } } } protected void loadSpellLevels(Map<String, ? 
extends Object> levels) { if (levels == null) return; for (Map.Entry<String, ? extends Object> spellEntry : levels.entrySet()) { Object level = spellEntry.getValue(); if (level != null && level instanceof Integer) { setSpellLevel(spellEntry.getKey(), (Integer)level); } } } @Nullable public static ItemStack createSpellItem(String spellKey, MagicController controller, Wand wand, boolean isItem) { String[] split = spellKey.split(" ", 2); return createSpellItem(controller.getSpellTemplate(split[0]), split.length > 1 ? split[1] : "", controller, wand == null ? null : wand.getActiveMage(), wand, isItem); } @Nullable public static ItemStack createSpellItem(String spellKey, MagicController controller, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand, boolean isItem) { String[] split = spellKey.split(" ", 2); return createSpellItem(controller.getSpellTemplate(split[0]), split.length > 1 ? split[1] : "", controller, mage, wand, isItem); } @Nullable public ItemStack createSpellItem(String spellKey) { return createSpellItem(spellKey, "", false); } @Nullable public ItemStack createSpellItem(String spellKey, String args, boolean isItem) { SpellTemplate spell = mage == null ? controller.getSpellTemplate(spellKey) : mage.getSpell(spellKey); return createSpellItem(spell, args, controller, mage, this, isItem); } @Nullable public static ItemStack createSpellItem(SpellTemplate spell, String args, MagicController controller, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand, boolean isItem) { if (spell == null) return null; String iconURL = spell.getIconURL(); ItemStack itemStack = null; if (iconURL != null && (controller.isUrlIconsEnabled() || spell.getIcon() == null || !spell.getIcon().isValid() || spell.getIcon().getMaterial() == Material.AIR)) { itemStack = controller.getURLSkull(iconURL); } if (itemStack == null) { ItemStack originalItemStack = null; com.elmakers.mine.bukkit.api.block.MaterialAndData icon = spell.getIcon(); if (icon == null) { controller.getPlugin().getLogger().warning("Unable to create spell icon for " + spell.getName() + ", missing material"); return null; } try { originalItemStack = new ItemStack(icon.getMaterial(), 1, icon.getData()); itemStack = InventoryUtils.makeReal(originalItemStack); } catch (Exception ex) { itemStack = null; } if (itemStack == null) { if (icon.getMaterial() != Material.AIR) { String iconName = icon.getName(); controller.getPlugin().getLogger().warning("Unable to create spell icon for " + spell.getKey() + " with material " + iconName); } return originalItemStack; } } InventoryUtils.makeUnbreakable(itemStack); InventoryUtils.hideFlags(itemStack, (byte)63); updateSpellItem(controller.getMessages(), itemStack, spell, args, mage, wand, wand == null ? 
null : wand.activeBrush, isItem); if (wand != null && wand.getMode() == WandMode.SKILLS && !isItem) { String mageClassKey = wand.getMageClassKey(); ConfigurationSection skillsConfig = wand.getConfigurationSection("skills"); InventoryUtils.configureSkillItem(itemStack, mageClassKey, skillsConfig); } return itemStack; } @Nullable protected ItemStack createBrushIcon(String materialKey) { return createBrushItem(materialKey, controller, this, false); } @Nullable public static ItemStack createBrushItem(String materialKey, com.elmakers.mine.bukkit.api.magic.MageController controller, Wand wand, boolean isItem) { MaterialBrush brushData = MaterialBrush.parseMaterialKey(materialKey); if (brushData == null) return null; ItemStack itemStack = brushData.getItem(controller, isItem); if (BrushGlow || (isItem && BrushItemGlow)) { CompatibilityUtils.addGlow(itemStack); } InventoryUtils.makeUnbreakable(itemStack); InventoryUtils.hideFlags(itemStack, (byte)63); updateBrushItem(controller.getMessages(), itemStack, brushData, wand); return itemStack; } protected boolean findItem() { if (mage != null && item != null) { Player player = mage.getPlayer(); if (player != null) { ItemStack itemInHand = player.getInventory().getItemInMainHand(); if (itemInHand != null && !InventoryUtils.isSameInstance(itemInHand, item) && itemInHand.equals(item)) { item = itemInHand; isInOffhand = false; return true; } itemInHand = player.getInventory().getItemInOffHand(); if (itemInHand != null && !InventoryUtils.isSameInstance(itemInHand, item) && itemInHand.equals(item)) { item = itemInHand; isInOffhand = true; return true; } itemInHand = player.getInventory().getItem(heldSlot); if (itemInHand != null && !InventoryUtils.isSameInstance(itemInHand, item) && itemInHand.equals(item)) { item = itemInHand; isInOffhand = true; return true; } } } return false; } @Override public void saveState() { // Make sure we're on the current item instance if (findItem()) { updateItemIcon(); updateName(); updateLore(); } if (item == null || item.getType() == Material.AIR) return; // Check for upgrades that still have wand data if (isUpgrade && isWand(item)) { InventoryUtils.removeMeta(item, WAND_KEY); } Object wandNode = InventoryUtils.createNode(item, isUpgrade ? 
UPGRADE_KEY : WAND_KEY); if (wandNode == null) { controller.getLogger().warning("Failed to save wand state for wand to : " + item); } else { InventoryUtils.saveTagsToNBT(getConfiguration(), wandNode); } } @Nullable public static ConfigurationSection itemToConfig(ItemStack item, ConfigurationSection stateNode) { Object wandNode = InventoryUtils.getNode(item, WAND_KEY); if (wandNode == null) { wandNode = InventoryUtils.getNode(item, UPGRADE_KEY); if (wandNode == null) { return null; } } ConfigurationUtils.loadAllTagsFromNBT(stateNode, wandNode); return stateNode; } public static void configToItem(ConfigurationSection itemSection, ItemStack item) { ConfigurationSection stateNode = itemSection.getConfigurationSection("wand"); Object wandNode = InventoryUtils.createNode(item, Wand.WAND_KEY); if (wandNode != null) { InventoryUtils.saveTagsToNBT(stateNode, wandNode); } } @Nullable protected String getPotionEffectString() { return getPotionEffectString(potionEffects); } @Override public void save(ConfigurationSection node, boolean filtered) { ConfigurationUtils.addConfigurations(node, getConfiguration()); // Filter out some fields if (filtered) { node.set("id", null); node.set("owner_id", null); node.set("owner", null); node.set("template", null); node.set("mana_timestamp", null); node.set("enchant_count", null); } if (isUpgrade) { node.set("upgrade", true); } if (template != null && !template.isEmpty()) { node.set("template", null); node.set("inherit", template); } } public void save() { saveState(); updateName(); updateLore(); } public void updateBrushInventory() { if (brushInventory.isEmpty()) { setProperty("brush_inventory", null); } else { setProperty("brush_inventory", new HashMap<>(brushInventory)); } } protected void updateBrushInventory(Map<String, Integer> updateBrushes) { for (Map.Entry<String, Integer> brushEntry : brushInventory.entrySet()) { String brushKey = brushEntry.getKey(); Integer slot = updateBrushes.get(brushKey); if (slot != null) { brushEntry.setValue(slot); } } } public void updateSpellInventory() { if (spellInventory.isEmpty()) { setProperty("spell_inventory", null); } else { setProperty("spell_inventory", new HashMap<>(spellInventory)); } } protected void updateSpellInventory(Map<String, Integer> updateSpells) { for (Map.Entry<String, Integer> spellEntry : spellInventory.entrySet()) { String spellKey = spellEntry.getKey(); Integer slot = updateSpells.get(spellKey); if (slot != null) { spellEntry.setValue(slot); } } } public void setEffectColor(String hexColor) { if (hexColor == null || hexColor.length() == 0 || hexColor.equals("none")) { effectColor = null; return; } // Annoying config conversion issue :\ if (hexColor.contains(".")) { hexColor = hexColor.substring(0, hexColor.indexOf('.')); } effectColor = new ColorHD(hexColor); if (hexColor.equals("random")) { setProperty("effect_color", effectColor.toString()); } } private void migrateProtection(String legacy, String migrateTo) { if (hasProperty(legacy)) { double protection = getDouble(legacy); clearProperty(legacy); setProperty("protection." + migrateTo, protection); } } @Nullable private MaterialAndData loadIcon(String key) { if (key == null || key.isEmpty()) { return null; } ItemData itemData = controller.getOrCreateItem(key); if (itemData == null) { return null; } com.elmakers.mine.bukkit.api.block.MaterialAndData materialData = itemData.getMaterialAndData(); return (materialData instanceof MaterialAndData) ? 
(MaterialAndData)materialData : null; } @Override public void loadProperties() { super.loadProperties(); locked = getBoolean("locked", locked); lockedAllowUpgrades = getBoolean("locked_allow_upgrades", false); consumeReduction = getFloat("consume_reduction"); cooldownReduction = getFloat("cooldown_reduction"); costReduction = getFloat("cost_reduction"); power = getFloat("power"); ConfigurationSection protectionConfig = getConfigurationSection("protection"); if (protectionConfig == null && hasProperty("protection")) { migrateProtection("protection", "overall"); migrateProtection("protection_physical", "physical"); migrateProtection("protection_projectiles", "projectile"); migrateProtection("protection_falling", "fall"); migrateProtection("protection_fire", "fire"); migrateProtection("protection_explosions", "explosion"); protectionConfig = getConfigurationSection("protection"); } if (protectionConfig != null) { protection = new HashMap<>(); for (String protectionKey : protectionConfig.getKeys(false)) { protection.put(protectionKey, protectionConfig.getDouble(protectionKey)); } } hasId = getBoolean("unique", false); blockChance = getFloat("block_chance"); blockReflectChance = getFloat("block_reflect_chance"); blockFOV = getFloat("block_fov"); blockMageCooldown = getInt("block_mage_cooldown"); blockCooldown = getInt("block_cooldown"); manaPerDamage = getFloat("mana_per_damage"); earnMultiplier = getFloat("earn_multiplier", getFloat("sp_multiplier", 1)); String singleClass = getString("class"); if (singleClass != null && !singleClass.isEmpty()) { mageClassKeys = new ArrayList<>(); mageClassKeys.add(singleClass); } else { mageClassKeys = getStringList("classes"); } // Check for single-use wands uses = getInt("uses"); hasUses = uses > 0; // Convert some legacy properties to potion effects float healthRegeneration = getFloat("health_regeneration", 0); float hungerRegeneration = getFloat("hunger_regeneration", 0); float speedIncrease = getFloat("haste", 0); if (speedIncrease > 0) { potionEffects.put(PotionEffectType.SPEED, 1); } if (healthRegeneration > 0) { potionEffects.put(PotionEffectType.REGENERATION, 1); } if (hungerRegeneration > 0) { potionEffects.put(PotionEffectType.SATURATION, 1); } // This overrides the value loaded in CasterProperties if (!regenWhileInactive) { setProperty("mana_timestamp", System.currentTimeMillis()); } if (hasProperty("effect_color")) { setEffectColor(getString("effect_color")); } id = getString("id"); isUpgrade = getBoolean("upgrade"); quietLevel = getInt("quiet"); effectBubbles = getBoolean("effect_bubbles"); keep = getBoolean("keep"); passive = getBoolean("passive"); indestructible = getBoolean("indestructible"); superPowered = getBoolean("powered"); superProtected = getBoolean("protected"); glow = getBoolean("glow"); undroppable = getBoolean("undroppable"); isHeroes = getBoolean("heroes"); bound = getBoolean("bound"); forceUpgrade = getBoolean("force"); autoOrganize = getBoolean("organize"); autoAlphabetize = getBoolean("alphabetize"); autoFill = getBoolean("fill"); rename = getBoolean("rename"); renameDescription = getBoolean("rename_description"); enchantCount = getInt("enchant_count"); maxEnchantCount = getInt("max_enchant_count"); inventoryRows = getInt("inventory_rows", 5); if (inventoryRows <= 0) inventoryRows = 1; if (hasProperty("effect_particle")) { effectParticle = ConfigurationUtils.toParticleEffect(getString("effect_particle")); effectParticleData = 0; } else { effectParticle = null; } if (hasProperty("effect_sound")) { effectSound = 
ConfigurationUtils.toSoundEffect(getString("effect_sound")); } else { effectSound = null; } activeEffectsOnly = getBoolean("active_effects"); effectParticleData = getFloat("effect_particle_data"); effectParticleCount = getInt("effect_particle_count"); effectParticleRadius = getDouble("effect_particle_radius"); effectParticleOffset = getDouble("effect_particle_offset"); effectParticleInterval = getInt("effect_particle_interval"); effectParticleMinVelocity = getDouble("effect_particle_min_velocity"); effectSoundInterval = getInt("effect_sound_interval"); castLocation = getVector("cast_location"); castInterval = getInt("cast_interval"); castMinVelocity = getDouble("cast_min_velocity"); castVelocityDirection = getVector("cast_velocity_direction"); castSpell = getString("cast_spell"); String castParameterString = getString("cast_parameters", null); if (castParameterString != null && !castParameterString.isEmpty()) { castParameters = new MemoryConfiguration(); ConfigurationUtils.addParameters(StringUtils.split(castParameterString, ' '), castParameters); } else { castParameters = null; } WandMode newMode = parseWandMode(getString("mode"), controller.getDefaultWandMode()); if (newMode != mode) { if (isInventoryOpen()) { closeInventory(); } mode = newMode; } brushMode = parseWandMode(getString("brush_mode"), controller.getDefaultBrushMode()); currencyDisplay = controller.getCurrency(getString("currency_display", "sp")); // Backwards compatibility if (getBoolean("mode_drop", false)) { dropAction = WandAction.TOGGLE; swapAction = WandAction.CYCLE_HOTBAR; rightClickAction = WandAction.NONE; quickCast = true; // This is to turn the redundant spell lore off quickCastDisabled = true; manualQuickCastDisabled = false; } else if (mode == WandMode.CAST) { leftClickAction = WandAction.CAST; rightClickAction = WandAction.CAST; swapAction = WandAction.NONE; dropAction = WandAction.NONE; } else if (mode == WandMode.CYCLE) { leftClickAction = WandAction.CAST; rightClickAction = WandAction.NONE; swapAction = WandAction.NONE; dropAction = WandAction.CYCLE; } else { leftClickAction = WandAction.NONE; rightClickAction = WandAction.NONE; dropAction = WandAction.NONE; swapAction = WandAction.NONE; quickCast = false; quickCastDisabled = false; manualQuickCastDisabled = false; } String quickCastType = getString("quick_cast", getString("mode_cast")); if (quickCastType != null) { if (quickCastType.equalsIgnoreCase("true")) { quickCast = true; // This is to turn the redundant spell lore off quickCastDisabled = true; manualQuickCastDisabled = false; } else if (quickCastType.equalsIgnoreCase("manual")) { quickCast = false; quickCastDisabled = true; manualQuickCastDisabled = false; } else if (quickCastType.equalsIgnoreCase("disable")) { quickCast = false; quickCastDisabled = true; manualQuickCastDisabled = true; } else { quickCast = false; quickCastDisabled = false; manualQuickCastDisabled = false; } } leftClickAction = parseWandAction(getString("left_click"), leftClickAction); rightClickAction = parseWandAction(getString("right_click"), rightClickAction); dropAction = parseWandAction(getString("drop"), dropAction); swapAction = parseWandAction(getString("swap"), swapAction); owner = getString("owner"); ownerId = getString("owner_id"); template = getString("template"); upgradeTemplate = getString("upgrade_template"); path = getString("path"); activeSpell = getString("active_spell"); if (activeSpell != null && activeSpell.contains("|")) { SpellKey activeKey = new SpellKey(activeSpell); activeSpell = activeKey.getBaseKey(); 
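        // Persist the stripped key so leveled keys (e.g. "missile|2") only need migrating once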
setProperty("active_spell", activeSpell); } alternateSpell = getString("alternate_spell"); alternateSpell2 = getString("alternate_spell2"); activeBrush = getString("active_brush", getString("active_material")); if (hasProperty("hotbar")) { currentHotbar = getInt("hotbar"); } if (hasProperty("page")) { int page = getInt("page"); if (page != openInventoryPage) { openInventoryPage = page; } } // Default to template names, override with localizations and finally with wand data wandName = controller.getMessages().get("wand.default_name"); description = ""; // Check for migration information in the template config ConfigurationSection templateConfig = null; if (template != null && !template.isEmpty()) { templateConfig = controller.getWandTemplateConfiguration(template); if (templateConfig != null) { wandName = templateConfig.getString("name", wandName); description = templateConfig.getString("description", description); int templateUses = templateConfig.getInt("uses"); isSingleUse = templateUses == 1; hasUses = hasUses || templateUses > 0; } wandName = controller.getMessages().get("wands." + template + ".name", wandName); description = controller.getMessages().get("wands." + template + ".description", description); } wandName = getString("name", wandName); description = getString("description", description); WandTemplate wandTemplate = getTemplate(); if (hasProperty("icon_inactive")) { String iconKey = getString("icon_inactive"); if (wandTemplate != null) { iconKey = wandTemplate.migrateIcon(iconKey); } if (iconKey != null) { inactiveIcon = loadIcon(iconKey); } } else { inactiveIcon = null; } if (inactiveIcon != null && (inactiveIcon.getMaterial() == null || inactiveIcon.getMaterial() == Material.AIR)) { inactiveIcon = null; } inactiveIconDelay = getInt("icon_inactive_delay"); randomizeOnActivate = randomizeOnActivate && hasProperty("randomize_icon"); if (randomizeOnActivate) { String randomizeIcon = getString("randomize_icon"); setIcon(loadIcon(randomizeIcon)); if (item == null) { controller.getLogger().warning("Invalid randomize_icon in wand '" + template + "' config: " + randomizeIcon); } } else if (hasProperty("icon")) { String iconKey = getString("icon"); if (wandTemplate != null) { iconKey = wandTemplate.migrateIcon(iconKey); } if (iconKey.contains(",")) { Random r = new Random(); String[] keys = StringUtils.split(iconKey, ','); iconKey = keys[r.nextInt(keys.length)]; } // Port old custom wand icons if (templateConfig != null && iconKey.contains("i.imgur.com")) { iconKey = templateConfig.getString("icon"); } setIcon(loadIcon(iconKey)); if (item == null) { controller.getLogger().warning("Invalid icon in wand '" + template + "' config: " + iconKey); } updateIcon(); } else if (isUpgrade) { setIcon(new MaterialAndData(DefaultUpgradeMaterial)); } else { setIcon(new MaterialAndData(DefaultWandMaterial)); } if (hasProperty("upgrade_icon")) { upgradeIcon = loadIcon(getString("upgrade_icon")); } // Add vanilla attributes InventoryUtils.applyAttributes(item, getConfigurationSection("item_attributes"), getString("item_attribute_slot", getString("attribute_slot"))); // Add unstashable and unmoveable tags if (getBoolean("unstashable") || (undroppable && Unstashable)) { InventoryUtils.setMetaBoolean(item, "unstashable", true); } else { InventoryUtils.removeMeta(item, "unstashable"); } if (getBoolean("unmoveable")) { InventoryUtils.setMetaBoolean(item, "unmoveable", true); } else { InventoryUtils.removeMeta(item, "unmoveable"); } if (undroppable) { InventoryUtils.setMetaBoolean(item, "undroppable", 
true); } else { InventoryUtils.removeMeta(item, "undroppable"); } if (keep) { InventoryUtils.setMetaBoolean(item, "keep", true); } else { InventoryUtils.removeMeta(item, "keep"); } // Add vanilla enchantments ConfigurationSection enchantments = getConfigurationSection("enchantments"); InventoryUtils.applyEnchantments(item, enchantments); // Add enchantment glow if (enchantments == null || enchantments.getKeys(false).isEmpty()) { if (glow) { CompatibilityUtils.addGlow(item); } else { CompatibilityUtils.removeGlow(item); } } // Check for path-based migration, may update icons com.elmakers.mine.bukkit.api.wand.WandUpgradePath upgradePath = getPath(); if (upgradePath != null) { hasSpellProgression = upgradePath.getSpells().size() > 0 || upgradePath.getExtraSpells().size() > 0 || upgradePath.getRequiredSpells().size() > 0; upgradePath.checkMigration(this); } else { hasSpellProgression = false; } if (isHeroes) { hasSpellProgression = true; } brushInventory.clear(); spellInventory.clear(); limitSpellsToPath = getBoolean("limit_spells_to_path"); limitBrushesToPath = getBoolean("limit_brushes_to_path"); loadSpells(); // Load spell levels Object spellLevelsRaw = getObject("spell_levels"); if (spellLevelsRaw != null) { // Not sure this will ever appear as a Map, but just in case if (spellLevelsRaw instanceof Map) { @SuppressWarnings("unchecked") Map<String, Integer> spellLevels = (Map<String, Integer>)spellLevelsRaw; loadSpellLevels(spellLevels); } else if (spellLevelsRaw instanceof ConfigurationSection) { loadSpellLevels(NMSUtils.getMap((ConfigurationSection)spellLevelsRaw)); } } checkActiveSpell(); loadBrushes(); Object brushInventoryRaw = getObject("brush_inventory"); if (brushInventoryRaw != null) { // Not sure this will ever appear as a Map, but just in case if (brushInventoryRaw instanceof Map) { @SuppressWarnings("unchecked") Map<String, Integer> brushInventory = (Map<String, Integer>)brushInventoryRaw; loadBrushInventory(brushInventory); } else if (brushInventoryRaw instanceof ConfigurationSection) { loadBrushInventory(NMSUtils.getMap((ConfigurationSection)brushInventoryRaw)); } } Object spellInventoryRaw = getObject("spell_inventory"); if (spellInventoryRaw != null) { // Not sure this will ever appear as a Map, but just in case if (spellInventoryRaw instanceof Map) { @SuppressWarnings("unchecked") Map<String, Integer> spellInventory = (Map<String, Integer>)spellInventoryRaw; loadSpellInventory(spellInventory); } else if (spellInventoryRaw instanceof ConfigurationSection) { loadSpellInventory(NMSUtils.getMap((ConfigurationSection)spellInventoryRaw)); } } else { // Spells may have contained an inventory from migration or templates with a spell@slot format. updateSpellInventory(); } castOverrides = null; if (hasProperty("overrides")) { castOverrides = null; Object overridesGeneric = getObject("overrides"); if (overridesGeneric != null) { castOverrides = new HashMap<>(); if (overridesGeneric instanceof String) { String overrides = (String) overridesGeneric; if (!overrides.isEmpty()) { // Support YML-List-As-String format // May not really need this anymore. 
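                        // e.g. "[range 20,fire true]" and "range 20,fire true" parse to the same overrides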
overrides = overrides.replaceAll("[\\]\\[]", ""); String[] pairs = StringUtils.split(overrides, ','); for (String override : pairs) { parseOverride(override); } } } else if (overridesGeneric instanceof List) { @SuppressWarnings("unchecked") List<String> overrideList = (List<String>)overridesGeneric; for (String override : overrideList) { parseOverride(override); } } else if (overridesGeneric instanceof ConfigurationSection) { ConfigurationSection overridesSection = (ConfigurationSection)overridesGeneric; Set<String> keys = overridesSection.getKeys(true); for (String key : keys) { Object leaf = overridesSection.get(key); if (!(leaf instanceof ConfigurationSection) && !(leaf instanceof Map)) { castOverrides.put(key, leaf.toString()); } } } } } potionEffects.clear(); if (hasProperty("potion_effects")) { addPotionEffects(potionEffects, getString("potion_effects", null)); } // Some cleanup and sanity checks. In theory we don't need to store any non-zero value (as it is with the traders) // so try to keep defaults as 0/0.0/false. if (effectSound == null) { effectSoundInterval = 0; } else { effectSoundInterval = (effectSoundInterval == 0) ? 5 : effectSoundInterval; } if (effectParticle == null) { effectParticleInterval = 0; } checkActiveMaterial(); } private void parseOverride(String override) { // Unescape commas override = override.replace("\\|", ","); String[] keyValue = StringUtils.split(override, ' '); if (keyValue.length > 0) { String value = keyValue.length > 1 ? keyValue[1] : ""; castOverrides.put(keyValue[0], value); } } @Override public void describe(CommandSender sender, @Nullable Set<String> ignoreProperties) { ChatColor wandColor = isModifiable() ? ChatColor.AQUA : ChatColor.RED; sender.sendMessage(wandColor + getName()); if (isUpgrade) { sender.sendMessage(ChatColor.YELLOW + "(Upgrade)"); } if (description.length() > 0) { sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.GREEN + description); } else { sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.GREEN + "(No Description)"); } if (owner != null && owner.length() > 0 && ownerId != null && ownerId.length() > 0) { sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.WHITE + owner + " (" + ChatColor.GRAY + ownerId + ChatColor.WHITE + ")"); } else { sender.sendMessage(ChatColor.ITALIC + "" + ChatColor.WHITE + "(No Owner)"); } super.describe(sender, ignoreProperties); WandTemplate template = getTemplate(); if (template != null) { sender.sendMessage("" + ChatColor.BOLD + ChatColor.GREEN + "Template Configuration:"); ConfigurationSection itemConfig = getConfiguration(); Set<String> ownKeys = itemConfig.getKeys(false); template.describe(sender, ignoreProperties, ownKeys); } } private static String getBrushDisplayName(Messages messages, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String materialName = brush == null ? 
null : brush.getName(messages); if (materialName == null) { materialName = "none"; } String brushPrefix = ChatColor.translateAlternateColorCodes('&', messages.get("wand.brush_prefix")); return brushPrefix + materialName; } private static String getSpellDisplayName(Messages messages, SpellTemplate spell, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String name = ""; if (spell != null) { String spellPrefix = ChatColor.translateAlternateColorCodes('&', messages.get("wand.spell_prefix")); if (brush != null && spell.usesBrush()) { name = spellPrefix + spell.getName() + " " + getBrushDisplayName(messages, brush) + ChatColor.WHITE; } else { name = spellPrefix + spell.getName() + ChatColor.WHITE; } } return name; } private String getCustomName(String displayName, SpellTemplate spell, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String name = displayName; // $name name = name.replace("$name", wandName); // $path String pathName = getPathName(); if (pathName != null) { name = name.replace("$path", pathName); } // $spell String spellName = spell == null ? "" : spell.getName(); name = name.replace("$spell", spellName); // $brush String brushName = brush == null ? "" : brush.getName(); name = name.replace("$brush", brushName); // $uses name = name.replace("$uses", Integer.toString(getRemainingUses())); return ChatColor.translateAlternateColorCodes('&', name); } private String getActiveWandName(SpellTemplate spell, com.elmakers.mine.bukkit.api.block.MaterialBrush brush) { String customName = getString("display_name"); if (customName != null && !customName.isEmpty()) { return getCustomName(customName, spell, brush); } // Build wand name int remaining = getRemainingUses(); String wandColorPrefix = (hasUses && remaining <= 1) ? "single_use_prefix" : isModifiable() ? (bound ? "bound_prefix" : "unbound_prefix") : (path != null && path.length() > 0 ? "has_path_prefix" : "unmodifiable_prefix"); String name = ChatColor.translateAlternateColorCodes('&', getMessage(wandColorPrefix)) + getDisplayName(); if (randomizeOnActivate) return name; Set<String> spells = getSpells(); // Add active spell to description Messages messages = controller.getMessages(); boolean showSpell = isModifiable() && hasSpellProgression(); showSpell = !quickCast && (spells.size() > 1 || showSpell) && getMode() != WandMode.SKILLS; if (spell != null && showSpell) { name = getSpellDisplayName(messages, spell, brush) + " (" + name + ChatColor.WHITE + ")"; } if (remaining > 1) { String message = getMessage("uses_remaining_brief"); name = name + ChatColor.DARK_RED + " (" + message.replace("$count", Integer.toString(remaining)) + ChatColor.DARK_RED + ")"; } return name; } private String getActiveWandName(SpellTemplate spell) { return getActiveWandName(spell, mage == null ? MaterialBrush.parseMaterialKey(activeBrush) : mage.getBrush()); } private String getActiveWandName(MaterialBrush brush) { SpellTemplate spell = null; if (activeSpell != null && activeSpell.length() > 0) { spell = controller.getSpellTemplate(activeSpell); } return getActiveWandName(spell, brush); } private String getActiveWandName() { SpellTemplate spell = null; if (activeSpell != null && activeSpell.length() > 0) { spell = controller.getSpellTemplate(activeSpell); } return getActiveWandName(spell); } protected String getDisplayName() { return ChatColor.translateAlternateColorCodes('&', randomizeOnActivate ? 
getMessage("randomized_name") : wandName); } public void updateName(boolean isActive) { if (isActive) { CompatibilityUtils.setDisplayName(item, !isUpgrade ? getActiveWandName() : ChatColor.translateAlternateColorCodes('&', getMessage("upgrade_prefix")) + getDisplayName()); } else { CompatibilityUtils.setDisplayName(item, ChatColor.stripColor(getDisplayName())); } } private void updateName() { updateName(true); } protected static String convertToHTML(String line) { int tagCount = 1; line = "<span style=\"color:white\">" + line; for (ChatColor c : ChatColor.values()) { tagCount += StringUtils.countMatches(line, c.toString()); String replaceStyle = ""; if (c == ChatColor.ITALIC) { replaceStyle = "font-style: italic"; } else if (c == ChatColor.BOLD) { replaceStyle = "font-weight: bold"; } else if (c == ChatColor.UNDERLINE) { replaceStyle = "text-decoration: underline"; } else { String color = c.name().toLowerCase().replace("_", ""); if (c == ChatColor.LIGHT_PURPLE) { color = "mediumpurple"; } replaceStyle = "color:" + color; } line = line.replace(c.toString(), "<span style=\"" + replaceStyle + "\">"); } for (int i = 0; i < tagCount; i++) { line += "</span>"; } return line; } public String getHTMLDescription() { Collection<String> rawLore = getLore(); Collection<String> lore = new ArrayList<>(); lore.add("<h2>" + convertToHTML(getActiveWandName()) + "</h2>"); for (String line : rawLore) { lore.add(convertToHTML(line)); } return "<div style=\"background-color: black; margin: 8px; padding: 8px\">" + StringUtils.join(lore, "<br/>") + "</div>"; } protected void addPropertyLore(List<String> lore, boolean isSingleSpell) { if (usesMana() && effectiveManaMax > 0) { int manaMax = getManaMax(); if (effectiveManaMax != manaMax) { String fullMessage = getLevelString("mana_amount_boosted", manaMax, controller.getMaxMana()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaMax)), lore); } else { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_amount", manaMax, controller.getMaxMana()), lore); } int manaRegeneration = getManaRegeneration(); if (manaRegeneration > 0 && effectiveManaRegeneration > 0) { if (effectiveManaRegeneration != manaRegeneration) { String fullMessage = getLevelString("mana_regeneration_boosted", manaRegeneration, controller.getMaxManaRegeneration()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaRegeneration)), lore); } else { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_regeneration", manaRegeneration, controller.getMaxManaRegeneration()), lore); } } if (manaPerDamage > 0) { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_per_damage", manaPerDamage, controller.getMaxManaRegeneration()), lore); } } if (superPowered) { ConfigurationUtils.addIfNotEmpty(getMessage("super_powered"), lore); } if (blockReflectChance > 0) { ConfigurationUtils.addIfNotEmpty(getLevelString("reflect_chance", blockReflectChance), lore); } else if (blockChance != 0) { ConfigurationUtils.addIfNotEmpty(getLevelString("block_chance", blockChance), lore); } float manaMaxBoost = getManaMaxBoost(); if (manaMaxBoost != 0) { ConfigurationUtils.addIfNotEmpty(getPropertyString("mana_boost", manaMaxBoost), lore); } float manaRegenerationBoost = getManaRegenerationBoost(); if (manaRegenerationBoost != 0) { ConfigurationUtils.addIfNotEmpty(getPropertyString("mana_regeneration_boost", manaRegenerationBoost), lore); } if (castSpell != null) { SpellTemplate spell = controller.getSpellTemplate(castSpell); if (spell != null) { 
ConfigurationUtils.addIfNotEmpty(getMessage("spell_aura").replace("$spell", spell.getName()), lore); } } for (Map.Entry<PotionEffectType, Integer> effect : potionEffects.entrySet()) { ConfigurationUtils.addIfNotEmpty(describePotionEffect(effect.getKey(), effect.getValue()), lore); } // If this is a passive wand, then reduction properties stack onto the mage when worn. // In this case we should show it as such in the lore. if (passive) isSingleSpell = false; if (consumeReduction != 0 && !isSingleSpell) ConfigurationUtils.addIfNotEmpty(getPropertyString("consume_reduction", consumeReduction), lore); if (costReduction != 0 && !isSingleSpell) ConfigurationUtils.addIfNotEmpty(getPropertyString("cost_reduction", costReduction), lore); if (cooldownReduction != 0 && !isSingleSpell) ConfigurationUtils.addIfNotEmpty(getPropertyString("cooldown_reduction", cooldownReduction), lore); if (power > 0) ConfigurationUtils.addIfNotEmpty(getLevelString("power", power), lore); if (superProtected) { ConfigurationUtils.addIfNotEmpty(getMessage("super_protected"), lore); } else if (protection != null) { for (Map.Entry<String, Double> entry : protection.entrySet()) { String protectionType = entry.getKey(); double amount = entry.getValue(); addDamageTypeLore("protection", protectionType, amount, lore); } } ConfigurationSection weaknessConfig = getConfigurationSection("weakness"); if (weaknessConfig != null) { Set<String> keys = weaknessConfig.getKeys(false); for (String key : keys) { addDamageTypeLore("weakness", key, weaknessConfig.getDouble(key), lore); } } ConfigurationSection strengthConfig = getConfigurationSection("strength"); if (strengthConfig != null) { Set<String> keys = strengthConfig.getKeys(false); for (String key : keys) { addDamageTypeLore("strength", key, strengthConfig.getDouble(key), lore); } } if (earnMultiplier > 1) { String earnDescription = getPropertyString("earn_multiplier", earnMultiplier - 1); earnDescription = earnDescription.replace("$type", "SP"); ConfigurationUtils.addIfNotEmpty(earnDescription, lore); } ConfigurationSection attributes = getConfigurationSection("attributes"); if (attributes != null) { // Don't bother with the lore at all if the template has been blanked out String template = getMessage("attributes"); if (!template.isEmpty()) { Set<String> keys = attributes.getKeys(false); for (String key : keys) { String label = controller.getMessages().get("attributes." + key + ".name", key); // We are only display attributes as integers for now int value = attributes.getInt(key); if (value == 0) continue; float max = 1; MagicAttribute attribute = controller.getAttribute(key); if (attribute != null) { Double maxValue = attribute.getMax(); if (maxValue != null) { max = (float)(double)maxValue; } } label = getPropertyString("attributes", value, max).replace("$attribute", label); lore.add(label); } } } } private String getPropertyString(String templateName, float value) { return getPropertyString(templateName, value, 1); } private String getPropertyString(String templateName, float value, float max) { String propertyTemplate = getBoolean("stack") ? 
"property_stack" : "property_value"; if (value < 0) { propertyTemplate = propertyTemplate + "_negative"; } return controller.getMessages().getPropertyString(getMessageKey(templateName), value, max, getMessageKey(propertyTemplate)); } private String formatPropertyString(String template, float value) { return formatPropertyString(template, value, 1); } private String formatPropertyString(String template, float value, float max) { String propertyTemplate = getBoolean("stack") ? "property_stack" : "property_value"; if (value < 0) { propertyTemplate = propertyTemplate + "_negative"; } return controller.getMessages().formatPropertyString(template, value, max, getMessage(propertyTemplate)); } private void addDamageTypeLore(String property, String propertyType, double amount, List<String> lore) { if (amount != 0) { String templateKey = getMessageKey(property + "." + propertyType); String template; if (controller.getMessages().containsKey(templateKey)) { template = controller.getMessages().get(templateKey); } else { templateKey = getMessageKey("protection.unknown"); template = controller.getMessages().get(templateKey); String pretty = propertyType.substring(0, 1).toUpperCase() + propertyType.substring(1); template = template.replace("$type", pretty); } template = formatPropertyString(template, (float)amount); ConfigurationUtils.addIfNotEmpty(template, lore); } } public String getLevelString(String templateName, float amount) { return controller.getMessages().getLevelString(getMessageKey(templateName), amount); } public String getLevelString(String templateName, float amount, float max) { return controller.getMessages().getLevelString(getMessageKey(templateName), amount, max); } protected List<String> getCustomLore(Collection<String> loreTemplate) { List<String> lore = new ArrayList<>(); for (String line : loreTemplate) { if (line.startsWith("$")) { switch (line) { case "$description": addDescriptionLore(lore); break; case "$path": String pathTemplate = getMessage("path_lore", ""); String pathName = getPathName(); if (pathName != null && !pathTemplate.isEmpty()) { lore.add(pathTemplate.replace("$path", pathName)); } break; case "$owner": addOwnerDescription(lore); break; case "$spells": int spellCount = getSpells().size(); if (spellCount > 0) { ConfigurationUtils.addIfNotEmpty(getMessage("spell_count").replace("$count", Integer.toString(spellCount)), lore); } break; case "$brushes": int materialCount = getBrushes().size(); if (materialCount > 0) { ConfigurationUtils.addIfNotEmpty(getMessage("material_count").replace("$count", Integer.toString(materialCount)), lore); } break; case "$uses": addUseLore(lore); break; case "$mana_max": if (usesMana()) { int manaMax = getManaMax(); if (effectiveManaMax != manaMax) { String fullMessage = getLevelString("mana_amount_boosted", manaMax, controller.getMaxMana()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaMax)), lore); } else { ConfigurationUtils.addIfNotEmpty(getLevelString("mana_amount", manaMax, controller.getMaxMana()), lore); } } break; case "$mana_regeneration": if (usesMana()) { int manaRegeneration = getManaRegeneration(); if (manaRegeneration > 0) { if (effectiveManaRegeneration != manaRegeneration) { String fullMessage = getLevelString("mana_regeneration_boosted", manaRegeneration, controller.getMaxManaRegeneration()); ConfigurationUtils.addIfNotEmpty(fullMessage.replace("$mana", Integer.toString(effectiveManaRegeneration)), lore); } else { 
ConfigurationUtils.addIfNotEmpty(getLevelString("mana_regeneration", manaRegeneration, controller.getMaxManaRegeneration()), lore); } } } break; default: lore.add(ChatColor.translateAlternateColorCodes('&', line)); } } else { lore.add(ChatColor.translateAlternateColorCodes('&', line)); } } return lore; } protected void addDescriptionLore(List<String> lore) { String descriptionTemplate = controller.getMessages().get(getMessageKey("description_lore"), ""); if (!description.isEmpty() && !descriptionTemplate.isEmpty()) { if (description.contains("$path")) { String pathName = getPathName(); String description = ChatColor.translateAlternateColorCodes('&', this.description); description = description.replace("$path", pathName == null ? "Unknown" : pathName); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } else { String description = ChatColor.translateAlternateColorCodes('&', this.description); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } } } @Nullable protected String getPathName() { String pathName = null; com.elmakers.mine.bukkit.api.wand.WandUpgradePath path = getPath(); if (path != null) { pathName = path.getName(); } else if (mageClassKeys != null && !mageClassKeys.isEmpty()) { MageClassTemplate classTemplate = controller.getMageClassTemplate(mageClassKeys.get(0)); if (classTemplate != null) { String pathKey = classTemplate.getProperty("path", ""); if (!pathKey.isEmpty()) { path = controller.getPath(pathKey); } if (path != null) { pathName = path.getName(); } else { pathName = classTemplate.getName(); } } } return pathName; } protected void addOwnerDescription(List<String> lore) { if (owner != null && owner.length() > 0) { if (bound) { String ownerDescription = getMessage("bound_description", "$name").replace("$name", owner); ConfigurationUtils.addIfNotEmpty(ownerDescription, lore); } else { String ownerDescription = getMessage("owner_description", "$name").replace("$name", owner); ConfigurationUtils.addIfNotEmpty(ownerDescription, lore); } } } @SuppressWarnings("unchecked") protected List<String> getLore() { Object customLore = getProperty("lore"); if (customLore != null && customLore instanceof Collection) { return getCustomLore((Collection<String>)customLore); } List<String> lore = new ArrayList<>(); int spellCount = getSpells().size(); int materialCount = getBrushes().size(); String pathName = getPathName(); if (description.length() > 0) { if (randomizeOnActivate) { String randomDescription = getMessage("randomized_lore"); String randomTemplate = controller.getMessages().get(getMessageKey("randomized_description"), ""); if (randomDescription.length() > 0 && !randomTemplate.isEmpty()) { InventoryUtils.wrapText(randomTemplate.replace("$description", randomDescription), lore); return lore; } } if (description.contains("$") && !description.contains("$path")) { String newDescription = controller.getMessages().escape(description); if (!newDescription.equals(description)) { this.description = newDescription; setProperty("description", description); } } String descriptionTemplate = controller.getMessages().get(getMessageKey("description_lore"), ""); if (description.contains("$path") && !descriptionTemplate.isEmpty()) { String description = ChatColor.translateAlternateColorCodes('&', this.description); description = description.replace("$path", pathName == null ? 
"Unknown" : pathName); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } else if (description.contains("$")) { String randomDescription = getMessage("randomized_lore"); String randomTemplate = controller.getMessages().get(getMessageKey("randomized_description"), ""); if (randomDescription.length() > 0 && !randomTemplate.isEmpty()) { randomDescription = ChatColor.translateAlternateColorCodes('&', randomDescription); InventoryUtils.wrapText(randomTemplate.replace("$description", randomDescription), lore); return lore; } } else if (!descriptionTemplate.isEmpty()) { String description = ChatColor.translateAlternateColorCodes('&', this.description); InventoryUtils.wrapText(descriptionTemplate.replace("$description", description), lore); } } String pathTemplate = getMessage("path_lore", ""); if (pathName != null && !pathTemplate.isEmpty()) { lore.add(pathTemplate.replace("$path", pathName)); } if (!isUpgrade) { addOwnerDescription(lore); } SpellTemplate spell = mage == null ? controller.getSpellTemplate(getActiveSpellKey()) : mage.getSpell(getActiveSpellKey()); Messages messages = controller.getMessages(); // This is here specifically for a wand that only has // one spell now, but may get more later. Since you // can't open the inventory in this state, you can not // otherwise see the spell lore. boolean isSingleSpell = spell != null && spellCount == 1 && !hasInventory && !isUpgrade; if (isSingleSpell) { addSpellLore(messages, spell, lore, getActiveMage(), this); } if (materialCount == 1 && activeBrush != null && activeBrush.length() > 0) { lore.add(getBrushDisplayName(messages, MaterialBrush.parseMaterialKey(activeBrush))); } if (spellCount > 0) { if (isUpgrade) { ConfigurationUtils.addIfNotEmpty(getMessage("upgrade_spell_count").replace("$count", Integer.toString(spellCount)), lore); } else if (spellCount > 1) { ConfigurationUtils.addIfNotEmpty(getMessage("spell_count").replace("$count", Integer.toString(spellCount)), lore); } } if (materialCount > 0) { if (isUpgrade) { ConfigurationUtils.addIfNotEmpty(getMessage("upgrade_material_count").replace("$count", Integer.toString(materialCount)), lore); } else if (materialCount > 1) { ConfigurationUtils.addIfNotEmpty(getMessage("material_count").replace("$count", Integer.toString(materialCount)), lore); } } addUseLore(lore); addPropertyLore(lore, isSingleSpell); if (isUpgrade) { ConfigurationUtils.addIfNotEmpty(getMessage("upgrade_item_description"), lore); } return lore; } protected void addUseLore(List<String> lore) { int remaining = getRemainingUses(); if (!isSingleUse && remaining > 0) { if (isUpgrade) { String message = (remaining == 1) ? getMessage("upgrade_uses_singular") : getMessage("upgrade_uses"); ConfigurationUtils.addIfNotEmpty(message.replace("$count", Integer.toString(remaining)), lore); } else { String message = (remaining == 1) ? 
getMessage("uses_remaining_singular") : getMessage("uses_remaining_brief"); ConfigurationUtils.addIfNotEmpty(message.replace("$count", Integer.toString(remaining)), lore); } } } protected void updateLore() { CompatibilityUtils.setLore(item, getLore()); } public int getRemainingUses() { return uses; } public void makeEnchantable(boolean enchantable) { if (EnchantableWandMaterial == null) return; if (!enchantable) { item.setType(icon.getMaterial()); item.setDurability(icon.getData()); } else { MaterialSet enchantableMaterials = controller.getMaterialSetManager().getMaterialSetEmpty("enchantable"); if (!enchantableMaterials.testItem(item)) { item.setType(EnchantableWandMaterial); item.setDurability((short) 0); } } updateName(); } public static boolean hasActiveWand(Player player) { if (player == null) return false; ItemStack activeItem = player.getInventory().getItemInMainHand(); return isWand(activeItem); } @Nullable public static Wand getActiveWand(MagicController controller, Player player) { ItemStack activeItem = player.getInventory().getItemInMainHand(); if (isWand(activeItem)) { return controller.getWand(activeItem); } return null; } public static boolean isWand(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, WAND_KEY); } public static boolean isWandOrUpgrade(ItemStack item) { return isWand(item) || isUpgrade(item); } public static boolean isSpecial(ItemStack item) { return isWand(item) || isUpgrade(item) || isSpell(item) || isBrush(item) || isSP(item) || isCurrency(item); } public static boolean isSelfDestructWand(ItemStack item) { return item != null && WAND_SELF_DESTRUCT_KEY != null && InventoryUtils.hasMeta(item, WAND_SELF_DESTRUCT_KEY); } public static boolean isSP(ItemStack item) { return InventoryUtils.hasMeta(item, "sp"); } public static boolean isCurrency(ItemStack item) { return InventoryUtils.hasMeta(item, "currency"); } @Nullable public static Integer getSP(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; String spNode = InventoryUtils.getMetaString(item, "sp"); if (spNode == null) return null; Integer sp = null; try { sp = Integer.parseInt(spNode); } catch (Exception ex) { sp = null; } return sp; } @Nullable public static Double getCurrencyAmount(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object currencyNode = InventoryUtils.getNode(item, "currency"); if (currencyNode == null) return null; return InventoryUtils.getMetaDouble(currencyNode, "amount"); } @Nullable public static String getCurrencyType(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object currencyNode = InventoryUtils.getNode(item, "currency"); if (currencyNode == null) return null; return InventoryUtils.getMetaString(currencyNode, "type"); } public static boolean isSpell(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, "spell"); } public static boolean isSkill(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, "skill"); } public static boolean isBrush(ItemStack item) { return item != null && InventoryUtils.hasMeta(item, "brush"); } @Nullable protected static Object getWandOrUpgradeNode(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object wandNode = InventoryUtils.getNode(item, WAND_KEY); if (wandNode == null) { wandNode = InventoryUtils.getNode(item, UPGRADE_KEY); } return wandNode; } @Nullable public static String getWandTemplate(ItemStack item) { Object wandNode = getWandOrUpgradeNode(item); if (wandNode == null) return null; return InventoryUtils.getMetaString(wandNode, 
"template"); } @Nullable public static String getWandId(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object wandNode = InventoryUtils.getNode(item, WAND_KEY); if (wandNode == null) return null; return InventoryUtils.getMetaString(wandNode, "id"); } @Nullable public static String getSpell(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return null; return InventoryUtils.getMetaString(spellNode, "key"); } @Nullable @Override public Spell getSpell(String spellKey, com.elmakers.mine.bukkit.api.magic.Mage mage) { if (mage == null) { return null; } if (!hasSpell(spellKey)) return null; SpellKey key = new SpellKey(spellKey); spellKey = key.getBaseKey(); Integer level = spellLevels.get(spellKey); if (level != null) { spellKey = new SpellKey(spellKey, level).getKey(); } return mage.getSpell(spellKey); } @Nullable @Override public Spell getSpell(String spellKey) { return getSpell(spellKey, mage); } @Nullable public static String getSpellClass(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return null; return InventoryUtils.getMetaString(spellNode, "class"); } public static boolean isQuickCastSkill(ItemStack item) { if (InventoryUtils.isEmpty(item)) return false; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return false; Boolean quickCast = InventoryUtils.containsNode(spellNode, "quick_cast") ? InventoryUtils.getMetaBoolean(spellNode, "quick_cast") : null; return quickCast == null ? true : quickCast; } @Nullable public static String getSpellArgs(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object spellNode = InventoryUtils.getNode(item, "spell"); if (spellNode == null) return null; return InventoryUtils.getMetaString(spellNode, "args"); } @Nullable public static String getBrush(ItemStack item) { if (InventoryUtils.isEmpty(item)) return null; Object brushNode = InventoryUtils.getNode(item, "brush"); if (brushNode == null) return null; return InventoryUtils.getMetaString(brushNode, "key"); } protected void updateInventoryName(ItemStack item, boolean activeName) { if (isSpell(item)) { Spell spell = mage.getSpell(getSpell(item)); if (spell != null) { updateSpellName(controller.getMessages(), item, spell, activeName ? this : null, activeBrush); } } else if (isBrush(item)) { updateBrushName(controller.getMessages(), item, getBrush(item), activeName ? this : null); } } public static void updateSpellItem(Messages messages, ItemStack itemStack, SpellTemplate spell, String args, Wand wand, String activeMaterial, boolean isItem) { updateSpellItem(messages, itemStack, spell, args, wand == null ? 
null : wand.getActiveMage(), wand, activeMaterial, isItem); } public static void updateSpellItem(Messages messages, ItemStack itemStack, SpellTemplate spell, String args, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand, String activeMaterial, boolean isItem) { updateSpellName(messages, itemStack, spell, wand, activeMaterial); List<String> lore = new ArrayList<>(); addSpellLore(messages, spell, lore, mage, wand); if (isItem) { ConfigurationUtils.addIfNotEmpty(messages.get("wand.spell_item_description"), lore); } CompatibilityUtils.setLore(itemStack, lore); Object spellNode = CompatibilityUtils.createNode(itemStack, "spell"); CompatibilityUtils.setMeta(spellNode, "key", spell.getKey()); CompatibilityUtils.setMeta(spellNode, "args", args); if (SpellGlow) { CompatibilityUtils.addGlow(itemStack); } } public static void updateSpellName(Messages messages, ItemStack itemStack, SpellTemplate spell, Wand wand, String activeMaterial) { String displayName; if (wand != null && !wand.isQuickCast()) { displayName = wand.getActiveWandName(spell); } else { displayName = getSpellDisplayName(messages, spell, MaterialBrush.parseMaterialKey(activeMaterial)); } CompatibilityUtils.setDisplayName(itemStack, displayName); } public static void updateBrushName(Messages messages, ItemStack itemStack, String materialKey, Wand wand) { updateBrushName(messages, itemStack, MaterialBrush.parseMaterialKey(materialKey), wand); } public static void updateBrushName(Messages messages, ItemStack itemStack, MaterialBrush brush, Wand wand) { String displayName; if (wand != null) { Spell activeSpell = wand.getActiveSpell(); if (activeSpell != null && activeSpell.usesBrush()) { displayName = wand.getActiveWandName(brush); } else { displayName = ChatColor.RED + brush.getName(messages); } } else { displayName = brush.getName(messages); } CompatibilityUtils.setDisplayName(itemStack, displayName); } public static void updateBrushItem(Messages messages, ItemStack itemStack, String materialKey, Wand wand) { updateBrushItem(messages, itemStack, MaterialBrush.parseMaterialKey(materialKey), wand); } public static void updateBrushItem(Messages messages, ItemStack itemStack, MaterialBrush brush, Wand wand) { updateBrushName(messages, itemStack, brush, wand); Object brushNode = CompatibilityUtils.createNode(itemStack, "brush"); CompatibilityUtils.setMeta(brushNode, "key", brush.getKey()); } public void updateHotbar() { if (mage == null) return; if (!isInventoryOpen()) return; Player player = mage.getPlayer(); if (player == null) return; if (!hasStoredInventory()) return; WandMode wandMode = getMode(); if (wandMode == WandMode.INVENTORY) { PlayerInventory inventory = player.getInventory(); updateHotbar(inventory); DeprecatedUtils.updateInventory(player); } } private boolean updateHotbar(PlayerInventory playerInventory) { if (getMode() != WandMode.INVENTORY) return false; Inventory hotbar = getHotbar(); if (hotbar == null) return false; // Make sure the wand is still in the held slot ItemStack currentItem = playerInventory.getItem(heldSlot); if (currentItem == null || !currentItem.getItemMeta().equals(item.getItemMeta())) { controller.getLogger().warning("Trying to update hotbar but the wand has gone missing"); return false; } // Set hotbar items from remaining list int targetOffset = 0; for (int hotbarSlot = 0; hotbarSlot < HOTBAR_INVENTORY_SIZE; hotbarSlot++) { if (hotbarSlot == heldSlot) { targetOffset = 1; } ItemStack hotbarItem = hotbar.getItem(hotbarSlot); updateInventoryName(hotbarItem, true); playerInventory.setItem(hotbarSlot + 
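// targetOffset skips over the held slot so the wand item itself is not overwritten.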
targetOffset, hotbarItem); } return true; } private void updateInventory() { if (mage == null) return; if (!isInventoryOpen()) return; Player player = mage.getPlayer(); if (player == null) return; WandMode wandMode = getMode(); if (wandMode == WandMode.INVENTORY) { if (!hasStoredInventory()) return; PlayerInventory inventory = player.getInventory(); if (!updateHotbar(inventory)) { for (int i = 0; i < HOTBAR_SIZE; i++) { if (i != inventory.getHeldItemSlot()) { inventory.setItem(i, null); } } } updateInventory(inventory); updateName(); } else if (wandMode == WandMode.CHEST || wandMode == WandMode.SKILLS) { Inventory inventory = getDisplayInventory(); inventory.clear(); updateInventory(inventory); } } private void updateInventory(Inventory targetInventory) { // Set inventory from current page, taking into account hotbar offset int currentOffset = getHotbarSize() > 0 ? HOTBAR_SIZE : 0; List<Inventory> inventories = this.inventories; if (openInventoryPage < inventories.size()) { Inventory inventory = inventories.get(openInventoryPage); ItemStack[] contents = inventory.getContents(); for (int i = 0; i < contents.length; i++) { ItemStack inventoryItem = contents[i]; updateInventoryName(inventoryItem, false); targetInventory.setItem(currentOffset, inventoryItem); currentOffset++; } } for (;currentOffset < targetInventory.getSize() && currentOffset < PLAYER_INVENTORY_SIZE; currentOffset++) { targetInventory.setItem(currentOffset, null); } } protected static void addSpellLore(Messages messages, SpellTemplate spell, List<String> lore, com.elmakers.mine.bukkit.api.magic.Mage mage, Wand wand) { spell.addLore(messages, mage, wand, lore); } private String getInventoryTitle() { return getMessage("chest_inventory_title", "Wand"); } protected Inventory getOpenInventory() { while (openInventoryPage >= inventories.size()) { inventories.add(CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle())); } return inventories.get(openInventoryPage); } protected Inventory getDisplayInventory() { if (displayInventory == null || displayInventory.getSize() != getInventorySize()) { displayInventory = CompatibilityUtils.createInventory(null, getInventorySize(), getInventoryTitle()); } return displayInventory; } public void saveChestInventory() { if (displayInventory == null) return; Inventory openInventory = getOpenInventory(); Map<String, Integer> previousSlots = new HashMap<>(); Set<String> addedBack = new HashSet<>(); for (int i = 0; i < displayInventory.getSize(); i++) { ItemStack playerItem = displayInventory.getItem(i); String itemSpellKey = getSpell(playerItem); if (!updateSlot(i + openInventoryPage * getInventorySize(), playerItem)) { playerItem = new ItemStack(Material.AIR); displayInventory.setItem(i, playerItem); } else if (itemSpellKey != null) { addedBack.add(itemSpellKey); } // We don't want to clear items that were taken out, so save them to check later ItemStack current = openInventory.getItem(i); String spellKey = getSpell(current); if (spellKey != null) { previousSlots.put(spellKey, i); } openInventory.setItem(i, playerItem); } // Put back any items that were taken out for (Map.Entry<String, Integer> entry : previousSlots.entrySet()) { if (!addedBack.contains(entry.getKey())) { ItemStack current = openInventory.getItem(entry.getValue()); ItemStack itemStack = createSpellItem(entry.getKey(), "", false); if (current == null || current.getType() == Material.AIR) { openInventory.setItem(entry.getValue(), itemStack); } else { openInventory.addItem(itemStack); } } } } public void 
saveInventory() { if (mage == null) return; if (getMode() == WandMode.SKILLS) { saveChestInventory(); return; } if (!isInventoryOpen()) return; if (mage.getPlayer() == null) return; if (getMode() != WandMode.INVENTORY) return; if (!hasStoredInventory()) return; // Work-around glitches that happen if you're dragging an item on death if (mage.isDead()) return; // Fill in the hotbar Player player = mage.getPlayer(); PlayerInventory playerInventory = player.getInventory(); Inventory hotbar = getHotbar(); if (hotbar != null) { int saveOffset = 0; for (int i = 0; i < HOTBAR_SIZE; i++) { ItemStack playerItem = playerInventory.getItem(i); if (isWand(playerItem)) { saveOffset = -1; continue; } int hotbarOffset = i + saveOffset; if (hotbarOffset >= hotbar.getSize()) { // This can happen if there is somehow no wand in the wand inventory. break; } if (!updateSlot(i + saveOffset + currentHotbar * HOTBAR_INVENTORY_SIZE, playerItem)) { playerItem = new ItemStack(Material.AIR); playerInventory.setItem(i, playerItem); } hotbar.setItem(i + saveOffset, playerItem); } } // Fill in the active inventory page int hotbarOffset = getHotbarSize(); Inventory openInventory = getOpenInventory(); for (int i = 0; i < openInventory.getSize(); i++) { ItemStack playerItem = playerInventory.getItem(i + HOTBAR_SIZE); if (!updateSlot(i + hotbarOffset + openInventoryPage * getInventorySize(), playerItem)) { playerItem = new ItemStack(Material.AIR); playerInventory.setItem(i + HOTBAR_SIZE, playerItem); } openInventory.setItem(i, playerItem); } } protected boolean updateSlot(int slot, ItemStack item) { if (item == null || item.getType() == Material.AIR) return true; String spellKey = getSpell(item); if (spellKey != null) { SpellKey key = new SpellKey(spellKey); spellInventory.put(key.getBaseKey(), slot); } else { String brushKey = getBrush(item); if (brushKey != null) { brushInventory.put(brushKey, slot); } else if (mage != null) { // Must have been an item inserted directly into player's inventory? 
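// Hand the stray item back to the player; returning false tells the caller to clear this slot instead of recording it.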
mage.giveItem(item); return false; } } return true; } @Override public int enchant(int totalLevels, com.elmakers.mine.bukkit.api.magic.Mage mage, boolean addSpells) { return randomize(totalLevels, true, mage, addSpells); } @Override public int enchant(int totalLevels, com.elmakers.mine.bukkit.api.magic.Mage mage) { return randomize(totalLevels, true, mage, true); } @Override public int enchant(int totalLevels) { return randomize(totalLevels, true, null, true); } protected int randomize(int totalLevels, boolean additive, com.elmakers.mine.bukkit.api.magic.Mage enchanter, boolean addSpells) { if (enchanter == null && mage != null) { enchanter = mage; } if (maxEnchantCount > 0 && enchantCount >= maxEnchantCount) { if (enchanter != null && addSpells) { enchanter.sendMessage(getMessage("max_enchanted").replace("$wand", getName())); } return 0; } WandUpgradePath path = getPath(); if (path == null) { if (enchanter != null && addSpells) { enchanter.sendMessage(getMessage("no_path").replace("$wand", getName())); } return 0; } int minLevel = path.getMinLevel(); if (totalLevels < minLevel) { if (enchanter != null && addSpells) { String levelMessage = getMessage("need_more_levels"); levelMessage = levelMessage.replace("$levels", Integer.toString(minLevel)); enchanter.sendMessage(levelMessage); } return 0; } // Just a hard-coded sanity check int maxLevel = path.getMaxLevel(); totalLevels = Math.min(totalLevels, maxLevel * 50); int addLevels = Math.min(totalLevels, maxLevel); int levels = 0; boolean modified = true; while (addLevels >= minLevel && modified) { boolean hasUpgrade = path.hasUpgrade(); WandLevel level = path.getLevel(addLevels); if (!path.canEnchant(this) && (path.hasSpells() || path.hasMaterials())) { // Check for level up WandUpgradePath nextPath = path.getUpgrade(); if (nextPath != null) { if (path.checkUpgradeRequirements(this, addSpells ? enchanter : null)) { path.upgrade(this, enchanter); } break; } else { if (enchanter != null && addSpells) { enchanter.sendMessage(getMessage("fully_enchanted").replace("$wand", getName())); } break; } } modified = level.randomizeWand(enchanter, this, additive, hasUpgrade, addSpells); totalLevels -= maxLevel; if (modified) { if (enchanter != null) { path.enchanted(enchanter); } levels += addLevels; // Check for level up WandUpgradePath nextPath = path.getUpgrade(); if (nextPath != null && path.checkUpgradeRequirements(this, null) && !path.canEnchant(this)) { path.upgrade(this, enchanter); path = nextPath; } } else if (path.canEnchant(this)) { if (enchanter != null && levels == 0 && addSpells) { String message = getMessage("require_more_levels"); enchanter.sendMessage(message); } } else if (hasUpgrade) { if (path.checkUpgradeRequirements(this, addSpells ? 
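// The enchanter is only passed along when spell changes are allowed, presumably so requirement messages are suppressed otherwise.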
enchanter : null)) { path.upgrade(this, enchanter); levels += addLevels; } } else if (enchanter != null && addSpells) { enchanter.sendMessage(getMessage("fully_enchanted").replace("$wand", getName())); } addLevels = Math.min(totalLevels, maxLevel); additive = true; } if (levels > 0) { enchantCount++; setProperty("enchant_count", enchantCount); } saveState(); updateName(); updateLore(); return levels; } protected void randomize() { if (template != null && template.length() > 0) { ConfigurationSection wandConfig = controller.getWandTemplateConfiguration(template); if (wandConfig != null && wandConfig.contains("icon")) { String iconKey = wandConfig.getString("icon"); if (iconKey.contains(",")) { Random r = new Random(); String[] keys = StringUtils.split(iconKey, ','); iconKey = keys[r.nextInt(keys.length)]; } setIcon(ConfigurationUtils.toMaterialAndData(iconKey)); updateIcon(); playEffects("randomize"); } } } @Nullable public static Wand createWand(MagicController controller, String templateName) { if (controller == null) return null; Wand wand = null; try { wand = new Wand(controller, templateName); } catch (UnknownWandException ignore) { // the Wand constructor throws an exception on an unknown template } catch (Exception ex) { ex.printStackTrace(); } return wand; } @Nullable public static Wand createWand(MagicController controller, ItemStack itemStack) { if (controller == null) return null; Wand wand = null; try { wand = controller.getWand(InventoryUtils.makeReal(itemStack)); wand.saveState(); wand.updateName(); } catch (Exception ex) { ex.printStackTrace(); } return wand; } @Override public boolean add(com.elmakers.mine.bukkit.api.wand.Wand other) { if (other instanceof Wand) { return add((Wand)other); } return false; } public boolean add(Wand other) { return add(other, this.mage); } @Override public boolean add(com.elmakers.mine.bukkit.api.wand.Wand other, com.elmakers.mine.bukkit.api.magic.Mage mage) { if (other instanceof Wand) { return add((Wand)other, mage); } return false; } public boolean add(Wand other, com.elmakers.mine.bukkit.api.magic.Mage mage) { if (!isModifiable()) { // Only allow upgrading a modifiable wand via an upgrade item // and only if the paths match. 
if (!other.isUpgrade() || other.path == null || path == null || other.path.isEmpty() || path.isEmpty() || !other.path.equals(path)) { return false; } } // Can't combine limited-use wands if (hasUses || other.hasUses) { return false; } if (isHeroes || other.isHeroes) { return false; } ConfigurationSection templateConfig = controller.getWandTemplateConfiguration(other.getTemplateKey()); // Check for forced upgrades if (other.isForcedUpgrade()) { if (templateConfig == null) { return false; } templateConfig = ConfigurationUtils.cloneConfiguration(templateConfig); templateConfig.set("name", templateConfig.getString("upgrade_name")); templateConfig.set("description", templateConfig.getString("upgrade_description")); templateConfig.set("force", null); templateConfig.set("upgrade", null); templateConfig.set("icon", templateConfig.getString("upgrade_icon")); templateConfig.set("indestructible", null); templateConfig.set("upgrade_icon", null); configure(templateConfig); return true; } // Don't allow upgrades from an item on a different path if (other.isUpgrade() && other.path != null && !other.path.isEmpty() && (this.path == null || !this.path.equals(other.path))) { return false; } ConfigurationSection upgradeConfig = ConfigurationUtils.cloneConfiguration(other.getEffectiveConfiguration()); upgradeConfig.set("id", null); upgradeConfig.set("indestructible", null); upgradeConfig.set("upgrade", null); upgradeConfig.set("icon", other.upgradeIcon == null ? null : other.upgradeIcon.getKey()); upgradeConfig.set("upgrade_icon", null); upgradeConfig.set("template", other.upgradeTemplate); Messages messages = controller.getMessages(); if (other.rename && templateConfig != null) { String newName = messages.get("wands." + other.template + ".name"); newName = templateConfig.getString("name", newName); upgradeConfig.set("name", newName); } else { upgradeConfig.set("name", null); } if (other.renameDescription && templateConfig != null) { String newDescription = messages.get("wands." + other.template + ".description"); newDescription = templateConfig.getString("description", newDescription); upgradeConfig.set("description", newDescription); } else { upgradeConfig.set("description", null); } return upgrade(upgradeConfig); } public boolean isForcedUpgrade() { return isUpgrade && forceUpgrade; } public boolean keepOnDeath() { return keep; } public static WandMode parseWandMode(String modeString, WandMode defaultValue) { if (modeString != null && !modeString.isEmpty()) { try { defaultValue = WandMode.valueOf(modeString.toUpperCase()); } catch (Exception ignored) { } } return defaultValue; } public static WandAction parseWandAction(String actionString, WandAction defaultValue) { if (actionString != null && !actionString.isEmpty()) { try { defaultValue = WandAction.valueOf(actionString.toUpperCase()); } catch (Exception ignored) { } } return defaultValue; } private void updateActiveMaterial() { if (mage == null) return; if (activeBrush == null) { mage.clearBuildingMaterial(); } else { com.elmakers.mine.bukkit.api.block.MaterialBrush brush = mage.getBrush(); brush.update(activeBrush); } } public void cycleActive(int direction) { Player player = mage != null ? 
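// While sneaking, cycling switches to brushes if the active spell uses brush selection; otherwise spells are cycled.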
mage.getPlayer() : null; if (player != null && player.isSneaking()) { com.elmakers.mine.bukkit.api.spell.Spell activeSpell = getActiveSpell(); boolean cycleMaterials = false; if (activeSpell != null) { cycleMaterials = activeSpell.usesBrushSelection(); } if (cycleMaterials) { cycleMaterials(direction); } else { cycleSpells(direction); } } else { cycleSpells(direction); } } public void toggleInventory() { if (mage != null && mage.cancelSelection()) { mage.playSoundEffect(noActionSound); return; } Player player = mage == null ? null : mage.getPlayer(); boolean isSneaking = player != null && player.isSneaking(); Spell currentSpell = getActiveSpell(); if (getBrushMode() == WandMode.CHEST && brushSelectSpell != null && !brushSelectSpell.isEmpty() && isSneaking && currentSpell != null && currentSpell.usesBrushSelection()) { Spell brushSelect = mage.getSpell(brushSelectSpell); if (brushSelect != null) { brushSelect.cast(); return; } } if (!hasInventory) { if (activeSpell == null || activeSpell.length() == 0) { // Sanity check, so it'll switch to inventory next time updateHasInventory(); if (spells.size() > 0) { setActiveSpell(spells.iterator().next()); } } updateName(); return; } if (!isInventoryOpen()) { openInventory(); } else { closeInventory(); } } public void updateHasInventory() { int inventorySize = getSpells().size() + getBrushes().size(); hasInventory = inventorySize > 1 || (inventorySize == 1 && hasSpellProgression) || autoFill; } public void cycleInventory() { cycleInventory(1); } public void cycleInventory(int direction) { if (!hasInventory) { return; } if (isInventoryOpen()) { saveInventory(); int inventoryCount = inventories.size(); setOpenInventoryPage(inventoryCount == 0 ? 0 : (openInventoryPage + inventoryCount + direction) % inventoryCount); updateInventory(); if (mage != null && inventories.size() > 1) { if (!playPassiveEffects("cycle") && inventoryCycleSound != null) { mage.playSoundEffect(inventoryCycleSound); } DeprecatedUtils.updateInventory(mage.getPlayer()); } } } @Override public void cycleHotbar() { cycleHotbar(1); } public void cycleHotbar(int direction) { if (!hasInventory || getMode() != WandMode.INVENTORY) { return; } if (isInventoryOpen() && mage != null && hotbars.size() > 1) { saveInventory(); int hotbarCount = hotbars.size(); setCurrentHotbar(hotbarCount == 0 ? 
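// Adding hotbarCount before the modulo keeps the result non-negative when cycling backwards (direction of -1).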
0 : (currentHotbar + hotbarCount + direction) % hotbarCount); updateHotbar(); if (!playPassiveEffects("cycle") && inventoryCycleSound != null) { mage.playSoundEffect(inventoryCycleSound); } sendMessage("hotbar_changed"); updateHotbarStatus(); DeprecatedUtils.updateInventory(mage.getPlayer()); } } public void openInventory() { if (mage == null) return; if (System.currentTimeMillis() < mage.getWandDisableTime()) return; WandMode wandMode = getMode(); if (wandMode == WandMode.CHEST || wandMode == WandMode.SKILLS) { inventoryIsOpen = true; if (!playPassiveEffects("open") && inventoryOpenSound != null) { mage.playSoundEffect(inventoryOpenSound); } updateInventory(); mage.getPlayer().openInventory(getDisplayInventory()); } else if (wandMode == WandMode.INVENTORY) { if (hasStoredInventory()) return; if (storeInventory()) { inventoryIsOpen = true; showActiveIcon(true); if (!playPassiveEffects("open") && inventoryOpenSound != null) { mage.playSoundEffect(inventoryOpenSound); } updateInventory(); updateHotbarStatus(); } } } @Override public void closeInventory() { closeInventory(true); } public void closeInventory(boolean closePlayerInventory) { if (!isInventoryOpen()) return; controller.disableItemSpawn(); inventoryWasOpen = true; WandMode mode = getMode(); try { saveInventory(); updateSpellInventory(); updateBrushInventory(); inventoryIsOpen = false; if (mage != null) { if (!playPassiveEffects("close") && inventoryCloseSound != null) { mage.playSoundEffect(inventoryCloseSound); } if (mode == WandMode.INVENTORY) { restoreInventory(); showActiveIcon(false); } else if (closePlayerInventory) { mage.getPlayer().closeInventory(); } // Check for items the player might've glitched onto their body... PlayerInventory inventory = mage.getPlayer().getInventory(); ItemStack testItem = inventory.getHelmet(); if (isSpell(testItem) || isBrush(testItem)) { inventory.setHelmet(new ItemStack(Material.AIR)); DeprecatedUtils.updateInventory(mage.getPlayer()); } testItem = inventory.getBoots(); if (isSpell(testItem) || isBrush(testItem)) { inventory.setBoots(new ItemStack(Material.AIR)); DeprecatedUtils.updateInventory(mage.getPlayer()); } testItem = inventory.getLeggings(); if (isSpell(testItem) || isBrush(testItem)) { inventory.setLeggings(new ItemStack(Material.AIR)); DeprecatedUtils.updateInventory(mage.getPlayer()); } testItem = inventory.getChestplate(); if (isSpell(testItem) || isBrush(testItem)) { inventory.setChestplate(new ItemStack(Material.AIR)); DeprecatedUtils.updateInventory(mage.getPlayer()); } // This is kind of a hack :( testItem = inventory.getItemInOffHand(); if ((isSpell(testItem) && !isSkill(testItem)) || isBrush(testItem)) { inventory.setItemInOffHand(new ItemStack(Material.AIR)); DeprecatedUtils.updateInventory(mage.getPlayer()); } } } catch (Throwable ex) { restoreInventory(); } if (mode == WandMode.INVENTORY && mage != null && closePlayerInventory) { try { mage.getPlayer().closeInventory(); } catch (Throwable ex) { ex.printStackTrace(); } } controller.enableItemSpawn(); inventoryWasOpen = false; } @Override public boolean fill(Player player) { return fill(player, 0); } @Override public boolean fill(Player player, int maxLevel) { // This is for the editor, it saves using player logins and is *not* // directly related to mage ids. This has to use player id. 
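// When FILL_CREATOR is set, only spells whose creator id matches this player id are kept or added below.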
String playerId = player.getUniqueId().toString(); closeInventory(); // Update the inventory to make sure we don't overwrite slots of current spells if (this.mage != null) { buildInventory(); } Collection<String> currentSpells = new ArrayList<>(getSpells()); for (String spellKey : currentSpells) { SpellTemplate spell = controller.getSpellTemplate(spellKey); boolean removeSpell = !spell.hasCastPermission(player); String creatorId = spell.getCreatorId(); removeSpell = removeSpell || (FILL_CREATOR && (creatorId == null || !playerId.equals(creatorId))); if (removeSpell) { removeSpell(spellKey); } } Collection<SpellTemplate> allSpells = controller.getPlugin().getSpellTemplates(); // Hack to prevent messaging Mage mage = this.mage; this.mage = null; for (SpellTemplate spell : allSpells) { String key = spell.getKey(); if (maxLevel > 0 && spell.getSpellKey().getLevel() > maxLevel) { continue; } if (key.startsWith("heroes*")) { continue; } String creatorId = spell.getCreatorId(); if (FILL_CREATOR && (creatorId == null || !playerId.equals(creatorId))) { continue; } if (spell.hasCastPermission(player) && spell.hasIcon() && !spell.isHidden()) { addSpell(key); } } this.mage = mage; updateSpellInventory(); updateBrushInventory(); if (this.mage != null) { buildInventory(); } if (!FILL_CREATOR) { if (autoFill) setProperty("fill", false); autoFill = false; } saveState(); return true; } protected void checkActiveMaterial() { if (activeBrush == null || activeBrush.length() == 0) { Set<String> materials = getBrushes(); if (materials.size() > 0) { activeBrush = materials.iterator().next(); } } } @Override public boolean addItem(ItemStack item) { if (isUpgrade) return false; if (isModifiable() && isSpell(item) && !isSkill(item)) { String spell = getSpell(item); SpellKey spellKey = new SpellKey(spell); Integer currentLevel = spellLevels.get(spellKey.getBaseKey()); if ((currentLevel == null || currentLevel < spellKey.getLevel()) && addSpell(spell)) { return true; } } else if (isModifiable() && isBrush(item)) { String materialKey = getBrush(item); Set<String> materials = getBrushes(); if (!materials.contains(materialKey) && addBrush(materialKey)) { return true; } } else if (isUpgrade(item)) { Wand wand = controller.getWand(item); return this.add(wand); } if (mage != null && !mage.isAtMaxSkillPoints() && controller.skillPointItemsEnabled()) { Integer sp = getSP(item); if (sp != null) { int amount = (int)Math.floor(mage.getEarnMultiplier() * sp * item.getAmount()); mage.addSkillPoints(amount); return true; } } return false; } protected void updateEffects() { updateEffects(mage); } public void updateEffects(Mage mage) { if (mage == null) return; Player player = mage.getPlayer(); if (player == null) return; // Update Bubble effects effects if (effectBubbles && effectColor != null) { Location potionEffectLocation = player.getLocation(); potionEffectLocation.setX(potionEffectLocation.getX() + random.nextDouble() - 0.5); potionEffectLocation.setY(potionEffectLocation.getY() + random.nextDouble() * player.getEyeHeight()); potionEffectLocation.setZ(potionEffectLocation.getZ() + random.nextDouble() - 0.5); EffectPlayer.displayParticle(Particle.SPELL_MOB, potionEffectLocation, 0, 0, 0, 0, 0, 1, effectColor.getColor(), null, (byte)0, 24); } Location location = mage.getLocation(); long now = System.currentTimeMillis(); boolean playEffects = !activeEffectsOnly || inventoryIsOpen || isInOffhand; if (playEffects && effectParticle != null && effectParticleInterval > 0 && effectParticleCount > 0) { boolean velocityCheck = true; if 
(effectParticleMinVelocity > 0) { double velocitySquared = effectParticleMinVelocity * effectParticleMinVelocity; Vector velocity = mage.getVelocity().clone(); velocity.setY(0); double speedSquared = velocity.lengthSquared(); velocityCheck = (speedSquared > velocitySquared); } if (velocityCheck && (lastParticleEffect == 0 || now > lastParticleEffect + effectParticleInterval)) { lastParticleEffect = now; Location effectLocation = player.getLocation(); Location eyeLocation = player.getEyeLocation(); effectLocation.setY(eyeLocation.getY() + effectParticleOffset); if (effectPlayer == null) { effectPlayer = new EffectRing(controller.getPlugin()); effectPlayer.setParticleCount(1); effectPlayer.setIterations(1); effectPlayer.setParticleOffset(0, 0, 0); } effectPlayer.setMaterial(location.getBlock().getRelative(BlockFace.DOWN)); if (effectParticleData == 0) { effectPlayer.setColor(getEffectColor()); } else { effectPlayer.setColor(null); } effectPlayer.setParticleType(effectParticle); effectPlayer.setParticleData(effectParticleData); effectPlayer.setSize(effectParticleCount); effectPlayer.setRadius((float)effectParticleRadius); effectPlayer.start(effectLocation, null); } } if (castSpell != null && castInterval > 0) { if (lastSpellCast == 0 || now > lastSpellCast + castInterval) { boolean velocityCheck = true; if (castMinVelocity > 0) { double velocitySquared = castMinVelocity * castMinVelocity; Vector velocity = mage.getVelocity(); if (castVelocityDirection != null) { velocity = velocity.clone().multiply(castVelocityDirection); // This is kind of a hack to make jump-detection work. if (castVelocityDirection.getY() < 0) { velocityCheck = velocity.getY() < 0; } else { velocityCheck = velocity.getY() > 0; } } if (velocityCheck) { double speedSquared = velocity.lengthSquared(); velocityCheck = (speedSquared > velocitySquared); } } if (velocityCheck) { lastSpellCast = now; Spell spell = mage.getSpell(castSpell); if (spell != null) { if (castParameters == null) { castParameters = new MemoryConfiguration(); } castParameters.set("passive", true); mage.setCostFree(true); mage.setQuiet(true); try { spell.cast(castParameters); } catch (Exception ex) { controller.getLogger().log(Level.WARNING, "Error casting aura spell " + spell.getKey(), ex); } mage.setCostFree(false); mage.setQuiet(false); } } } } if (playEffects && effectSound != null && controller.soundsEnabled() && effectSoundInterval > 0) { if (lastSoundEffect == 0 || now > lastSoundEffect + effectSoundInterval) { lastSoundEffect = now; effectSound.play(controller.getPlugin(), mage.getPlayer()); } } } protected void updateDurability() { int maxDurability = item.getType().getMaxDurability(); if (maxDurability > 0 && effectiveManaMax > 0) { int durability = (short)(getMana() * maxDurability / effectiveManaMax); durability = maxDurability - durability; if (durability >= maxDurability) { durability = maxDurability - 1; } else if (durability < 0) { durability = 0; } item.setDurability((short)durability); } } public boolean usesXPBar() { return (usesCurrency() && currencyMode.useXP()) || (usesMana() && manaMode.useXP()); } public boolean usesXPNumber() { return (usesCurrency() && currencyMode.useXPNumber()) || (usesMana() && manaMode.useXP()); } public boolean hasSpellProgression() { return hasSpellProgression; } public boolean usesXPDisplay() { return usesXPBar() || usesXPNumber(); } @Override public void updateMana() { Player player = mage == null ? 
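// Nothing to refresh unless the wand's mage is an online player.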
null : mage.getPlayer(); if (player == null) return; float mana = getMana(); if (usesMana()) { if (manaMode.useGlow()) { if (mana == effectiveManaMax) { CompatibilityUtils.addGlow(item); } else { CompatibilityUtils.removeGlow(item); } } if (manaMode.useDurability()) { updateDurability(); } } if (usesXPDisplay()) { int playerLevel = player.getLevel(); float playerProgress = player.getExp(); if (usesMana() && manaMode.useXPNumber()) { playerLevel = (int) mana; } if (usesMana() && manaMode.useXPBar()) { playerProgress = Math.min(Math.max(0, mana / effectiveManaMax), 1); } if (usesCurrency() && currencyMode.useXPNumber()) { playerLevel = (int)Math.ceil(currencyDisplay.getBalance(mage, this)); } mage.sendExperience(playerProgress, playerLevel); } } @Override public boolean isInventoryOpen() { return mage != null && inventoryIsOpen; } // Somewhat hacky method to handle inventory close event knowing that this was a wand inventory that just closed. public boolean wasInventoryOpen() { return inventoryWasOpen; } @Override public void unbind() { if (!bound) return; com.elmakers.mine.bukkit.api.magic.Mage owningMage = this.mage; deactivate(); if (ownerId != null) { if (owningMage == null || !owningMage.getId().equals(ownerId)) { owningMage = controller.getRegisteredMage(ownerId); } if (owningMage != null) { owningMage.unbind(this); } ownerId = null; } bound = false; owner = null; setProperty("bound", false); setProperty("owner", null); setProperty("owner_id", null); saveState(); updateLore(); updateName(); } @Override public void bind() { if (bound) return; Mage holdingMage = mage; deactivate(); bound = true; setProperty("bound", true); saveState(); if (holdingMage != null) { holdingMage.checkWand(); } } @Override public void deactivate() { deactivate(true); } public void deactivate(boolean closePlayerInventory) { if (mage == null) return; // Play deactivate FX playPassiveEffects("deactivate"); // Cancel effects if (effectContext != null) { int cancelDelay = getInt("cancel_effects_delay", 0); if (cancelDelay == 0) { effectContext.cancelEffects(); } else { Plugin plugin = controller.getPlugin(); final WandEffectContext context = effectContext; plugin.getServer().getScheduler().runTaskLater(plugin, new Runnable() { @Override public void run() { context.cancelEffects(); } }, cancelDelay * 20 / 1000); } } Mage mage = this.mage; if (isInventoryOpen()) { closeInventory(closePlayerInventory); } showActiveIcon(false); storedInventory = null; if (usesXPNumber() || usesXPBar()) { mage.resetSentExperience(); } saveState(); mage.deactivateWand(this); this.mage = null; updateMaxMana(true); } @Nullable @Override public Spell getActiveSpell() { if (mage == null) return null; String activeSpellKey = getActiveSpellKey(); if (activeSpellKey == null || activeSpellKey.length() == 0) return null; return mage.getSpell(activeSpellKey); } @Nullable public Spell getAlternateSpell() { if (mage == null || alternateSpell == null || alternateSpell.length() == 0) return null; return mage.getSpell(alternateSpell); } @Nullable public Spell getAlternateSpell2() { if (mage == null || alternateSpell2 == null || alternateSpell2.length() == 0) return null; return mage.getSpell(alternateSpell2); } @Nullable @Override public SpellTemplate getBaseSpell(String spellName) { return getBaseSpell(new SpellKey(spellName)); } @Nullable public SpellTemplate getBaseSpell(SpellKey key) { if (!spells.contains(key.getBaseKey())) return null; SpellKey baseKey = new SpellKey(key.getBaseKey(), getSpellLevel(key.getBaseKey())); return 
controller.getSpellTemplate(baseKey.getKey()); } @Override public String getActiveSpellKey() { String activeSpellKey = activeSpell; Integer level = spellLevels.get(activeSpellKey); if (level != null) { activeSpellKey = new SpellKey(activeSpellKey, level).getKey(); } return activeSpellKey; } @Override public String getActiveBrushKey() { return activeBrush; } @Override public void damageDealt(double damage, Entity target) { if (manaPerDamage > 0) { int manaMax = getEffectiveManaMax(); float mana = getMana(); if (manaMax > 0 && mana < manaMax) { setMana(Math.min(manaMax, mana + (float)damage * manaPerDamage)); updateMana(); } } } public boolean alternateCast() { return cast(getAlternateSpell()); } public boolean alternateCast2() { return cast(getAlternateSpell2()); } @Override public boolean cast() { return cast(getActiveSpell(), null); } @Override public boolean cast(String[] parameters) { return cast(getActiveSpell(), parameters); } public boolean cast(Spell spell) { return cast(spell, null); } public boolean cast(Spell spell, String[] parameters) { if (spell != null) { Collection<String> castParameters = null; if (castOverrides != null && castOverrides.size() > 0) { castParameters = new ArrayList<>(); for (Map.Entry<String, String> entry : castOverrides.entrySet()) { String[] key = StringUtils.split(entry.getKey(), '.'); if (key.length == 0) continue; if (key.length == 2 && !key[0].equals("default") && !key[0].equals(spell.getSpellKey().getBaseKey()) && !key[0].equals(spell.getSpellKey().getKey())) { continue; } castParameters.add(key.length == 2 ? key[1] : key[0]); castParameters.add(entry.getValue()); } } if (parameters != null) { if (castParameters == null) { castParameters = new ArrayList<>(); } for (String parameter : parameters) { castParameters.add(parameter); } } if (spell.cast(castParameters == null ? null : castParameters.toArray(EMPTY_PARAMETERS))) { Color spellColor = spell.getColor(); use(); if (spellColor != null && this.effectColor != null) { this.effectColor = this.effectColor.mixColor(spellColor, effectColorSpellMixWeight); setProperty("effect_color", effectColor.toString()); // Note that we don't save this change. // The hope is that the wand will get saved at some point later // And we don't want to trigger NBT writes every spell cast. // And the effect color morphing isn't all that important if a few // casts get lost. } updateHotbarStatus(); return true; } } return false; } protected void use() { if (hasUses) { findItem(); ItemStack item = getItem(); if (item.getAmount() > 1) { item.setAmount(item.getAmount() - 1); } else { if (uses > 0) { uses--; } if (uses <= 0 && mage != null) { // If the wand is not currently active it will be destroyed on next activate Player player = mage.getPlayer(); deactivate(); PlayerInventory playerInventory = player.getInventory(); if (item.getAmount() > 1) { item.setAmount(item.getAmount() - 1); } else { if (isInOffhand) { playerInventory.setItemInOffHand(new ItemStack(Material.AIR, 1)); } else { playerInventory.setItemInMainHand(new ItemStack(Material.AIR, 1)); } } DeprecatedUtils.updateInventory(player); } setProperty("uses", uses); saveState(); updateName(); updateLore(); } } } // Taken from NMS HumanEntity public static int getExpToLevel(int expLevel) { return expLevel >= 30 ? 112 + (expLevel - 30) * 9 : (expLevel >= 15 ? 
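// Vanilla XP curve: the cost to gain a level is 7 + 2*level below level 15, 37 + 5*(level - 15) up to 29,
// and 112 + 9*(level - 30) from level 30 on.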
37 + (expLevel - 15) * 5 : 7 + expLevel * 2); } public static int getExperience(int expLevel, float expProgress) { int xp = 0; for (int level = 0; level < expLevel; level++) { xp += Wand.getExpToLevel(level); } return xp + (int) (expProgress * Wand.getExpToLevel(expLevel)); } protected void updateHotbarStatus() { Player player = mage == null ? null : mage.getPlayer(); if (player != null && LiveHotbar && getMode() == WandMode.INVENTORY && isInventoryOpen()) { mage.updateHotbarStatus(); } } @Override public boolean tickMana() { if (isHeroes) { HeroesManager heroes = controller.getHeroes(); if (heroes != null && mage != null && mage.isPlayer()) { Player player = mage.getPlayer(); effectiveManaMax = heroes.getMaxMana(player); effectiveManaRegeneration = heroes.getManaRegen(player); setManaMax(effectiveManaMax); setManaRegeneration(effectiveManaRegeneration); setMana(heroes.getMana(player)); return true; } return false; } return super.tickMana(); } @Override public void tick() { if (mage == null) return; Player player = mage.getPlayer(); if (player == null) return; super.tick(); if (usesMana() && !isInOffhand) { updateMana(); } if (player.isBlocking() && blockMageCooldown > 0) { mage.setRemainingCooldown(blockMageCooldown); } // Update hotbar glow if (!isInOffhand) { updateHotbarStatus(); } if (!passive) { updateEffects(); } } @Override public void armorUpdated() { updateMaxMana(true); } protected void updateMaxMana(boolean updateLore) { if (isHeroes) return; if (!hasOwnMana() && mageClass != null) { if (mageClass.updateMaxMana(mage) && updateLore) { updateLore(); } effectiveManaMax = mageClass.getEffectiveManaMax(); effectiveManaRegeneration = mageClass.getEffectiveManaRegeneration(); } else if (super.updateMaxMana(mage) && updateLore) { updateLore(); } } public void cycleSpells(int direction) { ArrayList<String> spells = new ArrayList<>(this.spells); if (spells.size() == 0) return; if (activeSpell == null) { setActiveSpell(spells.get(0)); return; } int spellIndex = 0; for (int i = 0; i < spells.size(); i++) { if (spells.get(i).equals(activeSpell)) { spellIndex = i; break; } } spellIndex = (spellIndex + direction) % spells.size(); setActiveSpell(spells.get(spellIndex)); } public void cycleMaterials(int direction) { Set<String> materialsSet = getBrushes(); ArrayList<String> materials = new ArrayList<>(materialsSet); if (materials.size() == 0) return; if (activeBrush == null) { setActiveBrush(StringUtils.split(materials.get(0), '@')[0]); return; } int materialIndex = 0; for (int i = 0; i < materials.size(); i++) { if (StringUtils.split(materials.get(i),'@')[0].equals(activeBrush)) { materialIndex = i; break; } } materialIndex = (materialIndex + direction) % materials.size(); setActiveBrush(StringUtils.split(materials.get(materialIndex), '@')[0]); } @Nullable public Mage getActiveMage() { // TODO: Duplicate of #getMage() return mage; } public void setActiveMage(com.elmakers.mine.bukkit.api.magic.Mage mage) { if (mage instanceof Mage) { this.mage = (Mage)mage; armorUpdated(); } } @Nullable @Override public Color getEffectColor() { return effectColor == null ? null : effectColor.getColor(); } public Particle getEffectParticle() { return effectParticle; } @Nullable @Override public String getEffectParticleName() { return effectParticle == null ? 
null : effectParticle.name(); } @Nullable public Inventory getHotbar() { if (this.hotbars.size() == 0) return null; if (currentHotbar < 0 || currentHotbar >= this.hotbars.size()) { setCurrentHotbar(0); } return this.hotbars.get(currentHotbar); } public int getHotbarCount() { if (getMode() != WandMode.INVENTORY) return 0; return hotbars.size(); } public List<Inventory> getHotbars() { return hotbars; } @Override public boolean isQuickCastDisabled() { return quickCastDisabled; } public boolean isManualQuickCastDisabled() { return manualQuickCastDisabled; } @Override public boolean isQuickCast() { return quickCast; } public WandMode getMode() { return mode; } public WandMode getBrushMode() { return brushMode; } public void setMode(WandMode mode) { this.mode = mode; } public void setBrushMode(WandMode mode) { this.brushMode = mode; } @Override public boolean showCastMessages() { return quietLevel == 0; } @Override public boolean showMessages() { return quietLevel < 2; } public boolean isStealth() { return quietLevel > 2; } @Override public void setPath(String path) { String oldPath = this.path; this.path = path; setProperty("path", path); // Handle the case of a path upgrade meaning there are suddenly more spells or brushes available boolean updateInventory = limitBrushesToPath || limitSpellsToPath; if (!oldPath.equals(path) && updateInventory) { closeInventory(); if (limitSpellsToPath) { loadSpells(); } if (limitBrushesToPath) { loadBrushes(); } buildInventory(); } } /* * Public API Implementation */ @Override public boolean isLost(com.elmakers.mine.bukkit.api.wand.LostWand lostWand) { return this.id != null && this.id.equals(lostWand.getId()); } @Override public LostWand makeLost(Location location) { checkId(); saveState(); return new LostWand(this, location); } protected void showActiveIcon(boolean show) { if (this.icon == null || this.inactiveIcon == null || this.inactiveIcon.getMaterial() == Material.AIR || this.inactiveIcon.getMaterial() == null) return; if (this.icon.getMaterial() == Material.AIR || this.icon.getMaterial() == null) { this.icon.setMaterial(DefaultWandMaterial); } if (show) { if (inactiveIconDelay > 0) { Plugin plugin = controller.getPlugin(); plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { @Override public void run() { findItem(); icon.applyToItem(item); } }, inactiveIconDelay * 20 / 1000); } else { findItem(); icon.applyToItem(item); } } else { findItem(); inactiveIcon.applyToItem(this.item); } } public boolean activateOffhand(Mage mage) { return activate(mage, true); } @Override @Deprecated public void activate(com.elmakers.mine.bukkit.api.magic.Mage mage) { if (mage instanceof Mage) { activate((Mage)mage); } } public boolean activate(Mage mage) { return activate(mage, false); } public boolean activate(Mage mage, boolean offhand) { if (mage == null) return false; Player player = mage.getPlayer(); if (player == null) return false; if (!controller.hasWandPermission(player, this)) return false; InventoryView openInventory = player.getOpenInventory(); InventoryType inventoryType = openInventory.getType(); if (inventoryType == InventoryType.ENCHANTING || inventoryType == InventoryType.ANVIL) return false; if (hasUses && uses <= 0) { if (offhand) { player.getInventory().setItemInOffHand(new ItemStack(Material.AIR, 1)); } else { player.getInventory().setItemInMainHand(new ItemStack(Material.AIR, 1)); } return false; } if (!canUse(player)) { mage.sendMessage(getMessage("bound").replace("$name", getOwner())); return false; } if (this.isUpgrade) 
{ controller.getLogger().warning("Activated an upgrade item- this shouldn't happen"); return false; } WandPreActivateEvent preActivateEvent = new WandPreActivateEvent(mage, this); Bukkit.getPluginManager().callEvent(preActivateEvent); if (preActivateEvent.isCancelled()) { return false; } boolean needsSave = false; if (hasId) { needsSave = this.checkId() || needsSave; } else { setProperty("id", null); } this.mage = mage; this.isInOffhand = offhand; this.heldSlot = offhand ? OFFHAND_SLOT : player.getInventory().getHeldItemSlot(); if (mageClassKeys != null && !mageClassKeys.isEmpty()) { MageClass mageClass = null; for (String mageClassKey : mageClassKeys) { mageClass = mage.getClass(mageClassKey); if (mageClass != null) break; } if (mageClass == null) { Integer lastSlot = mage.getLastActivatedSlot(); if (!offhand && (lastSlot == null || lastSlot != player.getInventory().getHeldItemSlot())) { mage.setLastActivatedSlot(player.getInventory().getHeldItemSlot()); mage.sendMessage(controller.getMessages().get("mage.no_class").replace("$name", getName())); } return false; } setMageClass(mageClass); if (!offhand) { mage.setActiveClass(mageClass.getKey()); } } MageParameters wrapped = new MageParameters(mage); wrapped.wrap(configuration); load(wrapped); // This double-load here is not really ideal. // Seems hard to prevent without merging Wand construction and activation, though. loadProperties(); mage.setLastActivatedSlot(player.getInventory().getHeldItemSlot()); // Check for replacement template String replacementTemplate = getString("replace_on_activate", ""); if (!replacementTemplate.isEmpty() && !replacementTemplate.equals(template)) { playEffects("replace"); setTemplate(replacementTemplate); loadProperties(); saveState(); return activate(mage, offhand); } // Since these wands can't be opened we will just show them as open when held // We have to delay this 1 tick so it happens after the Mage has accepted the Wand if ((getMode() != WandMode.INVENTORY || offhand) && controller.isInitialized()) { Plugin plugin = controller.getPlugin(); plugin.getServer().getScheduler().scheduleSyncDelayedTask(plugin, new Runnable() { @Override public void run() { showActiveIcon(true); playPassiveEffects("open"); } }, 1); } // Check for an empty wand and auto-fill if (!isUpgrade && (controller.fillWands() || autoFill)) { fill(mage.getPlayer(), controller.getMaxWandFillLevel()); needsSave = true; } if (isHeroes) { HeroesManager heroes = controller.getHeroes(); if (heroes != null) { Set<String> skills = heroes.getSkills(player); Collection<String> currentSpells = new ArrayList<>(getSpells()); for (String spellKey : currentSpells) { if (spellKey.startsWith("heroes*") && !skills.contains(spellKey.substring(7))) { removeSpell(spellKey); } } // Hack to prevent messaging this.mage = null; for (String skillKey : skills) { String heroesKey = "heroes*" + skillKey; if (!spells.contains(heroesKey)) { addSpell(heroesKey); } } this.mage = mage; } } // Check for auto-organize if (autoOrganize && !isUpgrade) { organizeInventory(mage); needsSave = true; } // Check for auto-alphabetize if (autoAlphabetize && !isUpgrade) { alphabetizeInventory(); needsSave = true; } boolean forceUpdate = false; if (checkInventoryForUpgrades()) { forceUpdate = true; needsSave = true; } // Check for auto-bind if (bound) { String mageName = ChatColor.stripColor(mage.getPlayer().getDisplayName()); String mageId = mage.getId(); boolean ownerRenamed = owner != null && ownerId != null && ownerId.equals(mageId) && !owner.equals(mageName); if (ownerId == null 
|| ownerId.length() == 0 || owner == null || ownerRenamed) { takeOwnership(mage.getPlayer()); needsSave = true; } } // Check for randomized wands if (randomizeOnActivate) { randomize(); randomizeOnActivate = false; forceUpdate = true; needsSave = true; } // Don't build the inventory until activated so we can take Mage boosts into account if (offhand) { mage.setOffhandWand(this); } else { mage.setActiveWand(this); } buildInventory(); updateMaxMana(false); tick(); if (!isInOffhand) { updateMana(); } checkActiveMaterial(); if (needsSave) { saveState(); } updateActiveMaterial(); updateName(); updateLore(); // Play activate FX playPassiveEffects("activate"); lastSoundEffect = 0; lastParticleEffect = 0; lastSpellCast = 0; if (forceUpdate) { DeprecatedUtils.updateInventory(player); } return true; } public boolean checkInventoryForUpgrades() { boolean updated = false; Player player = mage == null ? null : mage.getPlayer(); if (player == null || mage.hasStoredInventory()) return false; // Check for spell or other special icons in the player's inventory Inventory inventory = player.getInventory(); ItemStack[] items = inventory.getContents(); for (int i = 0; i < items.length; i++) { ItemStack item = items[i]; if (addItem(item)) { inventory.setItem(i, null); updated = true; } } return updated; } private void setOpenInventoryPage(int page) { this.openInventoryPage = page; this.setProperty("page", page); } @Override public boolean organizeInventory() { if (mage != null) { return organizeInventory(mage); } return false; } @Override public boolean organizeInventory(com.elmakers.mine.bukkit.api.magic.Mage mage) { WandOrganizer organizer = new WandOrganizer(this, mage); closeInventory(); organizer.organize(); setOpenInventoryPage(0); setCurrentHotbar(currentHotbar); if (autoOrganize) setProperty("organize", false); autoOrganize = false; updateSpellInventory(); updateBrushInventory(); if (this.mage != null) { buildInventory(); } return true; } @Override public boolean alphabetizeInventory() { WandOrganizer organizer = new WandOrganizer(this); closeInventory(); organizer.alphabetize(); setOpenInventoryPage(0); setCurrentHotbar(0); if (autoAlphabetize) setProperty("alphabetize", false); autoAlphabetize = false; updateSpellInventory(); updateBrushInventory(); if (mage != null) { buildInventory(); } return true; } @Override public com.elmakers.mine.bukkit.api.wand.Wand duplicate() { ItemStack newItem = InventoryUtils.getCopy(item); Wand newWand = controller.getWand(newItem); newWand.saveState(); return newWand; } @Override @Deprecated public boolean configure(Map<String, Object> properties) { Map<Object, Object> convertedProperties = new HashMap<>(properties); configure(ConfigurationUtils.toConfigurationSection(convertedProperties)); return true; } @Override public void updated() { if (suspendUpdate) return; loadProperties(); if (mage != null) { buildInventory(); if (isInventoryOpen()) { updateInventory(); } } saveState(); updateMaxMana(false); updateName(); updateLore(); } @Override public boolean isLocked() { return this.locked; } @Override public boolean upgradesAllowed() { return !this.locked || this.lockedAllowUpgrades; } @Override public void unlock() { locked = false; setProperty("locked", false); } public boolean isPassive() { return passive; } @Override public boolean canUse(Player player) { if (!bound || ownerId == null || ownerId.length() == 0) return true; if (controller.hasPermission(player, "Magic.wand.override_bind", false)) return true; String playerId = 
controller.getMageIdentifier().fromEntity(player); if (ownerId.equalsIgnoreCase(playerId)) { return true; } // Fall back to checking the UUID rather than the mage ID // This can be removed when all AMC wands have been migrated return ownerId.equals(player.getUniqueId().toString()); } @Override public boolean addSpell(String spellName) { if (!isModifiable()) return false; return forceAddSpell(spellName); } @Override public boolean forceAddSpell(String spellName) { SpellTemplate template = controller.getSpellTemplate(spellName); if (template == null) { return false; } SpellKey spellKey = template.getSpellKey(); if (limitSpellsToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsSpell(spellKey.getBaseKey())) return false; } suspendUpdate = true; if (!super.addSpell(spellName)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); ItemStack spellItem = createSpellItem(spellKey.getKey()); if (spellItem == null) { return false; } int level = spellKey.getLevel(); int inventoryCount = inventories.size(); int spellCount = spells.size(); // Look for existing spells for spell upgrades Integer inventorySlot = spellInventory.get(spellKey.getBaseKey()); clearSlot(inventorySlot); setSpellLevel(spellKey.getBaseKey(), level); spells.add(spellKey.getBaseKey()); if (activeSpell == null || activeSpell.isEmpty()) { setActiveSpell(spellKey.getBaseKey()); } addToInventory(spellItem, inventorySlot); checkSpellLevelsAndInventory(); updateInventory(); updateHasInventory(); saveState(); updateLore(); if (mage != null) { if (spells.size() != spellCount) { if (spellCount == 0) { if (leftClickAction == WandAction.CAST) { String message = getMessage("spell_instructions", "").replace("$wand", getName()); mage.sendMessage(message.replace("$spell", template.getName())); } } else if (spellCount == 1) { String controlKey = getControlKey(WandAction.TOGGLE); String inventoryMessage = null; switch (getMode()) { case INVENTORY: inventoryMessage = "inventory_instructions"; break; case CHEST: inventoryMessage = "chest_instructions"; break; case SKILLS: inventoryMessage = "skills_instructions"; break; case CYCLE: inventoryMessage = "cycle_instructions"; if (controlKey == null) { controlKey = getControlKey(WandAction.CYCLE); } break; case CAST: case NONE: // Ignore break; } if (controlKey != null && inventoryMessage != null) { controlKey = controller.getMessages().get("controls." + controlKey); mage.sendMessage(getMessage(inventoryMessage, "") .replace("$wand", getName()).replace("$toggle", controlKey).replace("$cycle", controlKey)); } } if (inventoryCount == 1 && inventories.size() > 1) { mage.sendMessage(getMessage("page_instructions", "").replace("$wand", getName())); } } } return true; } /** * Covers the special case of a wand having spell levels and inventory slots that came from configs, * but now we've modified the spells list and need to figure out if we also need to pesist the levels and * slots separately. * * <p>This should all be moved to CasterProperties at some point to handle the same sort of issues with mage class * configs. 
*/ private void checkSpellLevelsAndInventory() { if (!spellLevels.isEmpty()) { MagicProperties storage = getStorage("spell_levels"); if (storage == null || storage == this) { if (!configuration.contains("spell_levels")) { configuration.set("spell_levels", spellLevels); } } } if (!spellInventory.isEmpty()) { MagicProperties storage = getStorage("spell_inventory"); if (storage == null || storage == this) { if (!configuration.contains("spell_inventory")) { configuration.set("spell_inventory", spellInventory); } } } } private void clearSlot(Integer slot) { if (slot != null) { Inventory inventory = getInventory(slot); slot = getInventorySlot(slot); inventory.setItem(slot, null); } } @Override public String getMessage(String messageKey, String defaultValue) { String message = super.getMessage(messageKey, defaultValue); // Some special-casing here, not sure how to avoid. if (messageKey.equals("hotbar_count_usage")) { String controlKey = getControlKey(WandAction.CYCLE_HOTBAR); if (controlKey != null) { controlKey = controller.getMessages().get("controls." + controlKey); message = message.replace("$cycle_hotbar", controlKey); } else { return ""; } } return message; } @Override protected String getMessageKey(String key) { String wandKey = "wands." + template + "." + key; if (template != null && !template.isEmpty() && controller.getMessages().containsKey(wandKey)) { return wandKey; } return "wand." + key; } @Override protected String parameterizeMessage(String message) { return message.replace("$wand", getName()); } @Override public boolean hasBrush(String materialKey) { if (limitBrushesToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsBrush(materialKey)) return false; } return getBrushes().contains(materialKey); } @Override public boolean hasSpell(String spellName) { return hasSpell(new SpellKey(spellName)); } @Override public boolean hasSpell(SpellKey spellKey) { if (!spells.contains(spellKey.getBaseKey())) return false; if (limitSpellsToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsSpell(spellKey.getBaseKey())) return false; } int level = getSpellLevel(spellKey.getBaseKey()); return (level >= spellKey.getLevel()); } @Override public boolean addBrush(String materialKey) { if (!isModifiable()) return false; if (limitBrushesToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsBrush(materialKey)) return false; } suspendUpdate = true; if (!super.addBrush(materialKey)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); ItemStack itemStack = createBrushIcon(materialKey); if (itemStack == null) return false; int inventoryCount = inventories.size(); int brushCount = brushes.size(); brushInventory.put(materialKey, null); brushes.add(materialKey); addToInventory(itemStack); if (activeBrush == null || activeBrush.length() == 0) { activateBrush(materialKey); } else { updateInventory(); } updateHasInventory(); saveState(); updateLore(); if (mage != null) { if (brushCount == 0) { String controlKey = getControlKey(WandAction.TOGGLE); if (controlKey != null) { controlKey = controller.getMessages().get("controls." 
+ controlKey); mage.sendMessage(getMessage("brush_instructions") .replace("$wand", getName()).replace("$toggle", controlKey)); } } if (inventoryCount == 1 && inventories.size() > 1) { mage.sendMessage(getMessage("page_instructions").replace("$wand", getName())); } } return true; } @Override public void setActiveBrush(String materialKey) { activateBrush(materialKey); if (materialKey == null || mage == null) { return; } com.elmakers.mine.bukkit.api.block.MaterialBrush brush = mage.getBrush(); if (brush == null) { return; } boolean eraseWasActive = brush.isEraseModifierActive(); brush.activate(mage.getLocation(), materialKey); BrushMode mode = brush.getMode(); if (mode == BrushMode.CLONE) { mage.sendMessage(getMessage("clone_material_activated")); } else if (mode == BrushMode.REPLICATE) { mage.sendMessage(getMessage("replicate_material_activated")); } if (!eraseWasActive && brush.isEraseModifierActive()) { mage.sendMessage(getMessage("erase_modifier_activated")); } } public void setActiveBrush(ItemStack itemStack) { if (!isBrush(itemStack)) return; setActiveBrush(getBrush(itemStack)); } public void activateBrush(String materialKey) { this.activeBrush = materialKey; setProperty("active_brush", this.activeBrush); saveState(); updateActiveMaterial(); updateName(); updateHotbar(); } @Override public void setActiveSpell(String activeSpell) { if (activeSpell != null) { SpellKey spellKey = new SpellKey(activeSpell); this.activeSpell = spellKey.getBaseKey(); } else { this.activeSpell = null; } checkActiveSpell(); setProperty("active_spell", this.activeSpell); saveState(); updateName(); } protected void checkActiveSpell() { // Support wands with just an active spell and no spells list if (activeSpell != null && !spells.isEmpty() && !spells.contains(activeSpell)) { activeSpell = null; } } @Override public boolean removeBrush(String materialKey) { if (!isModifiable() || materialKey == null) return false; if (limitBrushesToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsBrush(materialKey)) return false; } suspendUpdate = true; if (!removeBrush(materialKey)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); if (materialKey.equals(activeBrush)) { activeBrush = null; } clearSlot(brushInventory.get(materialKey)); brushInventory.remove(materialKey); boolean found = brushes.remove(materialKey); if (activeBrush == null && brushes.size() > 0) { activeBrush = brushes.iterator().next(); } updateActiveMaterial(); updateInventory(); updateBrushInventory(); saveState(); updateName(); updateLore(); return found; } @Override public boolean removeSpell(String spellName) { if (!isModifiable()) return false; SpellKey spellKey = new SpellKey(spellName); if (limitSpellsToPath) { WandUpgradePath path = getPath(); if (path != null && !path.containsSpell(spellKey.getBaseKey())) return false; } suspendUpdate = true; if (!super.removeSpell(spellName)) { suspendUpdate = false; return false; } suspendUpdate = false; saveInventory(); if (activeSpell != null) { SpellKey activeKey = new SpellKey(activeSpell); if (spellKey.getBaseKey().equals(activeKey.getBaseKey())) { setActiveSpell(null); } } clearSlot(spellInventory.get(spellKey.getBaseKey())); spells.remove(spellKey.getBaseKey()); spellLevels.remove(spellKey.getBaseKey()); spellInventory.remove(spellKey.getBaseKey()); if (activeSpell == null && spells.size() > 0) { setActiveSpell(spells.iterator().next()); } checkSpellLevelsAndInventory(); updateInventory(); updateHasInventory(); updateSpellInventory(); saveState(); 
updateName(); updateLore(); return true; } @Override public Map<String, String> getOverrides() { return castOverrides == null ? new HashMap<>() : new HashMap<>(castOverrides); } @Override public void setOverrides(Map<String, String> overrides) { if (overrides == null) { this.castOverrides = null; } else { this.castOverrides = new HashMap<>(overrides); } updateOverrides(); } @Override public void removeOverride(String key) { if (castOverrides != null) { castOverrides.remove(key); updateOverrides(); } } @Override public void setOverride(String key, String value) { if (castOverrides == null) { castOverrides = new HashMap<>(); } if (value == null || value.length() == 0) { castOverrides.remove(key); } else { castOverrides.put(key, value); } updateOverrides(); } @Override public boolean addOverride(String key, String value) { if (castOverrides == null) { castOverrides = new HashMap<>(); } boolean modified = false; if (value == null || value.length() == 0) { modified = castOverrides.containsKey(key); castOverrides.remove(key); } else { String current = castOverrides.get(key); modified = current == null || !current.equals(value); castOverrides.put(key, value); } if (modified) { updateOverrides(); } return modified; } protected void updateOverrides() { if (castOverrides != null && !castOverrides.isEmpty()) { setProperty("overrides", castOverrides); } else { setProperty("overrides", null); } } public boolean hasStoredInventory() { return storedInventory != null; } public Inventory getStoredInventory() { return storedInventory; } public boolean addToStoredInventory(ItemStack item) { if (storedInventory == null) { return false; } HashMap<Integer, ItemStack> remainder = storedInventory.addItem(item); return remainder.size() == 0; } public void setHeldSlot(int slot) { this.heldSlot = slot; } public boolean storeInventory() { if (storedInventory != null) { if (mage != null) { mage.sendMessage("Your wand contains a previously stored inventory and will not activate, let go of it to clear."); } controller.getLogger().warning("Tried to store an inventory with one already present: " + (mage == null ? "?" 
: mage.getName())); return false; } Player player = mage.getPlayer(); if (player == null) { return false; } PlayerInventory inventory = player.getInventory(); storedInventory = CompatibilityUtils.createInventory(null, PLAYER_INVENTORY_SIZE, "Stored Inventory"); for (int i = 0; i < PLAYER_INVENTORY_SIZE; i++) { ItemStack item = inventory.getItem(i); storedInventory.setItem(i, item); if (i != heldSlot) { inventory.setItem(i, null); } } return true; } public boolean restoreInventory() { if (storedInventory == null) { return false; } Player player = mage.getPlayer(); if (player == null) { return false; } PlayerInventory inventory = player.getInventory(); for (int i = 0; i < storedInventory.getSize(); i++) { if (i != heldSlot) { inventory.setItem(i, storedInventory.getItem(i)); } } storedInventory = null; inventory.setHeldItemSlot(heldSlot); return true; } @Override @Deprecated public boolean isSoul() { return false; } public static boolean isBound(ItemStack item) { Object wandSection = InventoryUtils.getNode(item, WAND_KEY); if (wandSection == null) return false; String boundValue = InventoryUtils.getMetaString(wandSection, "owner_id"); return boundValue != null; } @Override public boolean isBound() { return bound; } @Nullable @Override public SpellTemplate getSpellTemplate(String spellKey) { SpellKey key = new SpellKey(spellKey); spellKey = key.getBaseKey(); if (!spells.contains(spellKey)) return null; Integer level = spellLevels.get(spellKey); if (level != null) { spellKey = new SpellKey(spellKey, level).getKey(); } return controller.getSpellTemplate(spellKey); } private void setSpellLevel(String spellKey, int level) { if (level <= 1) { spellLevels.remove(spellKey); } else { spellLevels.put(spellKey, level); } } @Override public int getSpellLevel(String spellKey) { Integer level = spellLevels.get(spellKey); return level == null ? 1 : level; } @Override public MageController getController() { return controller; } protected Map<String, Integer> getSpellInventory() { return new HashMap<>(spellInventory); } protected Map<String, Integer> getBrushInventory() { return new HashMap<>(brushInventory); } public Map<PotionEffectType, Integer> getPotionEffects() { return potionEffects; } @Override public float getHealthRegeneration() { Integer level = potionEffects.get(PotionEffectType.REGENERATION); return level != null && level > 0 ? (float)level : 0; } @Override public float getHungerRegeneration() { Integer level = potionEffects.get(PotionEffectType.SATURATION); return level != null && level > 0 ? 
(float)level : 0; } @Nullable @Override public WandTemplate getTemplate() { if (template == null || template.isEmpty()) return null; return controller.getWandTemplate(template); } public boolean playPassiveEffects(String effects) { WandTemplate wandTemplate = getTemplate(); if (wandTemplate != null && mage != null) { boolean offhandActive = mage.setOffhandActive(isInOffhand); boolean result = false; try { result = wandTemplate.playEffects(this, effects); } catch (Exception ex) { result = false; controller.getLogger().log(Level.WARNING, "Error playing effects " + effects + " from wand " + template, ex); } mage.setOffhandActive(offhandActive); return result; } return false; } @Override public boolean playEffects(String effects) { if (activeEffectsOnly && !inventoryIsOpen) { return false; } return playPassiveEffects(effects); } @Override public WandAction getDropAction() { return dropAction; } @Override public WandAction getRightClickAction() { return rightClickAction; } @Override public WandAction getLeftClickAction() { return leftClickAction; } @Override public WandAction getSwapAction() { return swapAction; } @Override public boolean performAction(WandAction action) { WandMode mode = getMode(); switch (action) { case CAST: cast(); break; case ALT_CAST: alternateCast(); break; case ALT_CAST2: alternateCast2(); break; case TOGGLE: if (mode == WandMode.CYCLE) { cycleActive(1); return true; } if (mode != WandMode.CHEST && mode != WandMode.INVENTORY && mode != WandMode.SKILLS) return false; toggleInventory(); break; case CYCLE: cycleActive(1); break; case CYCLE_REVERSE: cycleActive(-1); break; case CYCLE_HOTBAR: if (mode != WandMode.INVENTORY || !isInventoryOpen()) return false; if (getHotbarCount() > 1) { cycleHotbar(1); } else { closeInventory(); } break; case CYCLE_HOTBAR_REVERSE: if (mode != WandMode.INVENTORY) return false; if (getHotbarCount() > 1) { cycleHotbar(-1); } else if (isInventoryOpen()) { closeInventory(); } else { return false; } break; default: return false; } return true; } @Override public boolean checkAndUpgrade(boolean quiet) { WandUpgradePath path = getPath(); WandUpgradePath nextPath = path != null ? path.getUpgrade() : null; if (nextPath == null) { return true; } if (canProgress()) { return true; } if (!path.checkUpgradeRequirements(this, quiet ? null : mage)) { return false; } path.upgrade(this, mage); return true; } @Override public boolean hasUpgrade() { WandUpgradePath path = getPath(); return path != null && path.hasUpgrade(); } @Override public boolean checkUpgrade(boolean quiet) { WandUpgradePath path = getPath(); return path == null || !path.hasUpgrade() ? false : path.checkUpgradeRequirements(this, quiet ? null : mage); } @Override @Deprecated public boolean upgrade(Map<String, Object> properties) { Map<Object, Object> convertedProperties = new HashMap<>(properties); return upgrade(ConfigurationUtils.toConfigurationSection(convertedProperties)); } @Override public boolean upgrade(boolean quiet) { WandUpgradePath path = getPath(); if (path == null) return false; path.upgrade(this, quiet ? 
null : mage); return true; } @Override public boolean isBlocked(double angle) { if (mage == null) return false; if (blockChance == 0) return false; if (blockFOV > 0 && angle > blockFOV) return false; long lastBlock = mage.getLastBlockTime(); if (blockCooldown > 0 && lastBlock > 0 && lastBlock + blockCooldown > System.currentTimeMillis()) return false; boolean isBlocked = Math.random() <= blockChance; if (isBlocked) { playEffects("spell_blocked"); mage.setLastBlockTime(System.currentTimeMillis()); } return isBlocked; } @Override public boolean isReflected(double angle) { if (mage == null) return false; if (blockReflectChance == 0) return false; if (blockFOV > 0 && angle > blockFOV) return false; long lastBlock = mage.getLastBlockTime(); if (blockCooldown > 0 && lastBlock > 0 && lastBlock + blockCooldown > System.currentTimeMillis()) return false; boolean isReflected = Math.random() <= blockReflectChance; if (isReflected) { playEffects("spell_reflected"); if (mage != null) mage.setLastBlockTime(System.currentTimeMillis()); } return isReflected; } @Nullable @Override public Location getLocation() { if (mage == null) { return null; } Location wandLocation = mage.getEyeLocation(); wandLocation = mage.getOffsetLocation(wandLocation, isInOffhand, castLocation == null ? DEFAULT_CAST_OFFSET : castLocation); return wandLocation; } @Nullable @Override public Mage getMage() { return mage; } @Override public @Nullable MageClass getMageClass() { return mageClass; } @Override public @Nullable String getMageClassKey() { if (mageClass != null) { return mageClass.getKey(); } return mageClassKeys == null || mageClassKeys.isEmpty() ? null : mageClassKeys.get(0); } public void setCurrentHotbar(int hotbar) { this.currentHotbar = hotbar; setProperty("hotbar", currentHotbar); } public int getInventorySize() { WandMode mode = getMode(); if (mode == WandMode.CHEST || mode == WandMode.SKILLS) { return 9 * inventoryRows; } return INVENTORY_SIZE; } public boolean usesCurrency() { if (currencyDisplay == null || !hasSpellProgression || earnMultiplier <= 0 || !currencyDisplay.isValid()) return false; if (currencyDisplay.getKey().equals("sp") && !controller.isSPEarnEnabled()) return false; return true; } public boolean usesCurrency(String type) { return usesCurrency() && currencyDisplay.getKey().equals(type); } public boolean usesSP() { return controller.isSPEarnEnabled() && usesCurrency("sp"); } @Override public int getHeldSlot() { return heldSlot; } @Nullable @Override protected BaseMagicConfigurable getStorage(MagicPropertyType propertyType) { switch (propertyType) { case WAND: return this; case SUBCLASS: return mageClass; case CLASS: if (mageClass == null) { if (mage == null) { controller.getLogger().warning("Something is trying to modify a wand when it's not held, this may not work out"); } else { controller.getLogger().warning("Something is trying to modify a wand that has no class, this may not work out"); } Thread.dumpStack(); } return mageClass == null ? null : mageClass.getRoot(); case MAGE: if (mage == null) { controller.getLogger().warning("Something is trying to modify a wand when it's not held, this may not work out"); Thread.dumpStack(); } return mage == null ? null : mage.getProperties(); } return null; } @Override public boolean isPlayer() { return mage == null ? false : mage.isPlayer(); } @Nullable @Override public Player getPlayer() { return mage == null ? 
null : mage.getPlayer(); } @Override @Nonnull public WandEffectContext getEffectContext() { if (effectContext == null || (effectContext.getMage() != mage)) { // Lazy load or mage has changed effectContext = new WandEffectContext(mage, this); } return verifyNotNull(effectContext); } @Override public Wand getWand() { return this; } @Override public boolean isInOffhand() { return isInOffhand; } }
Fix potential NPE on loading an invalid wand icon
Magic/src/main/java/com/elmakers/mine/bukkit/wand/Wand.java
Fix potential NPE on loading an invalid wand icon
Java
mit
ee1b35fb44ac18c3358a101c710569218cdc2e32
0
amwenger/igv,igvteam/igv,itenente/igv,amwenger/igv,itenente/igv,godotgildor/igv,godotgildor/igv,amwenger/igv,itenente/igv,igvteam/igv,igvteam/igv,godotgildor/igv,amwenger/igv,igvteam/igv,godotgildor/igv,godotgildor/igv,itenente/igv,itenente/igv,igvteam/igv,amwenger/igv
/* * Copyright (c) 2007-2012 The Broad Institute, Inc. * SOFTWARE COPYRIGHT NOTICE * This software and its documentation are the copyright of the Broad Institute, Inc. All rights are reserved. * * This software is supplied without any warranty or guaranteed support whatsoever. The Broad Institute is not responsible for its use, misuse, or functionality. * * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), * Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php. */ package org.broad.igv.util.collections; import org.broad.igv.AbstractHeadlessTest; import org.broad.igv.data.Interval; import org.broad.igv.sam.AlignmentDataManager; import org.broad.igv.sam.AlignmentDataManagerTest; import org.broad.igv.ui.panel.ReferenceFrame; import org.junit.After; import org.junit.Test; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; /** * User: jacob * Date: 2012-Oct-16 */ public class CachedIntervalsTest extends AbstractHeadlessTest { private CachedIntervals cachedIntervals; @After public void tearDown() throws Exception { super.tearDown(); cachedIntervals = null; } private static List<ReferenceFrame> getTestFrames(String chr) { ReferenceFrame frame0 = new ReferenceFrame("leftFrame"); frame0.setChromosomeName(chr); frame0.setOrigin(151666680); frame0.setBounds(0, 5); System.out.println(frame0.getEnd()); ReferenceFrame frame1 = new ReferenceFrame("rightFrame"); frame1.setChromosomeName(chr); frame1.setOrigin(153537238); frame1.setBounds(0, 5); return Arrays.asList(frame0, frame1); } /** * Test that having an interval which spans between two reference frames * still exists after trimming * * @throws Exception */ @Test public void testTrimSplit() throws Exception { tstTrim(TrimTestType.SPLIT); } /** * Test that having an interval which spans between two reference frames * still exists after trimming * * @throws Exception */ @Test public void testTrimSpan() throws Exception { tstTrim(TrimTestType.SPAN); } public void tstTrim(TrimTestType testType) throws Exception { //We set a small maxintervalsize to make sure it gets trimmed cachedIntervals = new CachedIntervals<Interval>(5, 100); String chr = "chr1"; List<ReferenceFrame> frameList = getTestFrames(chr); assertTrue(frameList.get(1).getOrigin() > frameList.get(0).getEnd()); cachedIntervals.setLocusList(frameList); AlignmentDataManager manager = AlignmentDataManagerTest.getManager171(); int offset = 5; int alQueryStart, alQueryEnd; int postTrimStart, postTrimEnd; switch (testType) { case SPLIT: alQueryStart = (int) frameList.get(0).getEnd() - 200; alQueryEnd = (int) frameList.get(1).getOrigin() + 200; postTrimStart = alQueryStart; postTrimEnd = alQueryEnd; break; case SPAN: alQueryStart = (int) frameList.get(0).getOrigin() - 100; alQueryEnd = (int) frameList.get(1).getEnd() + 100; postTrimStart = (int) frameList.get(0).getOrigin(); postTrimEnd = (int) frameList.get(1).getEnd(); break; default: throw new IllegalArgumentException("Unknown testType " + testType); } //Put in a small interval first, adding the second one triggers the trim Interval interval_0 = AlignmentDataManagerTest.loadInterval(manager, chr, alQueryStart, alQueryStart + offset); cachedIntervals.put(interval_0); Interval interval_1 = AlignmentDataManagerTest.loadInterval(manager, chr, alQueryStart + offset, alQueryEnd + offset); cachedIntervals.put(interval_1); assertEquals(1, 
cachedIntervals.getContains(chr, postTrimStart, postTrimEnd, -1).size()); } private enum TrimTestType { //Interval is partially covered by two different reference frames SPLIT, //Interval completely covers two different reference frames SPAN } }
test/src/org/broad/igv/util/collections/CachedIntervalsTest.java
/* * Copyright (c) 2007-2012 The Broad Institute, Inc. * SOFTWARE COPYRIGHT NOTICE * This software and its documentation are the copyright of the Broad Institute, Inc. All rights are reserved. * * This software is supplied without any warranty or guaranteed support whatsoever. The Broad Institute is not responsible for its use, misuse, or functionality. * * This software is licensed under the terms of the GNU Lesser General Public License (LGPL), * Version 2.1 which is available at http://www.opensource.org/licenses/lgpl-2.1.php. */ package org.broad.igv.util.collections; import org.broad.igv.AbstractHeadlessTest; import org.broad.igv.data.Interval; import org.broad.igv.sam.AlignmentDataManager; import org.broad.igv.sam.AlignmentDataManagerTest; import org.broad.igv.ui.panel.ReferenceFrame; import org.junit.After; import org.junit.Test; import java.util.Arrays; import java.util.List; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; /** * User: jacob * Date: 2012-Oct-16 */ public class CachedIntervalsTest extends AbstractHeadlessTest { private CachedIntervals cachedIntervals; @After public void tearDown() throws Exception { super.tearDown(); cachedIntervals = null; } private static List<ReferenceFrame> getTestFrames(String chr) { ReferenceFrame frame0 = new ReferenceFrame("leftFrame"); frame0.setChromosomeName(chr); frame0.setOrigin(151666680); frame0.setBounds(0, 10); ReferenceFrame frame1 = new ReferenceFrame("rightFrame"); frame1.setChromosomeName(chr); frame1.setOrigin(155537238); frame1.setBounds(0, 10); return Arrays.asList(frame0, frame1); } /** * Test that having an interval which spans between two reference frames * still exists after trimming * * @throws Exception */ @Test public void testTrimSplit() throws Exception { tstTrim(TrimTestType.SPLIT); } /** * Test that having an interval which spans between two reference frames * still exists after trimming * * @throws Exception */ @Test public void testTrimSpan() throws Exception { tstTrim(TrimTestType.SPAN); } public void tstTrim(TrimTestType testType) throws Exception { //We set a small maxintervalsize to make sure it gets trimmed cachedIntervals = new CachedIntervals<Interval>(5, 100); String chr = "chr1"; List<ReferenceFrame> frameList = getTestFrames(chr); assertTrue(frameList.get(1).getOrigin() > frameList.get(0).getEnd()); cachedIntervals.setLocusList(frameList); AlignmentDataManager manager = AlignmentDataManagerTest.getManager171(); int offset = 5; int alQueryStart, alQueryEnd; int postTrimStart, postTrimEnd; switch (testType) { case SPLIT: alQueryStart = (int) frameList.get(0).getEnd() - 200; alQueryEnd = (int) frameList.get(1).getOrigin() + 200; postTrimStart = alQueryStart; postTrimEnd = alQueryEnd; break; case SPAN: alQueryStart = (int) frameList.get(0).getOrigin() - 100; alQueryEnd = (int) frameList.get(1).getEnd() + 100; postTrimStart = (int) frameList.get(0).getOrigin(); postTrimEnd = (int) frameList.get(1).getEnd(); break; default: throw new IllegalArgumentException("Unknown testType " + testType); } //Put in a small interval first, adding the second one triggers the trim Interval interval_0 = AlignmentDataManagerTest.loadInterval(manager, chr, alQueryStart, alQueryStart + offset); cachedIntervals.put(interval_0); Interval interval_1 = AlignmentDataManagerTest.loadInterval(manager, chr, alQueryStart + offset, alQueryEnd + offset); cachedIntervals.put(interval_1); assertEquals(1, cachedIntervals.getContains(chr, postTrimStart, postTrimEnd, -1).size()); } private enum TrimTestType 
{ //Interval is partially covered by two different reference frames SPLIT, //Interval completely covers two different reference frames SPAN } }
modify CachedIntervalsTest so the test intervals are smaller (uses less memory, hopefully won't crash Jenkins)
test/src/org/broad/igv/util/collections/CachedIntervalsTest.java
modify CachedIntervalsTest so the test intervals are smaller (uses less memory, hopefully won't crash Jenkins)
Java
epl-1.0
5c9611b332091d088e970fce6abee2de1cc5f7b3
0
01org/mayloon-portingtool,backwind1233/mayloon-portingtool,royleexhFake/mayloon-portingtool,royleexhFake/mayloon-portingtool,modulexcite/mayloon-portingtool,01org/mayloon-portingtool,modulexcite/mayloon-portingtool,modulexcite/mayloon-portingtool,backwind1233/mayloon-portingtool
/******************************************************************************* * Copyright (c) 2007 java2script.org and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Zhou Renjian - initial API and implementation *******************************************************************************/ package net.sf.j2s.core.astvisitors; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.jdt.core.dom.Expression; /** * Final variables inside anonymous class is a big thing for Java2Script * compiler. And Java2Script compiler also tries to minimize the variable * name. * * @author zhou renjian * * 2006-12-3 */ public class ASTVariableVisitor extends AbstractPluginVisitor { /** * List of variables that are declared as final. */ protected List finalVars = new ArrayList(); /** * Final variables only make senses (need "this.f$[...]") inside anonymous * class. */ protected boolean isFinalSensible = true; /** * Normal (non-final) variables may be affected by final variable names. */ protected List normalVars = new ArrayList(); /** * Only those final variables that are referenced inside anonymous class * need to be passed into anonymous class. */ protected List visitedVars = new ArrayList(); /** * Whether to compile variable names into minimized names or not */ protected boolean toCompileVariableName = true; public boolean isToCompileVariableName() { return toCompileVariableName; } public void setToCompileVariableName(boolean toCompileVariableName) { this.toCompileVariableName = toCompileVariableName; } protected String getVariableName(String name) { for (int i = normalVars.size() - 1; i >= 0; i--) { ASTFinalVariable var = (ASTFinalVariable) normalVars.get(i); if (name.equals(var.variableName)) { //return getIndexedVarName(name, i); return var.toVariableName; } } return name; } /** * Try to return a minimized variable name for the given index order. * @param name * @param i * @return */ public String getIndexedVarName(String name, int i) { if (!toCompileVariableName) { return name; } String newName = null; while (true) { if (i < 26) { newName = String.valueOf((char) ('a' + i)); } else if (i < 52) { newName = String.valueOf((char) ('A' + (i - 26))); } else { /* * Here compiler assumes that there are no project with more than * 26 * 26 variables. */ int h = i / 26; int l = i % 26; newName = String.valueOf((char) ('a' + h)) + String.valueOf((char) ('a' + l)); } for (Iterator iter = finalVars.iterator(); iter.hasNext();) { ASTFinalVariable f = (ASTFinalVariable) iter.next(); if (newName.equals(f.toVariableName)) { newName = null; i++; break; } } if (newName != null) { for (Iterator iter = normalVars.iterator(); iter.hasNext();) { ASTFinalVariable f = (ASTFinalVariable) iter.next(); if (newName.equals(f.toVariableName)) { newName = null; i++; break; } } } if (newName != null) { break; } } return newName; } /** * Generated final variable list for anonymous class creation. 
* <ol> * <li>Generate "null" if there are no referenced final variales inside * anonymous class</li> * <li>Generate "Clazz.cloneFinals (...)" if there are referenced final * variable</li> * </ol> * * @param list * @param seperator * @param scope * @return */ protected String listFinalVariables(List list, String seperator, String scope) { if (list.size() == 0) { return "null"; } StringBuffer buf = new StringBuffer(); buf.append("Clazz.cloneFinals ("); for (Iterator iter = list.iterator(); iter.hasNext();) { ASTFinalVariable fv = (ASTFinalVariable) iter.next(); String name = fv.variableName; if (fv.toVariableName != null) { name = fv.toVariableName; } buf.append("\""); buf.append(name); buf.append("\", "); String methodScope = fv.methodScope; if (methodScope == null && scope == null) { buf.append(name); } else if (methodScope == null || scope == null) { buf.append("this.$finals." + name); } else if (methodScope.equals(scope)) { buf.append(name); } else { buf.append("this.$finals." + name); } if (iter.hasNext()) { buf.append(seperator); } } buf.append(")"); return buf.toString(); } /** * handle the special symbol for char and string. * @param char * @return String **/ protected String checkCharValue(char c) { StringBuffer buffer = new StringBuffer(); if (c == '\\' || c == '\'' || c == '\"') { buffer.append('\\'); buffer.append(c); } else if (c == '\r') { buffer.append("\\r"); } else if (c == '\n') { buffer.append("\\n"); } else if (c == '\t') { buffer.append("\\t"); } else if (c == '\f') { buffer.append("\\f"); } else if (c < 32 || c > 127) { buffer.append("\\u"); String hexStr = Integer.toHexString(c); int zeroLen = 4 - hexStr.length(); for (int i = 0; i < zeroLen; i++) { buffer.append('0'); } buffer.append(hexStr); } else { buffer.append(c); } return buffer.toString(); } /** * If given expression is constant value expression, return its value * string; or return null. * * @param node * @return */ protected String checkConstantValue(Expression node) { Object constValue = node.resolveConstantExpressionValue(); if (constValue != null && (constValue instanceof Number || constValue instanceof Character || constValue instanceof Boolean)) { StringBuffer buffer = new StringBuffer(); if (constValue instanceof Character) { buffer.append('\''); char charValue = ((Character)constValue).charValue(); buffer.append(checkCharValue(charValue)); buffer.append('\''); } else { buffer.append(constValue); } return buffer.toString(); } if (constValue != null && (constValue instanceof String)) { StringBuffer buffer = new StringBuffer(); String str = (String) constValue; int length = str.length(); /* if (length > 20) { return null; }*/ buffer.append("\""); for (int i = 0; i < length; i++) { char c = str.charAt(i); buffer.append(checkCharValue(c)); } buffer.append("\""); return buffer.toString(); } return null; } }
sources/net.sf.j2s.core/src/net/sf/j2s/core/astvisitors/ASTVariableVisitor.java
/******************************************************************************* * Copyright (c) 2007 java2script.org and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Zhou Renjian - initial API and implementation *******************************************************************************/ package net.sf.j2s.core.astvisitors; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import org.eclipse.jdt.core.dom.Expression; /** * Final variables inside anonymous class is a big thing for Java2Script * compiler. And Java2Script compiler also tries to minimize the variable * name. * * @author zhou renjian * * 2006-12-3 */ public class ASTVariableVisitor extends AbstractPluginVisitor { /** * List of variables that are declared as final. */ protected List finalVars = new ArrayList(); /** * Final variables only make senses (need "this.f$[...]") inside anonymous * class. */ protected boolean isFinalSensible = true; /** * Normal (non-final) variables may be affected by final variable names. */ protected List normalVars = new ArrayList(); /** * Only those final variables that are referenced inside anonymous class * need to be passed into anonymous class. */ protected List visitedVars = new ArrayList(); /** * Whether to compile variable names into minimized names or not */ protected boolean toCompileVariableName = true; public boolean isToCompileVariableName() { return toCompileVariableName; } public void setToCompileVariableName(boolean toCompileVariableName) { this.toCompileVariableName = toCompileVariableName; } protected String getVariableName(String name) { for (int i = normalVars.size() - 1; i >= 0; i--) { ASTFinalVariable var = (ASTFinalVariable) normalVars.get(i); if (name.equals(var.variableName)) { //return getIndexedVarName(name, i); return var.toVariableName; } } return name; } /** * Try to return a minimized variable name for the given index order. * @param name * @param i * @return */ public String getIndexedVarName(String name, int i) { if (!toCompileVariableName) { return name; } String newName = null; while (true) { if (i < 26) { newName = String.valueOf((char) ('a' + i)); } else if (i < 52) { newName = String.valueOf((char) ('A' + (i - 26))); } else { /* * Here compiler assumes that there are no project with more than * 26 * 26 variables. */ int h = i / 26; int l = i % 26; newName = String.valueOf((char) ('a' + h)) + String.valueOf((char) ('a' + l)); } for (Iterator iter = finalVars.iterator(); iter.hasNext();) { ASTFinalVariable f = (ASTFinalVariable) iter.next(); if (newName.equals(f.toVariableName)) { newName = null; i++; break; } } if (newName != null) { for (Iterator iter = normalVars.iterator(); iter.hasNext();) { ASTFinalVariable f = (ASTFinalVariable) iter.next(); if (newName.equals(f.toVariableName)) { newName = null; i++; break; } } } if (newName != null) { break; } } return newName; } /** * Generated final variable list for anonymous class creation. 
* <ol> * <li>Generate "null" if there are no referenced final variales inside * anonymous class</li> * <li>Generate "Clazz.cloneFinals (...)" if there are referenced final * variable</li> * </ol> * * @param list * @param seperator * @param scope * @return */ protected String listFinalVariables(List list, String seperator, String scope) { if (list.size() == 0) { return "null"; } StringBuffer buf = new StringBuffer(); buf.append("Clazz.cloneFinals ("); for (Iterator iter = list.iterator(); iter.hasNext();) { ASTFinalVariable fv = (ASTFinalVariable) iter.next(); String name = fv.variableName; if (fv.toVariableName != null) { name = fv.toVariableName; } buf.append("\""); buf.append(name); buf.append("\", "); String methodScope = fv.methodScope; if (methodScope == null && scope == null) { buf.append(name); } else if (methodScope == null || scope == null) { buf.append("this.$finals." + name); } else if (methodScope.equals(scope)) { buf.append(name); } else { buf.append("this.$finals." + name); } if (iter.hasNext()) { buf.append(seperator); } } buf.append(")"); return buf.toString(); } /** * If given expression is constant value expression, return its value * string; or return null. * * @param node * @return */ protected String checkConstantValue(Expression node) { Object constValue = node.resolveConstantExpressionValue(); if (constValue != null && (constValue instanceof Number || constValue instanceof Character || constValue instanceof Boolean)) { StringBuffer buffer = new StringBuffer(); if (constValue instanceof Character) { buffer.append('\''); char charValue = ((Character)constValue).charValue(); if (charValue < 32 || charValue > 127) { buffer.append("\\u"); String hexStr = Integer.toHexString(charValue); int zeroLen = 4 - hexStr.length(); for (int i = 0; i < zeroLen; i++) { buffer.append('0'); } buffer.append(hexStr); } else { buffer.append(constValue); } buffer.append('\''); } else { buffer.append(constValue); } return buffer.toString(); } if (constValue != null && (constValue instanceof String)) { StringBuffer buffer = new StringBuffer(); String str = (String) constValue; int length = str.length(); /* if (length > 20) { return null; }*/ buffer.append("\""); for (int i = 0; i < length; i++) { char c = str.charAt(i); if (c == '\\' || c == '\'' || c == '\"') { buffer.append('\\'); buffer.append(c); } else if (c == '\r') { buffer.append("\\r"); } else if (c == '\n') { buffer.append("\\n"); } else if (c == '\t') { buffer.append("\\t"); } else if (c == '\f') { buffer.append("\\f"); } else if (c < 32 || c > 127) { buffer.append("\\u"); String hexStr = Integer.toHexString(c); int zeroLen = 4 - hexStr.length(); for (int k = 0; k < zeroLen; k++) { buffer.append('0'); } buffer.append(hexStr); } else { buffer.append(c); } } buffer.append("\""); return buffer.toString(); } return null; } }
fix bug 1709: Unterminated string literal appears in DateFormat.java. For the field public static final char QUOTE = '\''; J2S compiles it to ''' where it is used
sources/net.sf.j2s.core/src/net/sf/j2s/core/astvisitors/ASTVariableVisitor.java
fix bug 1709: Unterminated string literal appears in DateFormat.java. For the field public static final char QUOTE = '\''; J2S compiles it to ''' where it is used
Java
mpl-2.0
d7e5530b3d235afd242bd5d56456e28712b5cb79
0
Tropicraft/Tropicraft,Tropicraft/Tropicraft,Tropicraft/Tropicraft
package net.tropicraft.core.common.entity.placeable; import com.google.common.collect.ImmutableList; import net.minecraft.block.BlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.settings.PointOfView; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityType; import net.minecraft.entity.LivingEntity; import net.minecraft.entity.MoverType; import net.minecraft.entity.player.PlayerEntity; import net.minecraft.fluid.FluidState; import net.minecraft.fluid.Fluids; import net.minecraft.item.DyeColor; import net.minecraft.item.ItemStack; import net.minecraft.network.PacketBuffer; import net.minecraft.tags.FluidTags; import net.minecraft.util.ActionResultType; import net.minecraft.util.Direction; import net.minecraft.util.Hand; import net.minecraft.util.SharedSeedRandom; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import net.minecraft.util.math.RayTraceResult; import net.minecraft.world.World; import net.minecraft.world.gen.PerlinNoiseGenerator; import net.minecraftforge.fml.common.registry.IEntityAdditionalSpawnData; import net.tropicraft.core.common.item.TropicraftItems; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.List; import java.util.Random; public class BeachFloatEntity extends FurnitureEntity implements IEntityAdditionalSpawnData { @Nonnull private static final Random rand = new Random(298457L); @Nonnull private static final PerlinNoiseGenerator windNoise = new PerlinNoiseGenerator(new SharedSeedRandom(298457L), ImmutableList.of(0)); /* Wind */ private double windModifier = 0; /* Is any entity laying on the float? */ public boolean isEmpty; /* Acceleration */ public float rotationSpeed; /* Water checks */ private double prevMotionY; public BeachFloatEntity(EntityType<BeachFloatEntity> type, World worldIn) { super(type, worldIn, TropicraftItems.BEACH_FLOATS); this.ignoreFrustumCheck = true; this.isEmpty = true; this.preventEntitySpawning = true; this.entityCollisionReduction = .95F; setEntityId(this.getEntityId()); } @Override public void setEntityId(int id) { super.setEntityId(id); rand.setSeed(id); this.windModifier = (1 + (rand.nextGaussian() * 0.1)) - 0.05; } @Override public void tick() { Entity rider = getControllingPassenger(); if (world.isRemote && rider instanceof PlayerEntity) { PlayerEntity controller = (PlayerEntity) rider; float move = controller.moveForward; float rot = -controller.moveStrafing; rotationSpeed += rot * 0.25f; float ang = rotationYaw; float moveX = MathHelper.sin(-ang * 0.017453292F) * move * 0.0035f; float moveZ = MathHelper.cos(ang * 0.017453292F) * move * 0.0035f; setMotion(getMotion().add(moveX, 0, moveZ)); } if (this.inWater) { double windAng = (windNoise.noiseAt(getPosX() / 1000, getPosZ() / 1000, false) + 1) * Math.PI; double windX = Math.sin(windAng) * 0.0005 * windModifier; double windZ = Math.cos(windAng) * 0.0005 * windModifier; setMotion(getMotion().add(windX, 0, windZ)); // Rotate towards a target yaw with some random perturbance double targetYaw = Math.toDegrees(windAng) + ((windModifier - 1) * 45); double yaw = (MathHelper.wrapDegrees(this.rotationYaw) + 180 - 35) % 360; double angleDiff = targetYaw - yaw; if (angleDiff > 0) { this.rotationSpeed += Math.min(0.005 * windModifier, angleDiff); } else { this.rotationSpeed += Math.max(-0.005 * windModifier, angleDiff); } } double water = getWaterLevel(); double center = getCenterY(); double eps = 1 / 16D; if (water < center - eps) { // 
Gravity setMotion(getMotion().add(0, -MathHelper.clamp(center - water, 0, 0.04), 0)); } else if (water > center + eps) { double floatpush = MathHelper.clamp(water - center, 0, 0.02); setMotion(getMotion().add(0, floatpush, 0)); } else if (Math.abs(getMotion().y) < 0.02) { // Close enough, just force to the correct spot if (getMotion().y != 0) { lerpY = water - 0.011; } setMotion(getMotion().mul(1, 0, 1)); prevMotionY = 0; } super.tick(); rotationYaw += rotationSpeed; move(MoverType.PLAYER, getMotion()); setMotion(getMotion().mul(0.9, 0.9, 0.9)); rotationSpeed *= 0.9f; if (!this.world.isRemote) { List<Entity> list = this.world.getEntitiesWithinAABBExcludingEntity(this, this.getBoundingBox().grow(0.20000000298023224D, 0.0D, 0.20000000298023224D)); for (Entity entity : list) { if (entity != this.getControllingPassenger() && entity.canBePushed()) { entity.applyEntityCollision(this); } } if (this.getControllingPassenger() != null && !this.getControllingPassenger().isAlive()) { this.removePassengers(); } } } @Override protected boolean preventMotion() { return false; } private double getCenterY() { AxisAlignedBB bb = getBoundingBox(); return bb.minY + (bb.maxY - bb.minY) * 0.5D; } @Override protected void updateFallState(double y, boolean onGroundIn, BlockState state, BlockPos pos) { this.prevMotionY = this.getMotion().y; super.updateFallState(y, onGroundIn, state, pos); } @Override protected boolean func_233566_aG_() { this.eyesFluidLevel.clear(); this.updateWaterState(); boolean lava = this.handleFluidAcceleration(FluidTags.LAVA, this.world.getDimensionType().isUltrawarm() ? 0.007 : 0.0023333333333333335D); return this.isInWater() || lava; } void updateWaterState() { AxisAlignedBB temp = getBoundingBox(); setBoundingBox(temp.contract(1, 0, 1).contract(-1, 0.125, -1)); try { if (this.handleFluidAcceleration(FluidTags.WATER, 0.014D)) { if (!this.inWater && !this.firstUpdate) { this.doWaterSplashEffect(); } this.fallDistance = 0.0F; this.inWater = true; this.extinguish(); } else { this.inWater = false; } } finally { setBoundingBox(temp); } } @Override public ActionResultType processInitialInteract(PlayerEntity player, Hand hand) { if (!this.world.isRemote && !player.isSneaking()) { player.startRiding(this); return ActionResultType.SUCCESS; } return !player.isRidingSameEntity(this) ? ActionResultType.SUCCESS : ActionResultType.PASS; } /* Following three methods copied from EntityBoat for passenger updates */ @Override public void updatePassenger(@Nonnull Entity passenger) { if (this.isPassenger(passenger)) { // float yaw = this.rotationYaw; // passenger.setPosition(x, this.posY + this.getMountedYOffset() + passenger.getYOffset(), z); float f = 0.0F; float f1 = (float) ((!isAlive() ? 0.001 : this.getMountedYOffset()) + passenger.getYOffset()); if (this.getPassengers().size() > 1) { int i = this.getPassengers().indexOf(passenger); if (i == 0) { f = 0.2F; } else { f = -0.6F; } if (passenger instanceof LivingEntity) { f = (float) ((double) f + 0.2D); } } float len = 0.6f; double x = this.getPosX() + (-MathHelper.sin(-this.rotationYaw * 0.017453292F) * len); double z = this.getPosZ() + (-MathHelper.cos(this.rotationYaw * 0.017453292F) * len); passenger.setPosition(x, this.getPosY() + (double) f1, z); passenger.rotationYaw += this.rotationSpeed; passenger.setRotationYawHead(passenger.getRotationYawHead() + this.rotationSpeed); this.applyYawToEntity(passenger); if (passenger instanceof LivingEntity && this.getPassengers().size() > 1) { int j = passenger.getEntityId() % 2 == 0 ? 
90 : 270; passenger.setRenderYawOffset(((LivingEntity) passenger).renderYawOffset + (float) j); passenger.setRotationYawHead(passenger.getRotationYawHead() + (float) j); } if (passenger instanceof PlayerEntity) { ((PlayerEntity) passenger).setBoundingBox(getBoundingBox().expand(0, 0.3, 0).contract(0, -0.1875, 0)); } } } @Override protected void removePassenger(Entity passenger) { super.removePassenger(passenger); if (passenger instanceof PlayerEntity) { passenger.recalculateSize(); } } protected void applyYawToEntity(Entity entityToUpdate) { if (!entityToUpdate.world.isRemote || isClientFirstPerson(entityToUpdate)) { entityToUpdate.setRenderYawOffset(this.rotationYaw); float yaw = MathHelper.wrapDegrees(entityToUpdate.rotationYaw - this.rotationYaw); float pitch = MathHelper.wrapDegrees(entityToUpdate.rotationPitch - this.rotationPitch); float clampedYaw = MathHelper.clamp(yaw, -105.0F, 105.0F); float clampedPitch = MathHelper.clamp(pitch, -100F, -10F); entityToUpdate.prevRotationYaw += clampedYaw - yaw; entityToUpdate.rotationYaw += clampedYaw - yaw; entityToUpdate.prevRotationPitch += clampedPitch - pitch; entityToUpdate.rotationPitch += clampedPitch - pitch; entityToUpdate.setRotationYawHead(entityToUpdate.rotationYaw); } } @Override public void applyOrientationToEntity(@Nonnull Entity entityToUpdate) { this.applyYawToEntity(entityToUpdate); } private static boolean isClientFirstPerson(Entity entity) { Minecraft client = Minecraft.getInstance(); return client.renderViewEntity == entity && client.gameSettings.getPointOfView() == PointOfView.FIRST_PERSON; } /* Again, from entity boat, for water checks */ private float getWaterLevel() { AxisAlignedBB axisalignedbb = this.getBoundingBox(); int minX = MathHelper.floor(axisalignedbb.minX); int maxX = MathHelper.ceil(axisalignedbb.maxX); int minY = MathHelper.floor(axisalignedbb.minY - prevMotionY); int maxY = minY + 1; int minZ = MathHelper.floor(axisalignedbb.minZ); int maxZ = MathHelper.ceil(axisalignedbb.maxZ); BlockPos.Mutable pos = new BlockPos.Mutable(); float waterHeight = minY - 1; for (int y = maxY; y >= minY; --y) { for (int x = minX; x < maxX; x++) { for (int z = minZ; z < maxZ; ++z) { pos.setPos(x, y, z); FluidState fluidstate = this.world.getFluidState(pos); if (fluidstate.getFluid().isEquivalentTo(Fluids.WATER)) { waterHeight = Math.max(waterHeight, pos.getY() + fluidstate.getActualHeight(this.world, pos)); } if (waterHeight >= maxY) { return waterHeight; } } } } return waterHeight; } /** * Returns true if this entity should push and be pushed by other entities when colliding. */ @Override public boolean canBePushed() { return true; } @Override public double getYOffset() { return 0; } /** * Returns the Y offset from the entity's position for any entity riding this one. */ @Override public double getMountedYOffset() { return getHeight() - 1.1; } /** * For vehicles, the first passenger is generally considered the controller and "drives" the vehicle. For example, Pigs, Horses, and Boats are generally "steered" by the controlling passenger. */ @Override @Nullable public Entity getControllingPassenger() { List<Entity> list = this.getPassengers(); return list.isEmpty() ? null : list.get(0); } /** * Gets the horizontal facing direction of this Entity, adjusted to take specially-treated entity types into account. 
*/ @Override public Direction getAdjustedHorizontalFacing() { return this.getHorizontalFacing().rotateY(); } @Override public boolean shouldRiderSit() { return false; } @Override public void writeSpawnData(PacketBuffer buffer) { buffer.writeDouble(this.lerpYaw); } @Override public void readSpawnData(PacketBuffer additionalData) { this.lerpYaw = MathHelper.wrapDegrees(additionalData.readDouble()); } @Override public ItemStack getPickedResult(RayTraceResult target) { return new ItemStack(TropicraftItems.BEACH_FLOATS.get(DyeColor.byId(getColor().getId())).get()); } }
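A note on the buoyancy branch in tick() above: the float compares the sampled water surface height against the vertical centre of its bounding box and only reacts outside a 1/16-block dead band. The snippet below is a minimal sketch of that decision as a pure function, based only on what the method shows; MathHelper.clamp is replaced with a local helper so it stands alone, and the dead-band case is simplified (the original additionally requires the current vertical speed to be under 0.02 before it zeroes the motion and nudges lerpY).

final class FloatBuoyancySketch {
    // Mirrors: if (water < center - eps) ... else if (water > center + eps) ... in tick()
    static double verticalAcceleration(double waterLevel, double centerY) {
        double eps = 1 / 16D;                       // same dead band as the component
        if (waterLevel < centerY - eps) {
            // centre sits above the water surface: gravity, capped at 0.04 per tick
            return -clamp(centerY - waterLevel, 0, 0.04);
        } else if (waterLevel > centerY + eps) {
            // centre sits below the surface: push upwards, capped at 0.02 per tick
            return clamp(waterLevel - centerY, 0, 0.02);
        }
        // inside the dead band the real code settles instead of accelerating further
        return 0;
    }

    private static double clamp(double value, double min, double max) {
        return Math.max(min, Math.min(max, value));
    }
}

The asymmetric caps (0.04 downward, 0.02 upward) come straight from the constants in tick().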
src/main/java/net/tropicraft/core/common/entity/placeable/BeachFloatEntity.java
package net.tropicraft.core.common.entity.placeable; import com.google.common.collect.ImmutableList; import net.minecraft.block.BlockState; import net.minecraft.client.Minecraft; import net.minecraft.client.settings.PointOfView; import net.minecraft.entity.Entity; import net.minecraft.entity.EntityType; import net.minecraft.entity.LivingEntity; import net.minecraft.entity.MoverType; import net.minecraft.entity.player.PlayerEntity; import net.minecraft.fluid.FluidState; import net.minecraft.fluid.Fluids; import net.minecraft.item.DyeColor; import net.minecraft.item.ItemStack; import net.minecraft.network.PacketBuffer; import net.minecraft.tags.FluidTags; import net.minecraft.util.ActionResultType; import net.minecraft.util.Direction; import net.minecraft.util.Hand; import net.minecraft.util.SharedSeedRandom; import net.minecraft.util.math.AxisAlignedBB; import net.minecraft.util.math.BlockPos; import net.minecraft.util.math.MathHelper; import net.minecraft.util.math.RayTraceResult; import net.minecraft.world.World; import net.minecraft.world.gen.PerlinNoiseGenerator; import net.minecraftforge.fml.common.registry.IEntityAdditionalSpawnData; import net.tropicraft.core.common.item.TropicraftItems; import javax.annotation.Nonnull; import javax.annotation.Nullable; import java.util.List; import java.util.Random; public class BeachFloatEntity extends FurnitureEntity implements IEntityAdditionalSpawnData { @Nonnull private static final Random rand = new Random(298457L); @Nonnull private static final PerlinNoiseGenerator windNoise = new PerlinNoiseGenerator(new SharedSeedRandom(298457L), ImmutableList.of(0)); /* Wind */ private double windModifier = 0; /* Is any entity laying on the float? */ public boolean isEmpty; /* Acceleration */ public float rotationSpeed; /* Water checks */ private double prevMotionY; public BeachFloatEntity(EntityType<BeachFloatEntity> type, World worldIn) { super(type, worldIn, TropicraftItems.BEACH_FLOATS); this.ignoreFrustumCheck = true; this.isEmpty = true; this.preventEntitySpawning = true; this.entityCollisionReduction = .95F; setEntityId(this.getEntityId()); } @Override public void setEntityId(int id) { super.setEntityId(id); rand.setSeed(id); this.windModifier = (1 + (rand.nextGaussian() * 0.1)) - 0.05; } @Override public void tick() { Entity rider = getControllingPassenger(); if (world.isRemote && rider instanceof PlayerEntity) { PlayerEntity controller = (PlayerEntity) rider; float move = controller.moveForward; float rot = -controller.moveStrafing; rotationSpeed += rot * 0.25f; float ang = rotationYaw; float moveX = MathHelper.sin(-ang * 0.017453292F) * move * 0.0035f; float moveZ = MathHelper.cos(ang * 0.017453292F) * move * 0.0035f; setMotion(getMotion().add(moveX, 0, moveZ)); } if (this.inWater) { double windAng = (windNoise.noiseAt(getPosX() / 1000, getPosZ() / 1000, false) + 1) * Math.PI; double windX = Math.sin(windAng) * 0.0005 * windModifier; double windZ = Math.cos(windAng) * 0.0005 * windModifier; setMotion(getMotion().add(windX, 0, windZ)); // Rotate towards a target yaw with some random perturbance double targetYaw = Math.toDegrees(windAng) + ((windModifier - 1) * 45); double yaw = (MathHelper.wrapDegrees(this.rotationYaw) + 180 - 35) % 360; double angleDiff = targetYaw - yaw; if (angleDiff > 0) { this.rotationSpeed += Math.min(0.005 * windModifier, angleDiff); } else { this.rotationSpeed += Math.max(-0.005 * windModifier, angleDiff); } } double water = getWaterLevel(); double center = getCenterY(); double eps = 1 / 16D; if (water < center - eps) { // 
Gravity setMotion(getMotion().add(0, -MathHelper.clamp(center - water, 0, 0.04), 0)); } else if (water > center + eps) { double floatpush = MathHelper.clamp(water - center, 0, 0.02); setMotion(getMotion().add(0, floatpush, 0)); } else if (Math.abs(getMotion().y) < 0.02) { // Close enough, just force to the correct spot if (getMotion().y != 0) { lerpY = water - 0.011; } setMotion(getMotion().mul(1, 0, 1)); prevMotionY = 0; } super.tick(); rotationYaw += rotationSpeed; move(MoverType.PLAYER, getMotion()); setMotion(getMotion().mul(0.9, 0.9, 0.9)); rotationSpeed *= 0.9f; if (!this.world.isRemote) { List<Entity> list = this.world.getEntitiesWithinAABBExcludingEntity(this, this.getBoundingBox().grow(0.20000000298023224D, 0.0D, 0.20000000298023224D)); for (Entity entity : list) { if (entity != this.getControllingPassenger() && entity.canBePushed()) { entity.applyEntityCollision(this); } } if (this.getControllingPassenger() != null && !this.getControllingPassenger().isAlive()) { this.removePassengers(); } } } @Override protected boolean preventMotion() { return false; } private double getCenterY() { AxisAlignedBB bb = getBoundingBox(); return bb.minY + (bb.maxY - bb.minY) * 0.5D; } @Override protected void updateFallState(double y, boolean onGroundIn, BlockState state, BlockPos pos) { this.prevMotionY = this.getMotion().y; super.updateFallState(y, onGroundIn, state, pos); } @Override protected boolean func_233566_aG_() { this.eyesFluidLevel.clear(); this.updateWaterState(); boolean lava = this.handleFluidAcceleration(FluidTags.LAVA, this.world.getDimensionType().isUltrawarm() ? 0.007 : 0.0023333333333333335D); return this.isInWater() || lava; } void updateWaterState() { AxisAlignedBB temp = getBoundingBox(); setBoundingBox(temp.contract(1, 0, 1).contract(-1, 0.125, -1)); try { if (this.handleFluidAcceleration(FluidTags.WATER, 0.014D)) { if (!this.inWater && !this.firstUpdate) { this.doWaterSplashEffect(); } this.fallDistance = 0.0F; this.inWater = true; this.extinguish(); } else { this.inWater = false; } } finally { setBoundingBox(temp); } } @Override public ActionResultType processInitialInteract(PlayerEntity player, Hand hand) { if (!this.world.isRemote && !player.isSneaking()) { player.startRiding(this); return ActionResultType.SUCCESS; } return !player.isRidingSameEntity(this) ? ActionResultType.SUCCESS : ActionResultType.PASS; } /* Following three methods copied from EntityBoat for passenger updates */ @Override public void updatePassenger(@Nonnull Entity passenger) { if (this.isPassenger(passenger)) { // float yaw = this.rotationYaw; // passenger.setPosition(x, this.posY + this.getMountedYOffset() + passenger.getYOffset(), z); float f = 0.0F; float f1 = (float) ((!isAlive() ? 0.001 : this.getMountedYOffset()) + passenger.getYOffset()); if (this.getPassengers().size() > 1) { int i = this.getPassengers().indexOf(passenger); if (i == 0) { f = 0.2F; } else { f = -0.6F; } if (passenger instanceof LivingEntity) { f = (float) ((double) f + 0.2D); } } float len = 0.6f; double x = this.getPosX() + (-MathHelper.sin(-this.rotationYaw * 0.017453292F) * len); double z = this.getPosZ() + (-MathHelper.cos(this.rotationYaw * 0.017453292F) * len); passenger.setPosition(x, this.getPosY() + (double) f1, z); passenger.rotationYaw += this.rotationSpeed; passenger.setRotationYawHead(passenger.getRotationYawHead() + this.rotationSpeed); this.applyYawToEntity(passenger); if (passenger instanceof LivingEntity && this.getPassengers().size() > 1) { int j = passenger.getEntityId() % 2 == 0 ? 
90 : 270; passenger.setRenderYawOffset(((LivingEntity) passenger).renderYawOffset + (float) j); passenger.setRotationYawHead(passenger.getRotationYawHead() + (float) j); } if (passenger instanceof PlayerEntity) { ((PlayerEntity) passenger).setBoundingBox(getBoundingBox().expand(0, 0.3, 0).contract(0, -0.1875, 0)); } } } @Override protected void removePassenger(Entity passenger) { super.removePassenger(passenger); if (passenger instanceof PlayerEntity) { passenger.recalculateSize(); } } protected void applyYawToEntity(Entity entityToUpdate) { if (!entityToUpdate.world.isRemote || isClientFirstPerson()) { entityToUpdate.setRenderYawOffset(this.rotationYaw); float yaw = MathHelper.wrapDegrees(entityToUpdate.rotationYaw - this.rotationYaw); float pitch = MathHelper.wrapDegrees(entityToUpdate.rotationPitch - this.rotationPitch); float clampedYaw = MathHelper.clamp(yaw, -105.0F, 105.0F); float clampedPitch = MathHelper.clamp(pitch, -100F, -10F); entityToUpdate.prevRotationYaw += clampedYaw - yaw; entityToUpdate.rotationYaw += clampedYaw - yaw; entityToUpdate.prevRotationPitch += clampedPitch - pitch; entityToUpdate.rotationPitch += clampedPitch - pitch; entityToUpdate.setRotationYawHead(entityToUpdate.rotationYaw); } } @Override public void applyOrientationToEntity(@Nonnull Entity entityToUpdate) { this.applyYawToEntity(entityToUpdate); } private boolean isClientFirstPerson() { return Minecraft.getInstance().gameSettings.getPointOfView() == PointOfView.FIRST_PERSON; } /* Again, from entity boat, for water checks */ private float getWaterLevel() { AxisAlignedBB axisalignedbb = this.getBoundingBox(); int minX = MathHelper.floor(axisalignedbb.minX); int maxX = MathHelper.ceil(axisalignedbb.maxX); int minY = MathHelper.floor(axisalignedbb.minY - prevMotionY); int maxY = minY + 1; int minZ = MathHelper.floor(axisalignedbb.minZ); int maxZ = MathHelper.ceil(axisalignedbb.maxZ); BlockPos.Mutable pos = new BlockPos.Mutable(); float waterHeight = minY - 1; for (int y = maxY; y >= minY; --y) { for (int x = minX; x < maxX; x++) { for (int z = minZ; z < maxZ; ++z) { pos.setPos(x, y, z); FluidState fluidstate = this.world.getFluidState(pos); if (fluidstate.getFluid().isEquivalentTo(Fluids.WATER)) { waterHeight = Math.max(waterHeight, pos.getY() + fluidstate.getActualHeight(this.world, pos)); } if (waterHeight >= maxY) { return waterHeight; } } } } return waterHeight; } /** * Returns true if this entity should push and be pushed by other entities when colliding. */ @Override public boolean canBePushed() { return true; } @Override public double getYOffset() { return 0; } /** * Returns the Y offset from the entity's position for any entity riding this one. */ @Override public double getMountedYOffset() { return getHeight() - 1.1; } /** * For vehicles, the first passenger is generally considered the controller and "drives" the vehicle. For example, Pigs, Horses, and Boats are generally "steered" by the controlling passenger. */ @Override @Nullable public Entity getControllingPassenger() { List<Entity> list = this.getPassengers(); return list.isEmpty() ? null : list.get(0); } /** * Gets the horizontal facing direction of this Entity, adjusted to take specially-treated entity types into account. 
*/ @Override public Direction getAdjustedHorizontalFacing() { return this.getHorizontalFacing().rotateY(); } @Override public boolean shouldRiderSit() { return false; } @Override public void writeSpawnData(PacketBuffer buffer) { buffer.writeDouble(this.lerpYaw); } @Override public void readSpawnData(PacketBuffer additionalData) { this.lerpYaw = MathHelper.wrapDegrees(additionalData.readDouble()); } @Override public ItemStack getPickedResult(RayTraceResult target) { return new ItemStack(TropicraftItems.BEACH_FLOATS.get(DyeColor.byId(getColor().getId())).get()); } }
only apply rotation for own client player
src/main/java/net/tropicraft/core/common/entity/placeable/BeachFloatEntity.java
only apply rotation for own client player
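The commit subject above ("only apply rotation for own client player") boils down to one method. In old_contents, isClientFirstPerson() looked only at the camera mode, so applyYawToEntity clamped the view of every first-person rider; in new_contents the check also requires that the entity being updated is the client's own render view entity. A minimal side-by-side sketch, using the 1.16 Forge names that already appear in the file (Minecraft, PointOfView, renderViewEntity):

import net.minecraft.client.Minecraft;
import net.minecraft.client.settings.PointOfView;
import net.minecraft.entity.Entity;

final class FirstPersonCheckSketch {
    // Old revision: any first-person camera passed the check.
    static boolean oldCheck() {
        return Minecraft.getInstance().gameSettings.getPointOfView() == PointOfView.FIRST_PERSON;
    }

    // New revision: the rotation clamp applies only when this entity is the one
    // the local client is actually viewing from.
    static boolean newCheck(Entity entity) {
        Minecraft client = Minecraft.getInstance();
        return client.renderViewEntity == entity
                && client.gameSettings.getPointOfView() == PointOfView.FIRST_PERSON;
    }
}

Correspondingly, the guard in applyYawToEntity changes from (!entityToUpdate.world.isRemote || isClientFirstPerson()) to (!entityToUpdate.world.isRemote || isClientFirstPerson(entityToUpdate)).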
Java
agpl-3.0
f6cf3b1d2181876026be26265806ca98ad266f1e
0
battlecode/battlecode-server,battlecode/battlecode-server
package battlecode.engine.instrumenter; import battlecode.engine.ErrorReporter; import org.objectweb.asm.Type; import org.objectweb.asm.signature.SignatureReader; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.InputStreamReader; import java.util.HashSet; import java.util.Set; import java.util.zip.ZipFile; /** * ClassReferenceUtil is a singleton used to keep track of class references during instrumentation. * <p/> * Whenever a class reference is encountered while instrumenting a class, that reference should be registered with * ClassReferenceUtil (e.g., using <code>classReference(...)</code>). This does two things. First, the class reference * may be replaced with a reference to a different class (e.g., replacing Random with RoboRandom). Second, * ClassReferenceUtil remembers new references. New class references can be retrieved using flushNewlyReferencedClasses. * * @author adamd */ public class ClassReferenceUtil { // packages for which the player is allowed to use any of the contained classes; loaded from AllowedPackages.txt private final static Set<String> allowedPackages; // a set of classes the player is not allowed to use; loaded from DisallowedClasses.txt private final static Set<String> disallowedClasses; // We can't instrument these classes because they have native methods. Java won't allow us // to create an instrumented class that has the same prefix as a builtin class, so we have to // change the name. But when we rename the class, it can't use the old class's native methods any more. // This might have issues so for now I'm just not instrumenting java.io // private final static Set<String> uninstrumentedClasses; private ClassReferenceUtil() { } static void fileLoadError(String filename) { ErrorReporter.report(String.format("Error loading %s",filename),String.format("Check that the '%s' file exists and is not corrupted.",filename)); throw new InstrumentationException(); } // the static constructor basically loads the whitelist files and caches them in allowedPackages and disallowedClasses static { BufferedReader reader; String line; ZipFile zfile; allowedPackages = new HashSet<String>(); disallowedClasses = new HashSet<String>(); // load allowed packages try { reader = new BufferedReader(new InputStreamReader(new FileInputStream("AllowedPackages.txt"))); while ((line = reader.readLine()) != null) { allowedPackages.add(line); } } catch (Exception e) { fileLoadError("AllowedPackages.txt"); } // load disallowed classes try { reader = new BufferedReader(new InputStreamReader(new FileInputStream("DisallowedClasses.txt"))); while ((line = reader.readLine()) != null) { disallowedClasses.add(line); } } catch (Exception e) { fileLoadError("DisallowedClasses.txt"); } } protected static boolean isInAllowedPackage(String className) { int dotIndex = className.lastIndexOf('/'); if (dotIndex == -1) return false; return allowedPackages.contains(className.substring(0, dotIndex)); } private static boolean shouldAddInstrumentedPrefix(String className) { if (className.startsWith("battlecode/")) { if (className.equals("battlecode/engine/instrumenter/lang/InstrumentableFunctions")) return true; else return false; } //if(className.startsWith("java/lang/")) // return false; if (className.startsWith("instrumented/")) return false; if (className.startsWith("java/util/jar") || className.startsWith("java/util/zip") || className.equals("java/util/concurrent/TimeUnit")) return false; if (className.equals("java/util/Iterator")) return false; if 
(className.startsWith("java/util/") || className.startsWith("java/math/")) return true; // We get a type mismatch if we instrument PrintStream but not System //if(uninstrumentedClasses.contains(className)) // return false; //if(isInAllowedPackage(className)) // return true; if (className.startsWith("sun/") || className.startsWith("com/") || className.startsWith("java/")) return false; //if(className.startsWith("sun/")|| // className.startsWith("com/")) // return false; return true; } /** * Registers a class reference, and may replace the reference with a reference to a different class. This method always returns * a class that should be referenced, even if the return value is the same as the given <code>className</code>. If this class has not been * referenced previously, the next call to flushNewlyReferencedClasses will return an array containing the given class (among others). * <p/> * If cR = classReference(cN,tPN,s,cD), then it should always be the case that * cR == classReference(cR,tPN,s,cD). If cR starts with instrumented/, then it should * also always be the case that cR == classReference(cR.substring(13),tPN,s,cD). * * @param className the name of the class that was referenced, in fully qualified form (e.g., "team666/navigation/Navigator") * @param teamPackageName the name of the team thaht referenced the given class * @return the name of the class that should replace this reference, in fully qualified form * @throws InstrumentationException if the class reference is not allowed */ public static String classReference(String className, String teamPackageName, boolean silenced, boolean checkDisallowed) { String ans = classReferenceX(className, teamPackageName, silenced, checkDisallowed); //System.out.println("CR "+className+":"+ans); return ans; } public static String classReferenceX(String className, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (className == null) return null; if (className.charAt(0) == '[') { int arrayIndex = className.lastIndexOf('['); //System.out.println("what do I do with "+className); if (className.charAt(arrayIndex + 1) == 'L') { return className.substring(0, arrayIndex + 2) + classReference(className.substring(arrayIndex + 2), teamPackageName, silenced, checkDisallowed); } else { return className; } } else if (className.startsWith(teamPackageName + "/")) return className; else if (className.equals("java/lang/System")) return "battlecode/engine/instrumenter/lang/System"; else if (className.equals("java/util/concurrent/ConcurrentHashMap")) return "battlecode/engine/instrumenter/lang/ConcurrentHashMap"; else if (className.equals("java/util/concurrent/atomic/AtomicInteger")) return "battlecode/engine/instrumenter/lang/AtomicInteger"; else if (className.equals("java/util/concurrent/atomic/AtomicLong")) return "battlecode/engine/instrumenter/lang/AtomicLong"; else if (className.equals("java/util/concurrent/atomic/AtomicReference")) return "battlecode/engine/instrumenter/lang/AtomicReference"; else if (className.equals("sun/misc/Unsafe")) return "battlecode/engine/instrumenter/lang/Unsafe"; if (checkDisallowed) { if (disallowedClasses.contains(className) || !isInAllowedPackage(className)) { return illegalClass(className, teamPackageName); } } if (className.equals("java/security/SecureRandom")) return "instrumented/java/util/Random"; if (shouldAddInstrumentedPrefix(className)) return "instrumented/" + className; else return className; } /** * Registers a class reference (see <code>classReference(...)</code>), but with the class name in a 
different format (descriptor, instead * of binary form). * * @param classDesc descriptor of the class that was referenced (e.g., "Lteam666/navigation/Navigator;") * @param teamPackageName the name of the team that referenced the given class * @throws InstrumentationException if the class reference is not allowed. */ public static String classDescReference(String classDesc, String teamPackageName, boolean silenced, boolean checkDisallowed) { String ans = classDescReferenceX(classDesc, teamPackageName, silenced, checkDisallowed); //System.out.println("CDR "+classDesc+":"+ans); return ans; } public static String classDescReferenceX(String classDesc, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (classDesc == null) return null; if (classDesc.charAt(0) == 'L') { return "L" + classReference(classDesc.substring(1, classDesc.length() - 1), teamPackageName, silenced, checkDisallowed) + ";"; } else if (classDesc.charAt(0) == '[') { int arrayIndex = classDesc.lastIndexOf('['); return classDesc.substring(0, arrayIndex + 1) + classDescReference(classDesc.substring(arrayIndex + 1, classDesc.length()), teamPackageName, silenced, checkDisallowed); } else { if (classDesc.length() > 1) System.out.println("unrecognized CDR " + classDesc); return classDesc; } } /** * Registers all the class references in a method descriptor, and replaces references as if classReference were called on each individual * reference. * * @param methodDesc descriptor for the method that was referenced (e.g., "(Ljava/util/Map;Z)Ljava/util/Set;") * @param teamPackageName the name of the team that referenced the given method * @throws InstrumentationException if any of the class references contained the the method descriptor are not allowed. */ public static String methodDescReference(String methodDesc, String teamPackageName, boolean silenced, boolean checkDisallowed) { String ret = "("; Type[] argTypes = Type.getArgumentTypes(methodDesc); for (int i = 0; i < argTypes.length; i++) { if (argTypes[i].getSort() == Type.ARRAY || argTypes[i].getSort() == Type.OBJECT) // HACK: whitelistSystem is set to true here b/c we're only replacing Object; once the whole library is replaced, this should be changed ret = ret + classDescReference(argTypes[i].toString(), teamPackageName, silenced, checkDisallowed); else ret = ret + argTypes[i].toString(); } ret = ret + ")"; Type returnType = Type.getReturnType(methodDesc); if (returnType.getSort() == Type.ARRAY || returnType.getSort() == Type.OBJECT) // HACK: whitelistSystem is set to true here b/c we're only replacing Object; once the whole library is replaced, this should be changed ret = ret + classDescReference(returnType.toString(), teamPackageName, silenced, checkDisallowed); else ret = ret + returnType.toString(); //System.out.println("mdr "+ret); return ret; } public static String methodSignatureReference(String signature, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (signature == null) return null; //System.out.println("meth "+signature); BattlecodeSignatureWriter writer = new BattlecodeSignatureWriter(teamPackageName, silenced, checkDisallowed); SignatureReader reader = new SignatureReader(signature); reader.accept(writer); //System.out.println("meth "+writer.toString()); return writer.toString(); } public static String fieldSignatureReference(String signature, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (signature == null) return null; //System.out.println("field "+signature); BattlecodeSignatureWriter writer = 
new BattlecodeSignatureWriter(teamPackageName, silenced, checkDisallowed); SignatureReader reader = new SignatureReader(signature); reader.acceptType(writer); //System.out.println("field "+writer.toString()); return writer.toString(); } // called whenever an illegal class is found; throws an InstrumentationException private static String illegalClass(String className, String teamPackageName) { if (InstrumentingClassLoader.lazy()) { return "forbidden/" + className; } else { ErrorReporter.report("Illegal class: " + className + "\nThis class cannot be referenced by player " + teamPackageName, false); InstrumentationException e = new InstrumentationException(); throw e; } } }
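classReferenceX above is, in effect, a small renaming table plus a prefixing rule. The traces below follow the branches of the method for a handful of inputs with checkDisallowed set to false (so the whitelist lookup is skipped); they are read off the code rather than produced by running it, since the class's static initializer expects AllowedPackages.txt and DisallowedClasses.txt on disk.

// classReferenceX(className, "team666", false, false):
//   "team666/navigation/Navigator"  -> unchanged (player's own package)
//   "java/lang/System"              -> "battlecode/engine/instrumenter/lang/System"
//   "sun/misc/Unsafe"               -> "battlecode/engine/instrumenter/lang/Unsafe"
//   "java/security/SecureRandom"    -> "instrumented/java/util/Random"
//   "java/util/HashMap"             -> "instrumented/java/util/HashMap"    (java/util/* gets the prefix)
//   "java/util/Iterator"            -> unchanged                           (explicitly excluded from the prefix)
//   "[Ljava/util/HashMap;"          -> "[Linstrumented/java/util/HashMap;" (array element type rewritten)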
src/main/battlecode/engine/instrumenter/ClassReferenceUtil.java
package battlecode.engine.instrumenter; import battlecode.engine.ErrorReporter; import org.objectweb.asm.Type; import org.objectweb.asm.signature.SignatureReader; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.InputStreamReader; import java.util.HashSet; import java.util.Set; import java.util.zip.ZipFile; /** * ClassReferenceUtil is a singleton used to keep track of class references during instrumentation. * <p/> * Whenever a class reference is encountered while instrumenting a class, that reference should be registered with * ClassReferenceUtil (e.g., using <code>classReference(...)</code>). This does two things. First, the class reference * may be replaced with a reference to a different class (e.g., replacing Random with RoboRandom). Second, * ClassReferenceUtil remembers new references. New class references can be retrieved using flushNewlyReferencedClasses. * * @author adamd */ public class ClassReferenceUtil { // packages for which the player is allowed to use any of the contained classes; loaded from AllowedPackages.txt private final static Set<String> allowedPackages; // a set of classes the player is not allowed to use; loaded from DisallowedClasses.txt private final static Set<String> disallowedClasses; // We can't instrument these classes because they have native methods. Java won't allow us // to create an instrumented class that has the same prefix as a builtin class, so we have to // change the name. But when we rename the class, it can't use the old class's native methods any more. // This might have issues so for now I'm just not instrumenting java.io // private final static Set<String> uninstrumentedClasses; private ClassReferenceUtil() { } static void fileLoadError(String filename) { ErrorReporter.report(String.format("Error loading %s", "Check that the '%s' file exists and is not corrupted.", filename, filename)); throw new InstrumentationException(); } // the static constructor basically loads the whitelist files and caches them in allowedPackages and disallowedClasses static { BufferedReader reader; String line; ZipFile zfile; allowedPackages = new HashSet<String>(); disallowedClasses = new HashSet<String>(); // load allowed packages try { reader = new BufferedReader(new InputStreamReader(new FileInputStream("AllowedPackages.txt"))); while ((line = reader.readLine()) != null) { allowedPackages.add(line); } } catch (Exception e) { fileLoadError("AllowedPackages.txt"); } // load disallowed classes try { reader = new BufferedReader(new InputStreamReader(new FileInputStream("DisallowedClasses.txt"))); while ((line = reader.readLine()) != null) { disallowedClasses.add(line); } } catch (Exception e) { fileLoadError("DisallowedClasses.txt"); } } protected static boolean isInAllowedPackage(String className) { int dotIndex = className.lastIndexOf('/'); if (dotIndex == -1) return false; return allowedPackages.contains(className.substring(0, dotIndex)); } private static boolean shouldAddInstrumentedPrefix(String className) { if (className.startsWith("battlecode/")) { if (className.equals("battlecode/engine/instrumenter/lang/InstrumentableFunctions")) return true; else return false; } //if(className.startsWith("java/lang/")) // return false; if (className.startsWith("instrumented/")) return false; if (className.startsWith("java/util/jar") || className.startsWith("java/util/zip") || className.equals("java/util/concurrent/TimeUnit")) return false; if (className.equals("java/util/Iterator")) return false; if (className.startsWith("java/util/") || 
className.startsWith("java/math/")) return true; // We get a type mismatch if we instrument PrintStream but not System //if(uninstrumentedClasses.contains(className)) // return false; //if(isInAllowedPackage(className)) // return true; if (className.startsWith("sun/") || className.startsWith("com/") || className.startsWith("java/")) return false; //if(className.startsWith("sun/")|| // className.startsWith("com/")) // return false; return true; } /** * Registers a class reference, and may replace the reference with a reference to a different class. This method always returns * a class that should be referenced, even if the return value is the same as the given <code>className</code>. If this class has not been * referenced previously, the next call to flushNewlyReferencedClasses will return an array containing the given class (among others). * <p/> * If cR = classReference(cN,tPN,s,cD), then it should always be the case that * cR == classReference(cR,tPN,s,cD). If cR starts with instrumented/, then it should * also always be the case that cR == classReference(cR.substring(13),tPN,s,cD). * * @param className the name of the class that was referenced, in fully qualified form (e.g., "team666/navigation/Navigator") * @param teamPackageName the name of the team thaht referenced the given class * @return the name of the class that should replace this reference, in fully qualified form * @throws InstrumentationException if the class reference is not allowed */ public static String classReference(String className, String teamPackageName, boolean silenced, boolean checkDisallowed) { String ans = classReferenceX(className, teamPackageName, silenced, checkDisallowed); //System.out.println("CR "+className+":"+ans); return ans; } public static String classReferenceX(String className, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (className == null) return null; if (className.charAt(0) == '[') { int arrayIndex = className.lastIndexOf('['); //System.out.println("what do I do with "+className); if (className.charAt(arrayIndex + 1) == 'L') { return className.substring(0, arrayIndex + 2) + classReference(className.substring(arrayIndex + 2), teamPackageName, silenced, checkDisallowed); } else { return className; } } else if (className.startsWith(teamPackageName + "/")) return className; else if (className.equals("java/lang/System")) return "battlecode/engine/instrumenter/lang/System"; else if (className.equals("java/util/concurrent/ConcurrentHashMap")) return "battlecode/engine/instrumenter/lang/ConcurrentHashMap"; else if (className.equals("java/util/concurrent/atomic/AtomicInteger")) return "battlecode/engine/instrumenter/lang/AtomicInteger"; else if (className.equals("java/util/concurrent/atomic/AtomicLong")) return "battlecode/engine/instrumenter/lang/AtomicLong"; else if (className.equals("java/util/concurrent/atomic/AtomicReference")) return "battlecode/engine/instrumenter/lang/AtomicReference"; else if (className.equals("sun/misc/Unsafe")) return "battlecode/engine/instrumenter/lang/Unsafe"; if (checkDisallowed) { if (disallowedClasses.contains(className) || !isInAllowedPackage(className)) { return illegalClass(className, teamPackageName); } } if (className.equals("java/security/SecureRandom")) return "instrumented/java/util/Random"; if (shouldAddInstrumentedPrefix(className)) return "instrumented/" + className; else return className; } /** * Registers a class reference (see <code>classReference(...)</code>), but with the class name in a different format (descriptor, instead * 
of binary form). * * @param classDesc descriptor of the class that was referenced (e.g., "Lteam666/navigation/Navigator;") * @param teamPackageName the name of the team that referenced the given class * @throws InstrumentationException if the class reference is not allowed. */ public static String classDescReference(String classDesc, String teamPackageName, boolean silenced, boolean checkDisallowed) { String ans = classDescReferenceX(classDesc, teamPackageName, silenced, checkDisallowed); //System.out.println("CDR "+classDesc+":"+ans); return ans; } public static String classDescReferenceX(String classDesc, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (classDesc == null) return null; if (classDesc.charAt(0) == 'L') { return "L" + classReference(classDesc.substring(1, classDesc.length() - 1), teamPackageName, silenced, checkDisallowed) + ";"; } else if (classDesc.charAt(0) == '[') { int arrayIndex = classDesc.lastIndexOf('['); return classDesc.substring(0, arrayIndex + 1) + classDescReference(classDesc.substring(arrayIndex + 1, classDesc.length()), teamPackageName, silenced, checkDisallowed); } else { if (classDesc.length() > 1) System.out.println("unrecognized CDR " + classDesc); return classDesc; } } /** * Registers all the class references in a method descriptor, and replaces references as if classReference were called on each individual * reference. * * @param methodDesc descriptor for the method that was referenced (e.g., "(Ljava/util/Map;Z)Ljava/util/Set;") * @param teamPackageName the name of the team that referenced the given method * @throws InstrumentationException if any of the class references contained the the method descriptor are not allowed. */ public static String methodDescReference(String methodDesc, String teamPackageName, boolean silenced, boolean checkDisallowed) { String ret = "("; Type[] argTypes = Type.getArgumentTypes(methodDesc); for (int i = 0; i < argTypes.length; i++) { if (argTypes[i].getSort() == Type.ARRAY || argTypes[i].getSort() == Type.OBJECT) // HACK: whitelistSystem is set to true here b/c we're only replacing Object; once the whole library is replaced, this should be changed ret = ret + classDescReference(argTypes[i].toString(), teamPackageName, silenced, checkDisallowed); else ret = ret + argTypes[i].toString(); } ret = ret + ")"; Type returnType = Type.getReturnType(methodDesc); if (returnType.getSort() == Type.ARRAY || returnType.getSort() == Type.OBJECT) // HACK: whitelistSystem is set to true here b/c we're only replacing Object; once the whole library is replaced, this should be changed ret = ret + classDescReference(returnType.toString(), teamPackageName, silenced, checkDisallowed); else ret = ret + returnType.toString(); //System.out.println("mdr "+ret); return ret; } public static String methodSignatureReference(String signature, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (signature == null) return null; //System.out.println("meth "+signature); BattlecodeSignatureWriter writer = new BattlecodeSignatureWriter(teamPackageName, silenced, checkDisallowed); SignatureReader reader = new SignatureReader(signature); reader.accept(writer); //System.out.println("meth "+writer.toString()); return writer.toString(); } public static String fieldSignatureReference(String signature, String teamPackageName, boolean silenced, boolean checkDisallowed) { if (signature == null) return null; //System.out.println("field "+signature); BattlecodeSignatureWriter writer = new 
BattlecodeSignatureWriter(teamPackageName, silenced, checkDisallowed); SignatureReader reader = new SignatureReader(signature); reader.acceptType(writer); //System.out.println("field "+writer.toString()); return writer.toString(); } // called whenever an illegal class is found; throws an InstrumentationException private static String illegalClass(String className, String teamPackageName) { if (InstrumentingClassLoader.lazy()) { return "forbidden/" + className; } else { ErrorReporter.report("Illegal class: " + className + "\nThis class cannot be referenced by player " + teamPackageName, false); InstrumentationException e = new InstrumentationException(); throw e; } } }
fix unhelpful error message
src/main/battlecode/engine/instrumenter/ClassReferenceUtil.java
fix unhelpful error message
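The fix labelled "fix unhelpful error message" is easiest to see without ErrorReporter involved at all. In old_contents every argument was funnelled through a single String.format call whose format string had only one %s, and java.util.Formatter silently ignores arguments that have no matching specifier, so the file name never reached the report. A small self-contained sketch of the before/after formatting (ErrorReporter itself is project code and is left out here):

public class FormatFixSketch {
    public static void main(String[] args) {
        String filename = "AllowedPackages.txt";

        // Old call site: one format string, three trailing arguments. The first
        // argument fills %s and the two filenames are silently dropped.
        String broken = String.format("Error loading %s",
                "Check that the '%s' file exists and is not corrupted.", filename, filename);
        System.out.println(broken);
        // -> Error loading Check that the '%s' file exists and is not corrupted.

        // Fixed call site: format the report message and its explanation separately,
        // as the new fileLoadError does before handing both to ErrorReporter.report.
        String message = String.format("Error loading %s", filename);
        String explanation = String.format("Check that the '%s' file exists and is not corrupted.", filename);
        System.out.println(message);      // -> Error loading AllowedPackages.txt
        System.out.println(explanation);  // -> Check that the 'AllowedPackages.txt' file exists and is not corrupted.
    }
}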
Java
lgpl-2.1
be09f90c307e76b38e3b3dfd8c5c1cb1d9336536
0
NCNecros/jtalks-common,jtalks-org/jtalks-common
/** * Copyright (C) 2011 JTalks.org Team * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package org.jtalks.common.security.acl; import org.jtalks.common.model.entity.Entity; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.springframework.security.acls.domain.ObjectIdentityImpl; import org.springframework.security.acls.model.*; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.util.List; import static org.mockito.Mockito.*; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertSame; /** * @author stanislav bashkirtsev */ public class AclUtilTest { @Mock MutableAclService aclService; @Mock ObjectIdentityGenerator oidGenerator; AclUtil util; @BeforeMethod public void initMocks() { MockitoAnnotations.initMocks(this); util = new AclUtil(aclService); util.setObjectIdentityGenerator(oidGenerator); } @Test(dataProvider = "randomSidsAndPermissions", dataProviderClass = AclDataProvider.class) public void testDeletePermissionFromAcl(List<Sid> sids, List<Permission> permissions) throws Exception { ExtendedMutableAcl acl = mock(ExtendedMutableAcl.class); List<AccessControlEntry> entries = AclDataProvider.createEntries(acl, sids, permissions); when(acl.getEntries()).thenReturn(entries); util.deletePermissionsFromAcl(acl, sids, permissions); verify(acl).delete(entries); } @Test public void testGetAclForObjectIdentity() throws Exception { ObjectIdentity identity = new ObjectIdentityImpl(getClass().getName(), 1); MutableAcl mockAcl = mock(MutableAcl.class); when(aclService.readAclById(identity)).thenReturn(mockAcl); ExtendedMutableAcl extendedMutableAcl = util.getAclFor(identity); assertSame(extendedMutableAcl.getAcl(), mockAcl); } @Test @SuppressWarnings("unchecked") public void testGetAclForObjectIdentity_whichIsNotSavedSoFar() throws Exception { ObjectIdentity identity = new ObjectIdentityImpl(getClass().getName(), 1); MutableAcl mockAcl = mock(MutableAcl.class); when(aclService.readAclById(identity)).thenThrow(new NotFoundException("")); when(aclService.createAcl(identity)).thenReturn(mockAcl); ExtendedMutableAcl extendedMutableAcl = util.getAclFor(identity); assertSame(extendedMutableAcl.getAcl(), mockAcl); } @Test(dataProvider = "randomEntity", dataProviderClass = AclDataProvider.class) public void testCreateIdentityFor(Entity entity) throws Exception { ObjectIdentity identity = util.createIdentityFor(entity); assertEquals(identity.getType(), entity.getClass().getName()); assertEquals(identity.getIdentifier(), entity.getId()); } @Test(dataProvider = "notSavedEntity", dataProviderClass = AclDataProvider.class, expectedExceptions = IllegalArgumentException.class) public void testCreateIdentityFor_withWrongId(Entity entity) throws Exception { util.createIdentityFor(entity); } @Test public void testAclFromObjectIdentity_readAcl(){ 
ObjectIdentityImpl oid = new ObjectIdentityImpl("type", "1"); MutableAcl acl = mock(MutableAcl.class); when(oidGenerator.createObjectIdentity("1", "type")).thenReturn(oid); when(aclService.readAclById(oid)).thenReturn(acl); assertSame(((ExtendedMutableAcl)util.aclFromObjectIdentity("1", "type")).getAcl(), acl); } @Test public void testAclFromObjectIdentity_CreateAcl(){ ObjectIdentityImpl oid = new ObjectIdentityImpl("type", "1"); MutableAcl acl = mock(MutableAcl.class); when(oidGenerator.createObjectIdentity("1", "type")).thenReturn(oid); when(aclService.readAclById(oid)).thenThrow(new NotFoundException("")); when(aclService.createAcl(oid)).thenReturn(acl); assertSame(((ExtendedMutableAcl)util.aclFromObjectIdentity("1", "type")).getAcl(), acl); } }
jtalks-common-security/src/test/java/org/jtalks/common/security/acl/AclUtilTest.java
/** * Copyright (C) 2011 JTalks.org Team * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package org.jtalks.common.security.acl; import org.jtalks.common.model.entity.Entity; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.springframework.security.acls.domain.ObjectIdentityImpl; import org.springframework.security.acls.model.*; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import java.util.List; import static org.mockito.Mockito.*; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertSame; /** * @author stanislav bashkirtsev */ public class AclUtilTest { @Mock MutableAclService aclService; @Mock ObjectIdentityGenerator oidGenerator; AclUtil util; @BeforeMethod public void initMocks() { MockitoAnnotations.initMocks(this); util = new AclUtil(aclService); util.setObjectIdentityGenerator(oidGenerator); } @Test(dataProvider = "randomSidsAndPermissions", dataProviderClass = AclDataProvider.class) public void testDeletePermissionFromAcl(List<Sid> sids, List<Permission> permissions) throws Exception { ExtendedMutableAcl acl = mock(ExtendedMutableAcl.class); List<AccessControlEntry> entries = AclDataProvider.createEntries(acl, sids, permissions); when(acl.getEntries()).thenReturn(entries); util.deletePermissionsFromAcl(acl, sids, permissions); verify(acl).delete(entries); } @Test public void testGetAclForObjectIdentity() throws Exception { ObjectIdentity identity = new ObjectIdentityImpl(getClass().getName(), 1); MutableAcl mockAcl = mock(MutableAcl.class); when(aclService.readAclById(identity)).thenReturn(mockAcl); ExtendedMutableAcl extendedMutableAcl = util.getAclFor(identity); assertSame(extendedMutableAcl.getAcl(), mockAcl); } @Test @SuppressWarnings("unchecked") public void testGetAclForObjectIdentity_whichIsNotSavedSoFar() throws Exception { ObjectIdentity identity = new ObjectIdentityImpl(getClass().getName(), 1); MutableAcl mockAcl = mock(MutableAcl.class); when(aclService.readAclById(identity)).thenThrow(new NotFoundException("")); when(aclService.createAcl(identity)).thenReturn(mockAcl); ExtendedMutableAcl extendedMutableAcl = util.getAclFor(identity); assertSame(extendedMutableAcl.getAcl(), mockAcl); } @Test(dataProvider = "randomEntity", dataProviderClass = AclDataProvider.class) public void testCreateIdentityFor(Entity entity) throws Exception { ObjectIdentity identity = util.createIdentityFor(entity); assertEquals(identity.getType(), entity.getClass().getName()); assertEquals(identity.getIdentifier(), entity.getId()); } @Test(dataProvider = "notSavedEntity", dataProviderClass = AclDataProvider.class, expectedExceptions = IllegalArgumentException.class) public void testCreateIdentityFor_withWrongId(Entity entity) throws Exception { util.createIdentityFor(entity); } @Test public void testAclFromObjectIdentity(){ ObjectIdentityImpl 
oid = new ObjectIdentityImpl("type", "1"); MutableAcl acl = mock(MutableAcl.class); when(oidGenerator.createObjectIdentity("1", "type")).thenReturn(oid); when(aclService.readAclById(oid)).thenReturn(acl); assertSame(((ExtendedMutableAcl)util.aclFromObjectIdentity("1", "type")).getAcl(), acl); } }
Additional test for AclUtil
jtalks-common-security/src/test/java/org/jtalks/common/security/acl/AclUtilTest.java
Additional test for AclUtil
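The added testAclFromObjectIdentity_CreateAcl case (together with the renamed _readAcl case) pins down a read-or-create contract for AclUtil.aclFromObjectIdentity: return the existing ACL from MutableAclService.readAclById, and fall back to createAcl when that throws NotFoundException. AclUtil's implementation is not part of this record, so the sketch below is an inference from the two tests, written against the Spring Security ACL types already imported above rather than the real class:

import org.springframework.security.acls.model.MutableAcl;
import org.springframework.security.acls.model.MutableAclService;
import org.springframework.security.acls.model.NotFoundException;
import org.springframework.security.acls.model.ObjectIdentity;

final class ReadOrCreateAclSketch {
    // Behaviour implied by the tests: read the ACL if it is already stored,
    // otherwise create a fresh one for the same object identity.
    static MutableAcl readOrCreate(MutableAclService aclService, ObjectIdentity oid) {
        try {
            return (MutableAcl) aclService.readAclById(oid);
        } catch (NotFoundException e) {
            return aclService.createAcl(oid);
        }
    }
}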
Java
lgpl-2.1
407b2fde09127fbc05dae5194addd9e8c90f40bf
0
CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine
/* * jETeL/CloverETL - Java based ETL application framework. * Copyright (c) Javlin, a.s. (info@cloveretl.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jetel.component; import java.io.IOException; import java.util.Properties; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jetel.data.DataField; import org.jetel.data.DataRecord; import org.jetel.data.DataRecordFactory; import org.jetel.data.Defaults; import org.jetel.data.FileRecordBuffer; import org.jetel.exception.AttributeNotFoundException; import org.jetel.exception.ComponentNotReadyException; import org.jetel.exception.ConfigurationStatus; import org.jetel.exception.ConfigurationStatus.Priority; import org.jetel.exception.ConfigurationStatus.Severity; import org.jetel.exception.TransformException; import org.jetel.graph.InputPort; import org.jetel.graph.Node; import org.jetel.graph.OutputPort; import org.jetel.graph.Result; import org.jetel.graph.TransformationGraph; import org.jetel.graph.modelview.MVMetadata; import org.jetel.graph.modelview.impl.MetadataPropagationResolver; import org.jetel.metadata.DataFieldContainerType; import org.jetel.metadata.DataFieldMetadata; import org.jetel.metadata.DataFieldType; import org.jetel.metadata.DataRecordMetadata; import org.jetel.util.bytes.CloverBuffer; import org.jetel.util.primitive.TypedProperties; import org.jetel.util.property.ComponentXMLAttributes; import org.jetel.util.property.RefResFlag; import org.jetel.util.string.StringUtils; import org.w3c.dom.Element; /** * CrossJoin component, also known as CartesianProduct * @author salamonp (info@cloveretl.com) * (c) Javlin, a.s. (www.cloveretl.com) * * @created 1. 12. 2014 */ public class CrossJoin extends Node implements MetadataProvider { public final static String COMPONENT_TYPE = "CROSS_JOIN"; private final static String OUT_METADATA_NAME = "CrossJoin_dynamic"; private final static String OUT_METADATA_ID_SUFFIX = "_outMetadata"; private static final String XML_TRANSFORMCLASS_ATTRIBUTE = "transformClass"; private static final String XML_TRANSFORM_ATTRIBUTE = "transform"; private static final String XML_TRANSFORMURL_ATTRIBUTE = "transformURL"; private static final String XML_CHARSET_ATTRIBUTE = "charset"; private final static int WRITE_TO_PORT = 0; private final static int MASTER_PORT = 0; private final static int FIRST_SLAVE_PORT = 1; /** Amount of memory for records from each slave port. When memory is full, the records are swapped to disk. 
*/ private final static int SLAVE_BUFFER_SIZE = Defaults.Record.RECORDS_BUFFER_SIZE; // 256 KB // attributes private String transformClassName; private String transformSource; private String transformURL; private String charset; private RecordTransform transformation; private Properties transformationParameters; // slaves management private int slaveCount; private boolean[] slaveFinishedReading; private ShiftingFileBuffer[] slaveRecordsMemory; /** Record buffer for slave records */ private CloverBuffer data = CloverBuffer.allocateDirect(Defaults.Record.RECORD_INITIAL_SIZE); /** Helper variable, needed for maintaining reference to "data" buffer */ private CloverBuffer recordInMemory; // input private InputPort masterPort; private InputPort[] slavePorts; private DataRecord masterRecord; private DataRecord[] slaveRecords; // output private OutputPort outPort; private DataRecord[] outRecord; // size 1 static Log logger = LogFactory.getLog(CrossJoin.class); public CrossJoin(String id, String transform, String transformUrl, String transformClass, String charset) { super(id); this.transformSource = transform; this.transformURL = transformUrl; this.transformClassName = transformClass; this.charset = charset; } @Override public void init() throws ComponentNotReadyException { super.init(); DataRecordMetadata[] outMetadata = new DataRecordMetadata[] { getOutputPort(WRITE_TO_PORT).getMetadata() }; DataRecordMetadata[] inMetadata = getInMetadataArray(); createTransformIfPossible(inMetadata, outMetadata); // init transformation if (transformation != null && !transformation.init(transformationParameters, inMetadata, outMetadata)) { throw new ComponentNotReadyException("Error when initializing tranformation function."); } } private void createTransformIfPossible(DataRecordMetadata[] inMetadata, DataRecordMetadata[] outMetadata) { if (transformSource != null || transformURL != null || transformClassName != null) { transformation = getTransformFactory(inMetadata, outMetadata).createTransform(); } } private TransformFactory<RecordTransform> getTransformFactory(DataRecordMetadata[] inMetadata, DataRecordMetadata[] outMetadata) { TransformFactory<RecordTransform> transformFactory = TransformFactory.createTransformFactory(RecordTransformDescriptor.newInstance()); transformFactory.setTransform(transformSource); transformFactory.setTransformClass(transformClassName); transformFactory.setTransformUrl(transformURL); transformFactory.setCharset(charset); transformFactory.setComponent(this); transformFactory.setInMetadata(inMetadata); transformFactory.setOutMetadata(outMetadata); return transformFactory; } @Override public void preExecute() throws ComponentNotReadyException { super.preExecute(); if (transformation != null) { transformation.preExecute(); } slaveCount = inPorts.size() - 1; //init input masterPort = getInputPort(MASTER_PORT); masterRecord = DataRecordFactory.newRecord(masterPort.getMetadata()); masterRecord.init(); slavePorts = new InputPort[slaveCount]; slaveRecords = new DataRecord[slaveCount]; slaveFinishedReading = new boolean[slaveCount]; slaveRecordsMemory = new ShiftingFileBuffer[slaveCount]; for (int slaveIdx = 0; slaveIdx < slaveCount; slaveIdx++) { slavePorts[slaveIdx] = getInputPort(FIRST_SLAVE_PORT + slaveIdx); slaveRecords[slaveIdx] = DataRecordFactory.newRecord(slavePorts[slaveIdx].getMetadata()); slaveRecords[slaveIdx].init(); slaveFinishedReading[slaveIdx] = false; slaveRecordsMemory[slaveIdx] = new ShiftingFileBuffer(SLAVE_BUFFER_SIZE); } // init output outPort = 
getOutputPort(WRITE_TO_PORT); outRecord = new DataRecord[] { DataRecordFactory.newRecord(outPort.getMetadata()) }; outRecord[WRITE_TO_PORT].init(); outRecord[WRITE_TO_PORT].reset(); } @Override public void postExecute() throws ComponentNotReadyException { super.postExecute(); if (transformation != null) { transformation.postExecute(); } } @Override public void free() { super.free(); try { for (int i = 0; i < slaveRecordsMemory.length; i++) { slaveRecordsMemory[i].close(); } } catch (IOException e) { logger.debug("Exception while clearing slave records memory of " + this.getName() + ". Message: " + e.getMessage()); } } /** * Concatenates passed DataRecord array and writes it to the output port. * @param currentRecords * @throws IOException * @throws InterruptedException * @throws TransformException */ private void writeRecord(DataRecord[] currentRecords) throws IOException, InterruptedException, TransformException { if (transformation != null) { int transformResult; try { transformResult = transformation.transform(currentRecords, outRecord); } catch (Exception exception) { transformResult = transformation.transformOnError(exception, currentRecords, outRecord); } } else { int outFieldIndex = 0; DataField[] outFields = outRecord[WRITE_TO_PORT].getFields(); for (DataRecord rec : currentRecords) { for (DataField field : rec.getFields()) { outFields[outFieldIndex].setValue(field); outFieldIndex++; } } } outPort.writeRecord(outRecord[WRITE_TO_PORT]); outRecord[WRITE_TO_PORT].reset(); } /** * Reads record from specified slave port and inserts it at the current position of the iterator. * Subsequent call to iterator.next() will return the newly read record. * @param slaveIdx * @param iter * @throws IOException * @throws InterruptedException */ private DataRecord readSlaveRecord(int slaveIdx) throws IOException, InterruptedException { if (slavePorts[slaveIdx].readRecord(slaveRecords[slaveIdx]) == null) { // no more input data slaveFinishedReading[slaveIdx] = true; return null; } return slaveRecords[slaveIdx]; } /** * Recursive method performing the logic of Cartesian product. * @param currentRecords * @param slaveIdx * @throws IOException * @throws InterruptedException * @throws TransformException */ private void recursiveAppendSlaveRecord(DataRecord[] currentRecords, int slaveIdx) throws IOException, InterruptedException, TransformException { if (slaveIdx >= slaveCount) { writeRecord(currentRecords); return; } slaveRecordsMemory[slaveIdx].rewind(); data.clear(); while (runIt && ((recordInMemory = slaveRecordsMemory[slaveIdx].shift(data)) != null || !slaveFinishedReading[slaveIdx])) { if (recordInMemory == null) { // no record in memory any more, we need to read more DataRecord slaveRecord = readSlaveRecord(slaveIdx); if (slaveRecord == null) { // all records read from this slave break; } else { slaveRecord.serialize(data); data.flip(); slaveRecordsMemory[slaveIdx].push(data); currentRecords[slaveIdx + 1] = slaveRecord; } } else { // record found in memory // At this point, data and recordInMemory are actually the same buffer instance. 
data.flip(); currentRecords[slaveIdx + 1].deserialize(data); } recursiveAppendSlaveRecord(currentRecords, slaveIdx + 1); data.clear(); } } @Override protected Result execute() throws IOException, InterruptedException, TransformException { DataRecord[] currentRecords = new DataRecord[slaveCount + 1]; //master and slaves for (int slaveIdx = 0; slaveIdx < slaveCount; slaveIdx++) { currentRecords[slaveIdx + 1] = slaveRecords[slaveIdx].duplicate(); } while (runIt && masterPort.readRecord(masterRecord) != null) { currentRecords[0] = masterRecord.duplicate(); recursiveAppendSlaveRecord(currentRecords, 0); //SynchronizeUtils.cloverYield(); } setEOF(WRITE_TO_PORT); ensureAllRecordsRead(); return (runIt ? Result.FINISHED_OK : Result.ABORTED); } /** * When no records are received from some input port, some records may be hanging on other * input ports because they were not needed to produce correct result. We need to read these * hanging records because clover doesn't like unread input records. This method just reads * all input records and throws them away. * @throws IOException * @throws InterruptedException */ private void ensureAllRecordsRead() throws IOException, InterruptedException { for (int slaveIdx = 0; slaveIdx < slaveCount; slaveIdx++) { if (!slaveFinishedReading[slaveIdx]) { while (runIt && slavePorts[slaveIdx].readRecord(slaveRecords[slaveIdx]) != null) { // just blank read here } } } } public static Node fromXML(TransformationGraph graph, Element xmlElement) throws AttributeNotFoundException { ComponentXMLAttributes xattribs = new ComponentXMLAttributes(xmlElement, graph); CrossJoin join = new CrossJoin(xattribs.getString(XML_ID_ATTRIBUTE), xattribs.getStringEx(XML_TRANSFORM_ATTRIBUTE, null, RefResFlag.SPEC_CHARACTERS_OFF), xattribs.getStringEx(XML_TRANSFORMURL_ATTRIBUTE, null, RefResFlag.URL), xattribs.getString(XML_TRANSFORMCLASS_ATTRIBUTE, null), xattribs.getString(XML_CHARSET_ATTRIBUTE, null) ); join.setTransformationParameters(xattribs.attributes2Properties( new String[] {XML_ID_ATTRIBUTE, XML_TRANSFORM_ATTRIBUTE, XML_TRANSFORMCLASS_ATTRIBUTE})); return join; } @Override public ConfigurationStatus checkConfig(ConfigurationStatus status) { super.checkConfig(status); if (!checkInputPorts(status, 1, Integer.MAX_VALUE) || !checkOutputPorts(status, 1, 1)) { return status; } DataRecordMetadata[] outMeta = new DataRecordMetadata[] { getOutputPort(WRITE_TO_PORT).getMetadata() }; DataRecordMetadata[] inMeta = getInMetadataArray(); getTransformFactory(inMeta, outMeta).checkConfig(status); if (transformSource == null && transformURL == null && transformClassName == null) { DataRecordMetadata expectedOutMetadata = getConcatenatedMetadata(null); DataRecordMetadata outMetadata = getOutputPort(WRITE_TO_PORT).getMetadata(); DataFieldMetadata[] expectedFields = expectedOutMetadata.getFields(); DataFieldMetadata[] outFields = outMetadata.getFields(); if (outFields.length < expectedFields.length) { status.add("Incompatible metadata on output. Not enough fields in output metadata.", Severity.ERROR, this, Priority.NORMAL); return status; } for (int i = 0; i < expectedFields.length; i++) { DataFieldType expectedType = expectedFields[i].getDataType(); DataFieldType outType = outFields[i].getDataType(); DataFieldContainerType expectedContainer = expectedFields[i].getContainerType(); DataFieldContainerType outContainer = outFields[i].getContainerType(); if (expectedType != outType || expectedContainer != outContainer) { StringBuilder sb = new StringBuilder("Incompatible metadata on output. 
Expected type "); if (expectedContainer == null || expectedContainer == DataFieldContainerType.SINGLE) { sb.append(StringUtils.quote(expectedType.toString())); } else if (expectedContainer == DataFieldContainerType.LIST) { sb.append(StringUtils.quote("list[" + expectedType + "]")); } else if (expectedContainer == DataFieldContainerType.MAP) { sb.append(StringUtils.quote("map[" + expectedType + "]")); } sb.append(" on position " + (i + 1) + ". Found: "); if (outContainer == null || outContainer == DataFieldContainerType.SINGLE) { sb.append(StringUtils.quote(outType.toString())); } else if (outContainer == DataFieldContainerType.LIST) { sb.append(StringUtils.quote("list[" + outType + "]")); } else if (outContainer == DataFieldContainerType.MAP) { sb.append(StringUtils.quote("map[" + outType + "]")); } sb.append("."); status.add(sb.toString(), Severity.ERROR, this, Priority.NORMAL); } } } return status; } /** * Produces metadata for output based on metadata on inputs. The goal is to produce exactly the metadata user * wants in most cases. * * Output metadata are made by copying metadata on first input port and copying all fields from other input ports. * @return */ private DataRecordMetadata getConcatenatedMetadata(DataRecordMetadata[] inputMetadata) { if (inputMetadata == null) { inputMetadata = new DataRecordMetadata[inPorts.size()]; for (int i = 0; i < inputMetadata.length; i++) { inputMetadata[i] = getInputPort(i).getMetadata(); } } DataRecordMetadata outMeta = inputMetadata[0].duplicate(); for (int i = 1; i < inputMetadata.length; i++) { for (DataFieldMetadata inFieldMeta : inputMetadata[i].getFields()) { outMeta.addField(inFieldMeta.duplicate()); } } outMeta.setLabel(null); // because of normalization which is done later (it copies label into name, we don't want that) outMeta.setName(OUT_METADATA_NAME); TypedProperties props = outMeta.getRecordProperties(); props.clear(); // clear GUI properties (preview attachment etc.) DataRecordMetadata.normalizeMetadata(outMeta); return outMeta; } @Override public MVMetadata getInputMetadata(int portIndex, MetadataPropagationResolver metadataPropagationResolver) { return null; } @Override public MVMetadata getOutputMetadata(int portIndex, MetadataPropagationResolver metadataPropagationResolver) { if (portIndex == WRITE_TO_PORT) { DataRecordMetadata[] inputMetadata = new DataRecordMetadata[inPorts.size()]; int index = 0; for (InputPort port : inPorts.values()) { MVMetadata metadata = metadataPropagationResolver.findMetadata(port.getEdge()); if (metadata == null) { return null; } inputMetadata[index] = metadata.getModel(); index++; } return metadataPropagationResolver.createMVMetadata(getConcatenatedMetadata(inputMetadata), this, OUT_METADATA_ID_SUFFIX); } return null; } public void setTransformationParameters(Properties transformationParameters) { this.transformationParameters = transformationParameters; } /** * This implementation provides no records immediately after writing and allows reading only from the beginning using rewind(). */ private static class ShiftingFileBuffer extends FileRecordBuffer { public ShiftingFileBuffer(int bufferSize) { super(bufferSize); } @Override public void push(CloverBuffer data) throws IOException { super.push(data); readPosition = writePosition; } } }
cloveretl.component/src/org/jetel/component/CrossJoin.java
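The recursiveAppendSlaveRecord method in the CrossJoin code above produces the Cartesian product by nesting one level of iteration per input: each master record is combined with every buffered record of the first slave, each of those with every record of the second slave, and so on, and a concatenated record is written at the deepest level. The standalone sketch below illustrates that recursion with plain string lists instead of CloverETL ports and DataRecords; the class name and all values in it are hypothetical and purely illustrative.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Conceptual sketch of the CrossJoin recursion using plain lists
// instead of CloverETL ports and DataRecords (names are made up).
public class CrossJoinSketch {

    // inputs.get(0) plays the role of the master port, the rest are "slaves"
    static void crossJoin(List<List<String>> inputs, String[] current, int level, List<String> out) {
        if (level == inputs.size()) {
            // deepest level reached: "write" one concatenated output record
            out.add(String.join("|", current));
            return;
        }
        for (String record : inputs.get(level)) {   // replay all records of this input
            current[level] = record;
            crossJoin(inputs, current, level + 1, out);
        }
    }

    public static void main(String[] args) {
        List<List<String>> inputs = Arrays.asList(
                Arrays.asList("m1", "m2"),      // master records
                Arrays.asList("a", "b"),        // slave 1
                Arrays.asList("x", "y"));       // slave 2
        List<String> out = new ArrayList<>();
        crossJoin(inputs, new String[inputs.size()], 0, out);
        out.forEach(System.out::println);       // 2 * 2 * 2 = 8 combinations
    }
}

In the component itself the master records are streamed from port 0 one at a time, while each slave's records are buffered (spilling to disk via ShiftingFileBuffer) so they can be replayed for every master record.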
/* * jETeL/CloverETL - Java based ETL application framework. * Copyright (c) Javlin, a.s. (info@cloveretl.com) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jetel.component; import java.io.IOException; import java.util.Properties; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jetel.data.DataField; import org.jetel.data.DataRecord; import org.jetel.data.DataRecordFactory; import org.jetel.data.Defaults; import org.jetel.data.FileRecordBuffer; import org.jetel.exception.AttributeNotFoundException; import org.jetel.exception.ComponentNotReadyException; import org.jetel.exception.ConfigurationStatus; import org.jetel.exception.ConfigurationStatus.Priority; import org.jetel.exception.ConfigurationStatus.Severity; import org.jetel.exception.TransformException; import org.jetel.graph.InputPort; import org.jetel.graph.Node; import org.jetel.graph.OutputPort; import org.jetel.graph.Result; import org.jetel.graph.TransformationGraph; import org.jetel.graph.modelview.MVMetadata; import org.jetel.graph.modelview.impl.MetadataPropagationResolver; import org.jetel.metadata.DataFieldContainerType; import org.jetel.metadata.DataFieldMetadata; import org.jetel.metadata.DataFieldType; import org.jetel.metadata.DataRecordMetadata; import org.jetel.util.bytes.CloverBuffer; import org.jetel.util.primitive.TypedProperties; import org.jetel.util.property.ComponentXMLAttributes; import org.jetel.util.property.RefResFlag; import org.jetel.util.string.StringUtils; import org.w3c.dom.Element; /** * CrossJoin component, also known as CartesianProduct * @author salamonp (info@cloveretl.com) * (c) Javlin, a.s. (www.cloveretl.com) * * @created 1. 12. 2014 */ public class CrossJoin extends Node implements MetadataProvider { public final static String COMPONENT_TYPE = "CROSS_JOIN"; private final static String OUT_METADATA_NAME = "CrossJoin_dynamic"; private final static String OUT_METADATA_ID_SUFFIX = "_outMetadata"; private static final String XML_TRANSFORMCLASS_ATTRIBUTE = "transformClass"; private static final String XML_TRANSFORM_ATTRIBUTE = "transform"; private static final String XML_TRANSFORMURL_ATTRIBUTE = "transformURL"; private static final String XML_CHARSET_ATTRIBUTE = "charset"; private final static int WRITE_TO_PORT = 0; private final static int MASTER_PORT = 0; private final static int FIRST_SLAVE_PORT = 1; /** Amount of memory for records from each slave port. When memory is full, the records are swapped to disk. 
*/ private final static int SLAVE_BUFFER_SIZE = Defaults.Record.RECORD_INITIAL_SIZE * 8; // 512 KB // attributes private String transformClassName; private String transformSource; private String transformURL; private String charset; private RecordTransform transformation; private Properties transformationParameters; // slaves management private int slaveCount; private boolean[] slaveFinishedReading; private ShiftingFileBuffer[] slaveRecordsMemory; /** Record buffer for slave records */ private CloverBuffer data = CloverBuffer.allocateDirect(org.jetel.data.Defaults.Record.RECORD_INITIAL_SIZE); /** Helper variable, needed for maintaining reference to "data" buffer */ private CloverBuffer recordInMemory; // input private InputPort masterPort; private InputPort[] slavePorts; private DataRecord masterRecord; private DataRecord[] slaveRecords; // output private OutputPort outPort; private DataRecord[] outRecord; // size 1 static Log logger = LogFactory.getLog(CrossJoin.class); public CrossJoin(String id, String transform, String transformUrl, String transformClass, String charset) { super(id); this.transformSource = transform; this.transformURL = transformUrl; this.transformClassName = transformClass; this.charset = charset; } @Override public void init() throws ComponentNotReadyException { super.init(); DataRecordMetadata[] outMetadata = new DataRecordMetadata[] { getOutputPort(WRITE_TO_PORT).getMetadata() }; DataRecordMetadata[] inMetadata = getInMetadataArray(); createTransformIfPossible(inMetadata, outMetadata); // init transformation if (transformation != null && !transformation.init(transformationParameters, inMetadata, outMetadata)) { throw new ComponentNotReadyException("Error when initializing tranformation function."); } } private void createTransformIfPossible(DataRecordMetadata[] inMetadata, DataRecordMetadata[] outMetadata) { if (transformSource != null || transformURL != null || transformClassName != null) { transformation = getTransformFactory(inMetadata, outMetadata).createTransform(); } } private TransformFactory<RecordTransform> getTransformFactory(DataRecordMetadata[] inMetadata, DataRecordMetadata[] outMetadata) { TransformFactory<RecordTransform> transformFactory = TransformFactory.createTransformFactory(RecordTransformDescriptor.newInstance()); transformFactory.setTransform(transformSource); transformFactory.setTransformClass(transformClassName); transformFactory.setTransformUrl(transformURL); transformFactory.setCharset(charset); transformFactory.setComponent(this); transformFactory.setInMetadata(inMetadata); transformFactory.setOutMetadata(outMetadata); return transformFactory; } @Override public void preExecute() throws ComponentNotReadyException { super.preExecute(); if (transformation != null) { transformation.preExecute(); } slaveCount = inPorts.size() - 1; //init input masterPort = getInputPort(MASTER_PORT); masterRecord = DataRecordFactory.newRecord(masterPort.getMetadata()); masterRecord.init(); slavePorts = new InputPort[slaveCount]; slaveRecords = new DataRecord[slaveCount]; slaveFinishedReading = new boolean[slaveCount]; slaveRecordsMemory = new ShiftingFileBuffer[slaveCount]; for (int slaveIdx = 0; slaveIdx < slaveCount; slaveIdx++) { slavePorts[slaveIdx] = getInputPort(FIRST_SLAVE_PORT + slaveIdx); slaveRecords[slaveIdx] = DataRecordFactory.newRecord(slavePorts[slaveIdx].getMetadata()); slaveRecords[slaveIdx].init(); slaveFinishedReading[slaveIdx] = false; slaveRecordsMemory[slaveIdx] = new ShiftingFileBuffer(SLAVE_BUFFER_SIZE); } // init output outPort = 
getOutputPort(WRITE_TO_PORT); outRecord = new DataRecord[] { DataRecordFactory.newRecord(outPort.getMetadata()) }; outRecord[WRITE_TO_PORT].init(); outRecord[WRITE_TO_PORT].reset(); } @Override public void postExecute() throws ComponentNotReadyException { super.postExecute(); if (transformation != null) { transformation.postExecute(); } } @Override public void free() { super.free(); try { for (int i = 0; i < slaveRecordsMemory.length; i++) { slaveRecordsMemory[i].close(); } } catch (IOException e) { logger.debug("Exception while clearing slave records memory of " + this.getName() + ". Message: " + e.getMessage()); } } /** * Concatenates passed DataRecord array and writes it to the output port. * @param currentRecords * @throws IOException * @throws InterruptedException * @throws TransformException */ private void writeRecord(DataRecord[] currentRecords) throws IOException, InterruptedException, TransformException { if (transformation != null) { int transformResult; try { transformResult = transformation.transform(currentRecords, outRecord); } catch (Exception exception) { transformResult = transformation.transformOnError(exception, currentRecords, outRecord); } } else { int outFieldIndex = 0; DataField[] outFields = outRecord[WRITE_TO_PORT].getFields(); for (DataRecord rec : currentRecords) { for (DataField field : rec.getFields()) { outFields[outFieldIndex].setValue(field); outFieldIndex++; } } } outPort.writeRecord(outRecord[WRITE_TO_PORT]); outRecord[WRITE_TO_PORT].reset(); } /** * Reads record from specified slave port and inserts it at the current position of the iterator. * Subsequent call to iterator.next() will return the newly read record. * @param slaveIdx * @param iter * @throws IOException * @throws InterruptedException */ private DataRecord readSlaveRecord(int slaveIdx) throws IOException, InterruptedException { if (slavePorts[slaveIdx].readRecord(slaveRecords[slaveIdx]) == null) { // no more input data slaveFinishedReading[slaveIdx] = true; return null; } return slaveRecords[slaveIdx]; } /** * Recursive method performing the logic of Cartesian product. * @param currentRecords * @param slaveIdx * @throws IOException * @throws InterruptedException * @throws TransformException */ private void recursiveAppendSlaveRecord(DataRecord[] currentRecords, int slaveIdx) throws IOException, InterruptedException, TransformException { if (slaveIdx >= slaveCount) { writeRecord(currentRecords); return; } slaveRecordsMemory[slaveIdx].rewind(); data.clear(); while (runIt && ((recordInMemory = slaveRecordsMemory[slaveIdx].shift(data)) != null || !slaveFinishedReading[slaveIdx])) { if (recordInMemory == null) { // no record in memory any more, we need to read more DataRecord slaveRecord = readSlaveRecord(slaveIdx); if (slaveRecord == null) { // all records read from this slave break; } else { slaveRecord.serialize(data); data.flip(); slaveRecordsMemory[slaveIdx].push(data); currentRecords[slaveIdx + 1] = slaveRecord; } } else { // record found in memory // At this point, data and recordInMemory are actually the same buffer instance. 
data.flip(); currentRecords[slaveIdx + 1].deserialize(data); } recursiveAppendSlaveRecord(currentRecords, slaveIdx + 1); data.clear(); } } @Override protected Result execute() throws IOException, InterruptedException, TransformException { DataRecord[] currentRecords = new DataRecord[slaveCount + 1]; //master and slaves for (int slaveIdx = 0; slaveIdx < slaveCount; slaveIdx++) { currentRecords[slaveIdx + 1] = slaveRecords[slaveIdx].duplicate(); } while (runIt && masterPort.readRecord(masterRecord) != null) { currentRecords[0] = masterRecord.duplicate(); recursiveAppendSlaveRecord(currentRecords, 0); //SynchronizeUtils.cloverYield(); } setEOF(WRITE_TO_PORT); ensureAllRecordsRead(); return (runIt ? Result.FINISHED_OK : Result.ABORTED); } /** * When no records are received from some input port, some records may be hanging on other * input ports because they were not needed to produce correct result. We need to read these * hanging records because clover doesn't like unread input records. This method just reads * all input records and throws them away. * @throws IOException * @throws InterruptedException */ private void ensureAllRecordsRead() throws IOException, InterruptedException { for (int slaveIdx = 0; slaveIdx < slaveCount; slaveIdx++) { if (!slaveFinishedReading[slaveIdx]) { while (runIt && slavePorts[slaveIdx].readRecord(slaveRecords[slaveIdx]) != null) { // just blank read here } } } } public static Node fromXML(TransformationGraph graph, Element xmlElement) throws AttributeNotFoundException { ComponentXMLAttributes xattribs = new ComponentXMLAttributes(xmlElement, graph); CrossJoin join = new CrossJoin(xattribs.getString(XML_ID_ATTRIBUTE), xattribs.getStringEx(XML_TRANSFORM_ATTRIBUTE, null, RefResFlag.SPEC_CHARACTERS_OFF), xattribs.getStringEx(XML_TRANSFORMURL_ATTRIBUTE, null, RefResFlag.URL), xattribs.getString(XML_TRANSFORMCLASS_ATTRIBUTE, null), xattribs.getString(XML_CHARSET_ATTRIBUTE, null) ); join.setTransformationParameters(xattribs.attributes2Properties( new String[] {XML_ID_ATTRIBUTE, XML_TRANSFORM_ATTRIBUTE, XML_TRANSFORMCLASS_ATTRIBUTE})); return join; } @Override public ConfigurationStatus checkConfig(ConfigurationStatus status) { super.checkConfig(status); if (!checkInputPorts(status, 1, Integer.MAX_VALUE) || !checkOutputPorts(status, 1, 1)) { return status; } DataRecordMetadata[] outMeta = new DataRecordMetadata[] { getOutputPort(WRITE_TO_PORT).getMetadata() }; DataRecordMetadata[] inMeta = getInMetadataArray(); createTransformIfPossible(inMeta, outMeta); if (transformation == null) { DataRecordMetadata expectedOutMetadata = getConcatenatedMetadata(null); DataRecordMetadata outMetadata = getOutputPort(WRITE_TO_PORT).getMetadata(); DataFieldMetadata[] expectedFields = expectedOutMetadata.getFields(); DataFieldMetadata[] outFields = outMetadata.getFields(); if (outFields.length < expectedFields.length) { status.add("Incompatible metadata on output. Not enough fields in output metadata.", Severity.ERROR, this, Priority.NORMAL); return status; } for (int i = 0; i < expectedFields.length; i++) { DataFieldType expectedType = expectedFields[i].getDataType(); DataFieldType outType = outFields[i].getDataType(); DataFieldContainerType expectedContainer = expectedFields[i].getContainerType(); DataFieldContainerType outContainer = outFields[i].getContainerType(); if (expectedType != outType || expectedContainer != outContainer) { StringBuilder sb = new StringBuilder("Incompatible metadata on output. 
Expected type "); if (expectedContainer == null || expectedContainer == DataFieldContainerType.SINGLE) { sb.append(StringUtils.quote(expectedType.toString())); } else if (expectedContainer == DataFieldContainerType.LIST) { sb.append(StringUtils.quote("list[" + expectedType + "]")); } else if (expectedContainer == DataFieldContainerType.MAP) { sb.append(StringUtils.quote("map[" + expectedType + "]")); } sb.append(" on position " + (i + 1) + ". Found: "); if (outContainer == null || outContainer == DataFieldContainerType.SINGLE) { sb.append(StringUtils.quote(outType.toString())); } else if (outContainer == DataFieldContainerType.LIST) { sb.append(StringUtils.quote("list[" + outType + "]")); } else if (outContainer == DataFieldContainerType.MAP) { sb.append(StringUtils.quote("map[" + outType + "]")); } sb.append("."); status.add(sb.toString(), Severity.ERROR, this, Priority.NORMAL); } } } return status; } /** * Produces metadata for output based on metadata on inputs. The goal is to produce exactly the metadata user * wants in most cases. * * Output metadata are made by copying metadata on first input port and copying all fields from other input ports. * @return */ private DataRecordMetadata getConcatenatedMetadata(DataRecordMetadata[] inputMetadata) { if (inputMetadata == null) { inputMetadata = new DataRecordMetadata[inPorts.size()]; for (int i = 0; i < inputMetadata.length; i++) { inputMetadata[i] = getInputPort(i).getMetadata(); } } DataRecordMetadata outMeta = inputMetadata[0].duplicate(); for (int i = 1; i < inputMetadata.length; i++) { for (DataFieldMetadata inFieldMeta : inputMetadata[i].getFields()) { outMeta.addField(inFieldMeta.duplicate()); } } outMeta.setLabel(null); // because of normalization which is done later (it copies label into name, we don't want that) outMeta.setName(OUT_METADATA_NAME); TypedProperties props = outMeta.getRecordProperties(); props.clear(); // clear GUI properties (preview attachment etc.) DataRecordMetadata.normalizeMetadata(outMeta); return outMeta; } @Override public MVMetadata getInputMetadata(int portIndex, MetadataPropagationResolver metadataPropagationResolver) { return null; } @Override public MVMetadata getOutputMetadata(int portIndex, MetadataPropagationResolver metadataPropagationResolver) { if (portIndex == WRITE_TO_PORT) { DataRecordMetadata[] inputMetadata = new DataRecordMetadata[inPorts.size()]; int index = 0; for (InputPort port : inPorts.values()) { MVMetadata metadata = metadataPropagationResolver.findMetadata(port.getEdge()); if (metadata == null) { return null; } inputMetadata[index] = metadata.getModel(); index++; } return metadataPropagationResolver.createMVMetadata(getConcatenatedMetadata(inputMetadata), this, OUT_METADATA_ID_SUFFIX); } return null; } public void setTransformationParameters(Properties transformationParameters) { this.transformationParameters = transformationParameters; } /** * This implementation provides no records immediately after writing and allows reading only from the beginning using rewind(). */ private static class ShiftingFileBuffer extends FileRecordBuffer { public ShiftingFileBuffer(int bufferSize) { super(bufferSize); } @Override public void push(CloverBuffer data) throws IOException { super.push(data); readPosition = writePosition; } } }
MINOR: CLO-2683 - Cartesian product buffer size tweaks, code clean git-svn-id: 7dbdde2f5643bf55eb500cbc70a792d560074c67@16932 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
cloveretl.component/src/org/jetel/component/CrossJoin.java
MINOR: CLO-2683 - Cartesian product buffer size tweaks, code clean
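The ShiftingFileBuffer inner class gives each slave port a write-once, replay-many store: a record pushed during an ongoing scan is not returned by that scan (push moves the read position to the write position), and the buffered records become visible again only after rewind(). A rough in-memory analogue of that contract, using a plain list in place of the disk-backed FileRecordBuffer, is sketched below; the class is hypothetical and only illustrates the access pattern.

import java.util.ArrayList;
import java.util.List;

// In-memory analogue of the ShiftingFileBuffer contract (illustrative only):
// a record pushed during a scan is skipped by that scan and becomes readable
// only after rewind() restarts reading from the beginning.
public class ReplayBufferSketch {

    static class ReplayBuffer<T> {
        private final List<T> records = new ArrayList<>();
        private int readPosition;

        void push(T record) {
            records.add(record);
            readPosition = records.size();   // mimic "readPosition = writePosition"
        }

        T shift() {                          // next record of the current scan, or null
            return readPosition < records.size() ? records.get(readPosition++) : null;
        }

        void rewind() {                      // make all buffered records readable again
            readPosition = 0;
        }
    }

    public static void main(String[] args) {
        ReplayBuffer<String> buffer = new ReplayBuffer<>();
        buffer.push("a");
        buffer.push("b");
        System.out.println(buffer.shift());  // null: nothing visible until rewind()
        buffer.rewind();
        System.out.println(buffer.shift());  // a
        System.out.println(buffer.shift());  // b
    }
}

This is what lets the component fill a slave buffer from its port while the first master record is being processed, and then replay the complete slave data for every later master record without re-reading the port.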
Java
apache-2.0
ae7e79628f328c1414464e72f4001e59e6bac827
0
kieker-monitoring/kieker,kieker-monitoring/kieker,kieker-monitoring/kieker,kieker-monitoring/kieker,kieker-monitoring/kieker
/*************************************************************************** * Copyright 2017 Kieker Project (http://kieker-monitoring.net) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************/ package kieker.monitoring.writer.collector; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import kieker.common.configuration.Configuration; import kieker.common.logging.Log; import kieker.common.logging.LogFactory; import kieker.common.record.IMonitoringRecord; import kieker.monitoring.core.controller.ControllerFactory; import kieker.monitoring.core.controller.ReceiveUnfilteredConfiguration; import kieker.monitoring.writer.AbstractMonitoringWriter; import kieker.monitoring.writer.raw.IRawDataWriter; import kieker.monitoring.writer.serializer.IMonitoringRecordSerializer; /** * Chunking collector for monitoring records. The collected records are written if a chunk is * "full", or if no records have been written for some time (see 'deferred write delay'). This * collector employs a writer task, which runs regularly and writes chunks if enough records have * been collected or the deferred write delay has expired. * <p/> * <b>Configuration hints:</b> The collector has several configuration parameters which depend * on one another. In particular, the queue size should be chosen large enough so that the queue * does not fill up in a single task run interval. In addition, the output buffer needs to be * large enough to hold a completely serialized chunk, and therefore depends on the chunk size. * * @author Holger Knoche * * @since 1.13 */ @ReceiveUnfilteredConfiguration public class ChunkingCollector extends AbstractMonitoringWriter { // If multiple workers are required, synchronization between writer tasks has // to be considered. private static final int NUMBER_OF_WORKERS = 1; // Default size for the input queue (in records) private static final int DEFAULT_QUEUE_SIZE = 2048; // Default deferred write delay (in milliseconds) private static final int DEFAULT_DEFERRED_WRITE_DELAY = 500; // Default chunk size (in records) private static final int DEFAULT_CHUNK_SIZE = 16; // Default output buffer size (in bytes) private static final int DEFAULT_OUTPUT_BUFFER_SIZE = 32768; // Default run task run interval (in milliseconds) private static final int DEFAULT_TASK_RUN_INTERVAL = 20; private static final String PREFIX = ChunkingCollector.class.getName() + "."; /** The name of the configuration property for the serializer class name. */ public static final String CONFIG_SERIALIZER_CLASSNAME = PREFIX + "serializer"; // NOCS (afterPREFIX) /** The name of the configuration property for the writer class name. 
*/ public static final String CONFIG_WRITER_CLASSNAME = PREFIX + "writer"; // NOCS (afterPREFIX) /** The name of the configuration property for the deferred write delay. */ public static final String CONFIG_DEFERRED_WRITE_DELAY = PREFIX + "deferredWriteDelay"; // NOCS (afterPREFIX) /** The name of the configuration property for the queue size. */ public static final String CONFIG_QUEUE_SIZE = PREFIX + "queueSize"; // NOCS (afterPREFIX) /** The name of the configuration property for the chunk size. */ public static final String CONFIG_CHUNK_SIZE = PREFIX + "chunkSize"; // NOCS (afterPREFIX) /** The name of the configuration property for the output buffer size. */ public static final String CONFIG_OUTPUT_BUFFER_SIZE = PREFIX + "outputBufferSize"; // NOCS (afterPREFIX) /** The name of the configuration property for the writer task interval. */ public static final String CONFIG_TASK_RUN_INTERVAL = PREFIX + "taskRunInterval"; // NOCS (afterPREFIX) /** The time unit for the writer task interval. */ private static final TimeUnit TASK_RUN_INTERVAL_TIME_UNIT = TimeUnit.MILLISECONDS; private static final Log LOG = LogFactory.getLog(ChunkingCollector.class); private final BlockingQueue<IMonitoringRecord> recordQueue; private final ScheduledExecutorService scheduledExecutor; private final int taskRunInterval; private final ChunkWriterTask writerTask; public ChunkingCollector(final Configuration configuration) { super(configuration); // Initialize the queue and the executor service final int queueSize = configuration.getIntProperty(CONFIG_QUEUE_SIZE, DEFAULT_QUEUE_SIZE); this.taskRunInterval = configuration.getIntProperty(CONFIG_TASK_RUN_INTERVAL, DEFAULT_TASK_RUN_INTERVAL); this.recordQueue = new ArrayBlockingQueue<IMonitoringRecord>(queueSize); this.scheduledExecutor = Executors.newScheduledThreadPool(NUMBER_OF_WORKERS); // Instantiate serializer and writer final ControllerFactory controllerFactory = ControllerFactory.getInstance(configuration); final String serializerName = configuration.getStringProperty(CONFIG_SERIALIZER_CLASSNAME); final IMonitoringRecordSerializer serializer = controllerFactory.createAndInitialize(IMonitoringRecordSerializer.class, serializerName, configuration); final String writerName = configuration.getStringProperty(CONFIG_WRITER_CLASSNAME); final IRawDataWriter writer = controllerFactory.createAndInitialize(IRawDataWriter.class, writerName, configuration); // Instantiate the writer task final int deferredWriteDelayMs = configuration.getIntProperty(CONFIG_DEFERRED_WRITE_DELAY, DEFAULT_DEFERRED_WRITE_DELAY); final int chunkSize = configuration.getIntProperty(CONFIG_CHUNK_SIZE, DEFAULT_CHUNK_SIZE); final int outputBufferSize = configuration.getIntProperty(CONFIG_OUTPUT_BUFFER_SIZE, DEFAULT_OUTPUT_BUFFER_SIZE); this.writerTask = new ChunkWriterTask(chunkSize, deferredWriteDelayMs, outputBufferSize, serializer, writer); } @Override public void onStarting() { this.scheduledExecutor.scheduleAtFixedRate(this.writerTask, 0, this.taskRunInterval, TASK_RUN_INTERVAL_TIME_UNIT); this.writerTask.initialize(); } @Override public void onTerminating() { // Terminate scheduled execution and write remaining chunks, if any this.scheduledExecutor.shutdown(); try { // Wait for the executor to shut down this.scheduledExecutor.awaitTermination(Long.MAX_VALUE, TASK_RUN_INTERVAL_TIME_UNIT); } catch(final InterruptedException e) { LOG.warn("Awaiting termination of the scheduled executor was interrupted.", e); } this.writerTask.terminate(); } private boolean enqueueRecord(final IMonitoringRecord 
record) { for (int tryNumber = 0; tryNumber < 10; tryNumber++) { // drop out if more than 10 times interrupted try { this.recordQueue.put(record); return true; } catch (final InterruptedException ignore) { // The interrupt status has been reset by the put method when throwing the exception. // We will not propagate the interrupt because the error is reported by returning false. LOG.warn("Interrupted when adding new monitoring record to queue. Try: " + tryNumber); } } LOG.error("Failed to add new monitoring record to queue (maximum number of attempts reached)."); return false; } @Override public void writeMonitoringRecord(final IMonitoringRecord record) { this.enqueueRecord(record); } /** * Writer task to write records collected by the collector. * @author Holger Knoche * @since 1.13 * */ class ChunkWriterTask implements Runnable { private final ByteBuffer buffer; private final IMonitoringRecordSerializer serializer; private final IRawDataWriter writer; private final int outputChunkSize; private final long deferredWriteDelayNs; private volatile long nextWriteTime; public ChunkWriterTask(final int outputChunkSize, final int deferredWriteDelayMs, final int outputBufferSize, final IMonitoringRecordSerializer serializer, final IRawDataWriter writer) { this.serializer = serializer; this.writer = writer; this.outputChunkSize = outputChunkSize; this.deferredWriteDelayNs = deferredWriteDelayMs * 1000000L; this.buffer = ByteBuffer.allocate(outputBufferSize); this.updateNextWriteTime(); } @Override @SuppressWarnings("synthetic-access") public void run() { final BlockingQueue<IMonitoringRecord> queue = ChunkingCollector.this.recordQueue; int numberOfPendingRecords = queue.size(); final int chunkSize = this.outputChunkSize; // Write records if at least one chunk can be filled completely if (numberOfPendingRecords >= chunkSize) { // Write as many chunks as possible do { this.writeChunk(queue, chunkSize); numberOfPendingRecords = queue.size(); } while (numberOfPendingRecords >= chunkSize); // Update the last-write time and return this.updateNextWriteTime(); return; } // If no chunk can be filled, check whether the deferred write interval has expired final long currentTime = System.nanoTime(); if ((numberOfPendingRecords > 0) && (currentTime >= this.nextWriteTime)) { // Write the pending records this.writeChunk(queue, numberOfPendingRecords); this.updateNextWriteTime(currentTime); } } public void initialize() { this.writer.onInitialization(); this.serializer.onInitialization(); } public void terminate() { this.flush(); this.serializer.onTermination(); this.writer.onTermination(); } @SuppressWarnings("synthetic-access") public void flush() { final BlockingQueue<IMonitoringRecord> queue = ChunkingCollector.this.recordQueue; final int chunkSize = this.outputChunkSize; int numberOfPendingRecords = queue.size(); // Put the remaining records into chunks and write them while (numberOfPendingRecords > 0) { final int currentChunkSize; if (numberOfPendingRecords > chunkSize) { currentChunkSize = chunkSize; } else { currentChunkSize = numberOfPendingRecords; } this.writeChunk(queue, currentChunkSize); numberOfPendingRecords -= currentChunkSize; } } private void writeChunk(final BlockingQueue<IMonitoringRecord> queue, final int chunkSize) { final List<IMonitoringRecord> chunk = new ArrayList<IMonitoringRecord>(chunkSize); for (int recordIndex = 0; recordIndex < chunkSize; recordIndex++) { final IMonitoringRecord record = queue.poll(); chunk.add(record); } // Serialize and write the data final ByteBuffer outputBuffer 
= this.buffer; outputBuffer.rewind(); final int bytesWritten = this.serializer.serializeRecords(chunk, outputBuffer); this.writer.writeData(outputBuffer, 0, bytesWritten); } private void updateNextWriteTime() { this.updateNextWriteTime(System.nanoTime()); } private void updateNextWriteTime(final long currentTime) { this.nextWriteTime = (currentTime + this.deferredWriteDelayNs); } } }
kieker-monitoring/src/kieker/monitoring/writer/collector/ChunkingCollector.java
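ChunkingCollector derives its configuration keys from its fully qualified class name (the PREFIX and CONFIG_* constants in the code above), so the tunables mentioned in the class comment (queue size, chunk size, output buffer size, deferred write delay and task run interval) can be set under that prefix. A minimal sketch using java.util.Properties follows; the concrete values are only examples, and the serializer and writer class names are placeholders rather than real Kieker implementations.

import java.util.Properties;

// Sketch: setting the ChunkingCollector tunables via its fully qualified
// property keys. Values are illustrative; the two class names are
// placeholders, not actual Kieker serializer/writer implementations.
public class ChunkingCollectorConfigSketch {
    public static void main(String[] args) {
        final String prefix = "kieker.monitoring.writer.collector.ChunkingCollector.";
        Properties props = new Properties();
        props.setProperty(prefix + "serializer", "com.example.MySerializer");    // placeholder
        props.setProperty(prefix + "writer", "com.example.MyRawDataWriter");     // placeholder
        props.setProperty(prefix + "queueSize", "4096");          // records; should outlast one task interval
        props.setProperty(prefix + "chunkSize", "32");            // records per chunk
        props.setProperty(prefix + "outputBufferSize", "65536");  // bytes; must hold one serialized chunk
        props.setProperty(prefix + "deferredWriteDelay", "500");  // ms without a full chunk before flushing
        props.setProperty(prefix + "taskRunInterval", "20");      // ms between writer task runs
        props.forEach((k, v) -> System.out.println(k + "=" + v));
    }
}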
/*************************************************************************** * Copyright 2017 Kieker Project (http://kieker-monitoring.net) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ***************************************************************************/ package kieker.monitoring.writer.collector; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import kieker.common.configuration.Configuration; import kieker.common.logging.Log; import kieker.common.logging.LogFactory; import kieker.common.record.IMonitoringRecord; import kieker.monitoring.core.controller.ControllerFactory; import kieker.monitoring.core.controller.ReceiveUnfilteredConfiguration; import kieker.monitoring.writer.AbstractMonitoringWriter; import kieker.monitoring.writer.raw.IRawDataWriter; import kieker.monitoring.writer.serializer.IMonitoringRecordSerializer; /** * Chunking collector for monitoring records. The collected records are written if a chunk is * "full", or if no records have been written for some time (see 'deferred write delay'). This * collector employs a writer task, which runs regularly and writes chunks if enough records have * been collected or the deferred write delay has expired. * <p/> * <b>Configuration hints:</b> The collector has several configuration parameters which depend * on one another. In particular, the queue size should be chosen large enough so that the queue * does not fill up in a single task run interval. In addition, the output buffer needs to be * large enough to hold a completely serialized chunk, and therefore depends on the chunk size. * * @author Holger Knoche * * @since 1.13 */ @ReceiveUnfilteredConfiguration public class ChunkingCollector extends AbstractMonitoringWriter { // If multiple workers are required, synchronization between writer tasks has // to be considered. private static final int NUMBER_OF_WORKERS = 1; // Default size for the input queue (in records) private static final int DEFAULT_QUEUE_SIZE = 2048; // Default deferred write delay (in milliseconds) private static final int DEFAULT_DEFERRED_WRITE_DELAY = 500; // Default chunk size (in records) private static final int DEFAULT_CHUNK_SIZE = 16; // Default output buffer size (in bytes) private static final int DEFAULT_OUTPUT_BUFFER_SIZE = 32768; // Default run task run interval (in milliseconds) private static final int DEFAULT_TASK_RUN_INTERVAL = 20; private static final String PREFIX = ChunkingCollector.class.getName() + "."; /** The name of the configuration property for the serializer class name. */ public static final String CONFIG_SERIALIZER_CLASSNAME = PREFIX + "serializer"; // NOCS (afterPREFIX) /** The name of the configuration property for the writer class name. 
*/ public static final String CONFIG_WRITER_CLASSNAME = PREFIX + "writer"; // NOCS (afterPREFIX) /** The name of the configuration property for the deferred write delay. */ public static final String CONFIG_DEFERRED_WRITE_DELAY = PREFIX + "deferredWriteDelay"; // NOCS (afterPREFIX) /** The name of the configuration property for the queue size. */ public static final String CONFIG_QUEUE_SIZE = PREFIX + "queueSize"; // NOCS (afterPREFIX) /** The name of the configuration property for the chunk size. */ public static final String CONFIG_CHUNK_SIZE = PREFIX + "chunkSize"; // NOCS (afterPREFIX) /** The name of the configuration property for the output buffer size. */ public static final String CONFIG_OUTPUT_BUFFER_SIZE = PREFIX + "outputBufferSize"; // NOCS (afterPREFIX) /** The name of the configuration property for the writer task interval. */ public static final String CONFIG_TASK_RUN_INTERVAL = PREFIX + "taskRunInterval"; // NOCS (afterPREFIX) private static final Log LOG = LogFactory.getLog(ChunkingCollector.class); private final BlockingQueue<IMonitoringRecord> recordQueue; private final ScheduledExecutorService scheduledExecutor; private final int taskRunInterval; private final ChunkWriterTask writerTask; public ChunkingCollector(final Configuration configuration) { super(configuration); // Initialize the queue and the executor service final int queueSize = configuration.getIntProperty(CONFIG_QUEUE_SIZE, DEFAULT_QUEUE_SIZE); this.taskRunInterval = configuration.getIntProperty(CONFIG_TASK_RUN_INTERVAL, DEFAULT_TASK_RUN_INTERVAL); this.recordQueue = new ArrayBlockingQueue<IMonitoringRecord>(queueSize); this.scheduledExecutor = Executors.newScheduledThreadPool(NUMBER_OF_WORKERS); // Instantiate serializer and writer final ControllerFactory controllerFactory = ControllerFactory.getInstance(configuration); final String serializerName = configuration.getStringProperty(CONFIG_SERIALIZER_CLASSNAME); final IMonitoringRecordSerializer serializer = controllerFactory.createAndInitialize(IMonitoringRecordSerializer.class, serializerName, configuration); final String writerName = configuration.getStringProperty(CONFIG_WRITER_CLASSNAME); final IRawDataWriter writer = controllerFactory.createAndInitialize(IRawDataWriter.class, writerName, configuration); // Instantiate the writer task final int deferredWriteDelayMs = configuration.getIntProperty(CONFIG_DEFERRED_WRITE_DELAY, DEFAULT_DEFERRED_WRITE_DELAY); final int chunkSize = configuration.getIntProperty(CONFIG_CHUNK_SIZE, DEFAULT_CHUNK_SIZE); final int outputBufferSize = configuration.getIntProperty(CONFIG_OUTPUT_BUFFER_SIZE, DEFAULT_OUTPUT_BUFFER_SIZE); this.writerTask = new ChunkWriterTask(chunkSize, deferredWriteDelayMs, outputBufferSize, serializer, writer); } @Override public void onStarting() { this.scheduledExecutor.scheduleAtFixedRate(this.writerTask, 0, this.taskRunInterval, TimeUnit.MILLISECONDS); this.writerTask.initialize(); } @Override public void onTerminating() { // Terminate scheduled execution and write remaining chunks, if any this.scheduledExecutor.shutdown(); this.writerTask.terminate(); } private boolean enqueueRecord(final IMonitoringRecord record) { for (int tryNumber = 0; tryNumber < 10; tryNumber++) { // drop out if more than 10 times interrupted try { this.recordQueue.put(record); return true; } catch (final InterruptedException ignore) { // The interrupt status has been reset by the put method when throwing the exception. // We will not propagate the interrupt because the error is reported by returning false. 
LOG.warn("Interrupted when adding new monitoring record to queue. Try: " + tryNumber); } } LOG.error("Failed to add new monitoring record to queue (maximum number of attempts reached)."); return false; } @Override public void writeMonitoringRecord(final IMonitoringRecord record) { this.enqueueRecord(record); } /** * Writer task to write records collected by the collector. * @author Holger Knoche * @since 1.13 * */ class ChunkWriterTask implements Runnable { private final ByteBuffer buffer; private final IMonitoringRecordSerializer serializer; private final IRawDataWriter writer; private final int outputChunkSize; private final long deferredWriteDelayNs; private volatile long nextWriteTime; public ChunkWriterTask(final int outputChunkSize, final int deferredWriteDelayMs, final int outputBufferSize, final IMonitoringRecordSerializer serializer, final IRawDataWriter writer) { this.serializer = serializer; this.writer = writer; this.outputChunkSize = outputChunkSize; this.deferredWriteDelayNs = deferredWriteDelayMs * 1000000L; this.buffer = ByteBuffer.allocate(outputBufferSize); this.updateNextWriteTime(); } @Override @SuppressWarnings("synthetic-access") public void run() { final BlockingQueue<IMonitoringRecord> queue = ChunkingCollector.this.recordQueue; int numberOfPendingRecords = queue.size(); final int chunkSize = this.outputChunkSize; // Write records if at least one chunk can be filled completely if (numberOfPendingRecords >= chunkSize) { // Write as many chunks as possible do { this.writeChunk(queue, chunkSize); numberOfPendingRecords = queue.size(); } while (numberOfPendingRecords >= chunkSize); // Update the last-write time and return this.updateNextWriteTime(); return; } // If no chunk can be filled, check whether the deferred write interval has expired final long currentTime = System.nanoTime(); if ((numberOfPendingRecords > 0) && (currentTime >= this.nextWriteTime)) { // Write the pending records this.writeChunk(queue, numberOfPendingRecords); this.updateNextWriteTime(currentTime); } } public void initialize() { this.writer.onInitialization(); this.serializer.onInitialization(); } public void terminate() { this.flush(); this.serializer.onTermination(); this.writer.onTermination(); } @SuppressWarnings("synthetic-access") public void flush() { final BlockingQueue<IMonitoringRecord> queue = ChunkingCollector.this.recordQueue; final int chunkSize = this.outputChunkSize; int numberOfPendingRecords = queue.size(); // Put the remaining records into chunks and write them while (numberOfPendingRecords > 0) { final int currentChunkSize; if (numberOfPendingRecords > chunkSize) { currentChunkSize = chunkSize; } else { currentChunkSize = numberOfPendingRecords; } this.writeChunk(queue, currentChunkSize); numberOfPendingRecords -= currentChunkSize; } } private void writeChunk(final BlockingQueue<IMonitoringRecord> queue, final int chunkSize) { final List<IMonitoringRecord> chunk = new ArrayList<IMonitoringRecord>(chunkSize); for (int recordIndex = 0; recordIndex < chunkSize; recordIndex++) { final IMonitoringRecord record = queue.poll(); chunk.add(record); } // Serialize and write the data final ByteBuffer outputBuffer = this.buffer; outputBuffer.rewind(); final int bytesWritten = this.serializer.serializeRecords(chunk, outputBuffer); this.writer.writeData(outputBuffer, 0, bytesWritten); } private void updateNextWriteTime() { this.updateNextWriteTime(System.nanoTime()); } private void updateNextWriteTime(final long currentTime) { this.nextWriteTime = (currentTime + this.deferredWriteDelayNs); 
} } }
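ChunkWriterTask.run() writes complete chunks whenever at least chunkSize records are queued; otherwise it flushes whatever is pending once the deferred write delay, kept internally in nanoseconds, has expired since the last write. The small sketch below restates that decision with illustrative numbers.

// Sketch of the deferred-write decision made in ChunkWriterTask.run():
// write full chunks when possible, otherwise flush pending records once
// the deferred write delay (tracked in nanoseconds) has expired.
public class DeferredWriteSketch {
    public static void main(String[] args) {
        final long deferredWriteDelayNs = 500 * 1_000_000L; // 500 ms
        long nextWriteTime = System.nanoTime() + deferredWriteDelayNs;

        int pendingRecords = 13;   // illustrative
        int chunkSize = 16;        // illustrative

        boolean fullChunkAvailable = pendingRecords >= chunkSize;
        boolean delayExpired = pendingRecords > 0 && System.nanoTime() >= nextWriteTime;

        if (fullChunkAvailable) {
            System.out.println("write complete chunks");
        } else if (delayExpired) {
            System.out.println("write the " + pendingRecords + " pending records");
        } else {
            System.out.println("wait for more records");
        }
    }
}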
Chunking collector did not wait for scheduled job to finish
kieker-monitoring/src/kieker/monitoring/writer/collector/ChunkingCollector.java
Chunking collector did not wait for scheduled job to finish
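The change recorded here ("Chunking collector did not wait for scheduled job to finish") makes onTerminating() block in awaitTermination after shutting the executor down, so an in-flight writer-task run cannot race with the final flush. The standalone sketch below shows that shutdown pattern in isolation; the periodic task body is a placeholder.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Sketch of the shutdown pattern used by the fix: stop scheduling new runs,
// wait for any in-flight run to finish, and only then do the final flush.
public class GracefulShutdownSketch {
    public static void main(String[] args) {
        ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
        executor.scheduleAtFixedRate(() -> System.out.println("periodic work"), 0, 20, TimeUnit.MILLISECONDS);

        executor.shutdown();                       // no further runs are scheduled
        try {
            // block until the currently running task (if any) has completed
            executor.awaitTermination(Long.MAX_VALUE, TimeUnit.MILLISECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();    // restore the interrupt status
        }
        System.out.println("safe to flush remaining data now");
    }
}

Using Long.MAX_VALUE as the timeout effectively waits as long as needed. The collector itself only logs a warning when the wait is interrupted; the sketch restores the interrupt flag instead, which is the more common idiom.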
Java
apache-2.0
7725c2e52a25f0af0c425d1535549517c2f9b1bf
0
maxyeg/beaker-notebook,bradparks/beaker-notebook,codeaudit/beaker-notebook,mattyb149/beaker-notebook,Chasego/beaker-notebook,ScottPJones/beaker-notebook,vital-ai/beaker-notebook,Chasego/beaker-notebook,Chasego/beaker-notebook,mattyb149/beaker-notebook,vital-ai/beaker-notebook,bradparks/beaker-notebook,brosander/beaker-notebook,Chasego/beaker-notebook,sirinath/beaker-notebook,gef756/beaker-notebook,brosander/beaker-notebook,ScottPJones/beaker-notebook,gef756/beaker-notebook,vital-ai/beaker-notebook,codeaudit/beaker-notebook,codeaudit/beaker-notebook,brosander/beaker-notebook,mattyb149/beaker-notebook,gef756/beaker-notebook,sirinath/beaker-notebook,sirinath/beaker-notebook,ScottPJones/beaker-notebook,bradparks/beaker-notebook,vital-ai/beaker-notebook,ScottPJones/beaker-notebook,mattyb149/beaker-notebook,bradparks/beaker-notebook,vital-ai/beaker-notebook,codeaudit/beaker-notebook,ScottPJones/beaker-notebook,bradparks/beaker-notebook,sirinath/beaker-notebook,maxyeg/beaker-notebook,Chasego/beaker-notebook,gef756/beaker-notebook,bradparks/beaker-notebook,brosander/beaker-notebook,Chasego/beaker-notebook,maxyeg/beaker-notebook,gef756/beaker-notebook,codeaudit/beaker-notebook,mattyb149/beaker-notebook,mattyb149/beaker-notebook,vital-ai/beaker-notebook,brosander/beaker-notebook,sirinath/beaker-notebook,mattyb149/beaker-notebook,gef756/beaker-notebook,Chasego/beaker-notebook,maxyeg/beaker-notebook,maxyeg/beaker-notebook,sirinath/beaker-notebook,maxyeg/beaker-notebook,sirinath/beaker-notebook,brosander/beaker-notebook,gef756/beaker-notebook,brosander/beaker-notebook,bradparks/beaker-notebook,codeaudit/beaker-notebook,vital-ai/beaker-notebook
/* * Copyright 2014 TWO SIGMA OPEN SOURCE, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twosigma.beaker.core.module.config; import com.google.inject.Inject; import com.google.inject.Singleton; import com.twosigma.beaker.shared.module.util.GeneralUtils; import com.twosigma.beaker.core.rest.StreamGobbler; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.lang.Exception; import java.net.UnknownHostException; import java.net.InetAddress; import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.UUID; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.RandomStringUtils; import org.json.simple.JSONObject; import org.json.simple.JSONValue; /** * DefaultBeakerConfig * holds the default beaker configuration which used by DefaultBeakerConfigModule who binds it * to BeakerConfig interface. The configuration is determined with both constant default * values and the input of BeakerConfigPref */ @Singleton public class DefaultBeakerConfig implements BeakerConfig { private final String installDir; private final String [] searchDirs; private final String pluginDir; private final String dotDir; private final String nginxDir; private final String nginxBinDir; private final String nginxStaticDir; private final String nginxServDir; private final String nginxExtraRules; private final Map<String, String> nginxPluginRules; private final Boolean useKerberos; private final Boolean publicServer; private final Boolean noPasswordAllowed; private final String authCookie; private final String passwordHash; private final String password; private final Integer portBase; private final Integer reservedPortCount; private final String configFileUrl; private final String preferenceFileUrl; private final String defaultNotebookUrl; private final String recentNotebooksFileUrl; private final String sessionBackupDir; private final Map<String, String> pluginLocations; private final Map<String, String> pluginOptions; private final Map<String, String[]> pluginEnvps; private final String version; private final String buildTime; private final String hash; private final String gist_server; private final String sharing_server; private final JSONObject prefs; private String hash(String password) { return DigestUtils.sha512Hex(password + getPasswordSalt()); } @Inject public DefaultBeakerConfig(BeakerConfigPref pref, GeneralUtils utils) throws UnknownHostException, IOException, InterruptedException { this.installDir = System.getProperty("user.dir"); this.searchDirs = new String [1]; this.searchDirs[0] = this.installDir; this.useKerberos = pref.getUseKerberos(); this.portBase = pref.getPortBase(); this.reservedPortCount = 4; this.dotDir = System.getProperty("user.home") + "/.beaker/v1"; this.pluginDir = this.installDir + "/config/plugins/eval"; utils.ensureDirectoryExists(this.dotDir); this.nginxDir = this.installDir + "/nginx"; if 
(System.getProperty("beaker.nginx.bin.dir") != null) { this.nginxBinDir = System.getProperty("beaker.nginx.bin.dir"); } else { this.nginxBinDir = ""; // assuming nginx is available in PATH } this.nginxServDir = utils.createTempDirectory(this.dotDir, "nginx"); this.nginxStaticDir = this.installDir + "/src/main/web"; this.nginxExtraRules = ""; this.nginxPluginRules = new HashMap<>(); String configDir = this.dotDir + "/config"; utils.ensureDirectoryExists(configDir); final String defaultConfigFile = this.installDir + "/config/beaker.conf.json"; this.configFileUrl = defaultConfigFile; final String defaultPreferenceFile = this.installDir + "/config/beaker.pref.json"; final String preferenceFile = configDir + "/beaker.pref.json"; utils.ensureFileHasContent(preferenceFile, defaultPreferenceFile); this.preferenceFileUrl = preferenceFile; String content = utils.readFile(this.preferenceFileUrl); JSONObject obj = (JSONObject)JSONValue.parse(content); if (obj.get("gist_server") != null) this.gist_server = (String)obj.get("gist_server"); else this.gist_server = "https://api.github.com/gists"; if (obj.get("sharing_server") != null) this.sharing_server = (String)obj.get("sharing_server"); else this.sharing_server = "http://sharing.beakernotebook.com/gist/anonymous"; this.prefs = obj; final String prefDefaultNotebookUrl = pref.getDefaultNotebookUrl(); final String mainDefaultNotebookPath = this.dotDir + "/config/default.bkr"; final String defaultDefaultNotebookPath = this.installDir + "/config/default.bkr"; if (prefDefaultNotebookUrl != null) { this.defaultNotebookUrl = prefDefaultNotebookUrl; } else { File f = new File(mainDefaultNotebookPath); if(f.exists()) this.defaultNotebookUrl = mainDefaultNotebookPath; else this.defaultNotebookUrl = defaultDefaultNotebookPath; } String varDir = this.dotDir + "/var"; utils.ensureDirectoryExists(varDir); this.recentNotebooksFileUrl = varDir + "/recentNotebooks"; this.sessionBackupDir = varDir + "/sessionBackups"; utils.ensureDirectoryExists(this.sessionBackupDir); this.pluginLocations = new HashMap<>(); this.pluginOptions = pref.getPluginOptions(); this.pluginEnvps = new HashMap<>(); augmentPluginOptions(); this.publicServer = pref.getPublicServer(); this.noPasswordAllowed = pref.getNoPasswordAllowed(); this.authCookie = RandomStringUtils.random(40, true, true); // XXX user might provide their own hash in beaker.config.json String password = RandomStringUtils.random(15, true, true); this.passwordHash = hash(password); this.password = password; if (this.publicServer) { String cert = this.nginxServDir + "/ssl_cert.pem"; String tmp = this.nginxServDir + "/cert.tmp"; PrintWriter pw = new PrintWriter(tmp); for (int i = 0; i < 10; i++) pw.printf("\n"); pw.close(); // XXX I am baffled as to why using sh and this pipe is // necessary, but if you just exec openssl and write into its // stdin then it hangs. 
String[] cmd = {"sh", "-c", "cat " + tmp + " | openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout " + cert + " -out " + cert}; Process proc = Runtime.getRuntime().exec(cmd); proc.waitFor(); } this.version = utils.readFile(this.installDir + "/config/version"); this.buildTime = utils.readFile(this.installDir + "/config/build_time"); this.hash = utils.readFile(this.installDir + "/config/hash"); } @Override public String getInstallDirectory() { return this.installDir; } @Override public String [] getFileSearchDirs() { return this.searchDirs; } @Override public String getPluginDirectory() { return this.pluginDir; } @Override public String getDotDirectory() { return this.dotDir; } @Override public String getNginxDirectory() { return this.nginxDir; } @Override public String getNginxBinDirectory() { return this.nginxBinDir; } @Override public String getNginxStaticDirectory() { return this.nginxStaticDir; } @Override public String getNginxServDirectory() { return this.nginxServDir; } @Override public String getNginxExtraRules() { return this.nginxExtraRules; } @Override public Map<String, String> getNginxPluginRules() { return this.nginxPluginRules; } @Override public Boolean getPublicServer() { return this.publicServer; } @Override public Boolean getNoPasswordAllowed() { return this.noPasswordAllowed; } @Override public Integer getPortBase() { return this.portBase; } @Override public Integer getReservedPortCount() { return this.reservedPortCount; } @Override public Boolean getUseKerberos() { return this.useKerberos; } @Override public String getConfigFileUrl() { return this.configFileUrl; } @Override public String getPreferenceFileUrl() { return this.preferenceFileUrl; } @Override public String getDefaultNotebookUrl() { return this.defaultNotebookUrl; } @Override public String getRecentNotebooksFileUrl() { return this.recentNotebooksFileUrl; } @Override public String getSessionBackupsDirectory() { return this.sessionBackupDir; } @Override public Map<String, String> getPluginLocations() { return this.pluginLocations; } @Override public Map<String, String> getPluginOptions() { return this.pluginOptions; } @Override public Map<String, String[]> getPluginEnvps() { return this.pluginEnvps; } @Override public String getAuthCookie() { return this.authCookie; } /* When the hash can be stored in a file, need to generate salt randomly. See github Issue #319 */ @Override public String getPasswordSalt() { return ".beaker.N0tebook"; } @Override public String getPasswordHash() { return this.passwordHash; } @Override public String getPassword() { return this.password; } @Override public String getBaseURL() throws UnknownHostException { String initUrl; String hostname = this.publicServer ? InetAddress.getLocalHost().getHostName() : "127.0.0.1"; boolean useHttps = this.publicServer; // XXX should be independently setable if (useHttps) { initUrl = "https://" + hostname + ":" + this.portBase + "/"; } else { initUrl = "http://" + (this.useKerberos ? 
(System.getProperty("user.name") + ".") : "") + hostname + ":" + (portBase + 1) + "/"; } return initUrl; } @Override public String getVersion() { return this.version; } @Override public String getBuildTime() { return this.buildTime; } @Override public String getHash() { return this.hash; } @Override public String getMainPageFileName() { return this.installDir + "/src/main/web/app/template/index_template.html"; } @Override public String getGistServerUrl() { return this.gist_server; } @Override public String getSharingServerUrl() { return this.sharing_server; } @Override public String getPluginPath(String plugin) { String result = null; try { JSONObject plugins = (JSONObject) this.prefs.get("plugins"); JSONObject pprefs = (JSONObject) plugins.get(plugin); result = (String) pprefs.get("path"); } catch (Exception e) { // ignore } return result; } private void augmentPluginOptions() { try { Map<String, JSONObject> plugins = (Map<String, JSONObject>) this.prefs.get("plugins"); for (Map.Entry<String, JSONObject> entry: plugins.entrySet()) { String options = (String) entry.getValue().get("options"); this.pluginOptions.put(entry.getKey(), options); } } catch (Exception e) { // ignore } } }
core/src/main/java/com/twosigma/beaker/core/module/config/DefaultBeakerConfig.java
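The hash(...) helper and getPasswordSalt() in DefaultBeakerConfig above store the generated password as a SHA-512 hex digest of the password concatenated with a fixed salt. A minimal reproduction of that scheme with Apache Commons Codec is sketched below; the example password is made up.

import org.apache.commons.codec.digest.DigestUtils;

// Sketch of the password hashing scheme used above: SHA-512 hex digest of
// password + fixed salt (the example password is made up).
public class PasswordHashSketch {
    private static final String SALT = ".beaker.N0tebook"; // value returned by getPasswordSalt()

    static String hash(String password) {
        return DigestUtils.sha512Hex(password + SALT);
    }

    public static void main(String[] args) {
        System.out.println(hash("s3cretPassw0rd"));
    }
}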
/* * Copyright 2014 TWO SIGMA OPEN SOURCE, LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.twosigma.beaker.core.module.config; import com.google.inject.Inject; import com.google.inject.Singleton; import com.twosigma.beaker.shared.module.util.GeneralUtils; import com.twosigma.beaker.core.rest.StreamGobbler; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.lang.Exception; import java.net.UnknownHostException; import java.net.InetAddress; import java.nio.file.Paths; import java.util.HashMap; import java.util.Map; import java.util.UUID; import org.apache.commons.codec.digest.DigestUtils; import org.apache.commons.lang3.RandomStringUtils; import org.json.simple.JSONObject; import org.json.simple.JSONValue; /** * DefaultBeakerConfig * holds the default beaker configuration which used by DefaultBeakerConfigModule who binds it * to BeakerConfig interface. The configuration is determined with both constant default * values and the input of BeakerConfigPref */ @Singleton public class DefaultBeakerConfig implements BeakerConfig { private final String installDir; private final String [] searchDirs; private final String pluginDir; private final String dotDir; private final String nginxDir; private final String nginxBinDir; private final String nginxStaticDir; private final String nginxServDir; private final String nginxExtraRules; private final Map<String, String> nginxPluginRules; private final Boolean useKerberos; private final Boolean publicServer; private final Boolean noPasswordAllowed; private final String authCookie; private final String passwordHash; private final String password; private final Integer portBase; private final Integer reservedPortCount; private final String configFileUrl; private final String preferenceFileUrl; private final String defaultNotebookUrl; private final String recentNotebooksFileUrl; private final String sessionBackupDir; private final Map<String, String> pluginLocations; private final Map<String, String> pluginOptions; private final Map<String, String[]> pluginEnvps; private final String version; private final String buildTime; private final String hash; private final String gist_server; private final String sharing_server; private final JSONObject prefs; private String hash(String password) { return DigestUtils.sha512Hex(password + getPasswordSalt()); } @Inject public DefaultBeakerConfig(BeakerConfigPref pref, GeneralUtils utils) throws UnknownHostException, IOException, InterruptedException { this.installDir = System.getProperty("user.dir"); this.searchDirs = new String [1]; this.searchDirs[0] = this.installDir; this.useKerberos = pref.getUseKerberos(); this.portBase = pref.getPortBase(); this.reservedPortCount = 4; this.dotDir = System.getProperty("user.home") + "/.beaker/v1"; this.pluginDir = this.installDir + "/config/plugins/eval"; utils.ensureDirectoryExists(this.dotDir); this.nginxDir = this.installDir + "/nginx"; if 
(System.getProperty("beaker.nginx.bin.dir") != null) { this.nginxBinDir = System.getProperty("beaker.nginx.bin.dir"); } else { this.nginxBinDir = ""; // assuming nginx is available in PATH } this.nginxServDir = utils.createTempDirectory(this.dotDir, "nginx"); this.nginxStaticDir = this.installDir + "/src/main/web"; this.nginxExtraRules = ""; this.nginxPluginRules = new HashMap<>(); String configDir = this.dotDir + "/config"; utils.ensureDirectoryExists(configDir); final String defaultConfigFile = this.installDir + "/config/beaker.conf.json"; this.configFileUrl = defaultConfigFile; final String defaultPreferenceFile = this.installDir + "/config/beaker.pref.json"; final String preferenceFile = configDir + "/beaker.pref.json"; utils.ensureFileHasContent(preferenceFile, defaultPreferenceFile); this.preferenceFileUrl = preferenceFile; String content = utils.readFile(this.preferenceFileUrl); JSONObject obj = (JSONObject)JSONValue.parse(content); if (obj.get("gist_server") != null) this.gist_server = (String)obj.get("gist_server"); else this.gist_server = "https://api.github.com/gists"; if (obj.get("sharing_server") != null) this.sharing_server = (String)obj.get("sharing_server"); else this.sharing_server = "http://sharing.beakernotebook.com/gist/anonymous"; this.prefs = obj; final String prefDefaultNotebookUrl = pref.getDefaultNotebookUrl(); final String mainDefaultNotebookPath = this.dotDir + "/config/default.bkr"; final String defaultDefaultNotebookPath = this.installDir + "/config/default.bkr"; if (prefDefaultNotebookUrl != null) { this.defaultNotebookUrl = prefDefaultNotebookUrl; } else { File f = new File(mainDefaultNotebookPath); if(f.exists()) this.defaultNotebookUrl = mainDefaultNotebookPath; else this.defaultNotebookUrl = defaultDefaultNotebookPath; } String varDir = this.dotDir + "/var"; utils.ensureDirectoryExists(varDir); this.recentNotebooksFileUrl = varDir + "/recentNotebooks"; this.sessionBackupDir = varDir + "/sessionBackups"; utils.ensureDirectoryExists(this.sessionBackupDir); this.pluginLocations = new HashMap<>(); this.pluginOptions = pref.getPluginOptions(); this.pluginEnvps = new HashMap<>(); this.publicServer = pref.getPublicServer(); this.noPasswordAllowed = pref.getNoPasswordAllowed(); this.authCookie = RandomStringUtils.random(40, true, true); // XXX user might provide their own hash in beaker.config.json String password = RandomStringUtils.random(15, true, true); this.passwordHash = hash(password); this.password = password; if (this.publicServer) { String cert = this.nginxServDir + "/ssl_cert.pem"; String tmp = this.nginxServDir + "/cert.tmp"; PrintWriter pw = new PrintWriter(tmp); for (int i = 0; i < 10; i++) pw.printf("\n"); pw.close(); // XXX I am baffled as to why using sh and this pipe is // necessary, but if you just exec openssl and write into its // stdin then it hangs. 
String[] cmd = {"sh", "-c", "cat " + tmp + " | openssl req -x509 -nodes -days 365 -newkey rsa:1024 -keyout " + cert + " -out " + cert}; Process proc = Runtime.getRuntime().exec(cmd); proc.waitFor(); } this.version = utils.readFile(this.installDir + "/config/version"); this.buildTime = utils.readFile(this.installDir + "/config/build_time"); this.hash = utils.readFile(this.installDir + "/config/hash"); } @Override public String getInstallDirectory() { return this.installDir; } @Override public String [] getFileSearchDirs() { return this.searchDirs; } @Override public String getPluginDirectory() { return this.pluginDir; } @Override public String getDotDirectory() { return this.dotDir; } @Override public String getNginxDirectory() { return this.nginxDir; } @Override public String getNginxBinDirectory() { return this.nginxBinDir; } @Override public String getNginxStaticDirectory() { return this.nginxStaticDir; } @Override public String getNginxServDirectory() { return this.nginxServDir; } @Override public String getNginxExtraRules() { return this.nginxExtraRules; } @Override public Map<String, String> getNginxPluginRules() { return this.nginxPluginRules; } @Override public Boolean getPublicServer() { return this.publicServer; } @Override public Boolean getNoPasswordAllowed() { return this.noPasswordAllowed; } @Override public Integer getPortBase() { return this.portBase; } @Override public Integer getReservedPortCount() { return this.reservedPortCount; } @Override public Boolean getUseKerberos() { return this.useKerberos; } @Override public String getConfigFileUrl() { return this.configFileUrl; } @Override public String getPreferenceFileUrl() { return this.preferenceFileUrl; } @Override public String getDefaultNotebookUrl() { return this.defaultNotebookUrl; } @Override public String getRecentNotebooksFileUrl() { return this.recentNotebooksFileUrl; } @Override public String getSessionBackupsDirectory() { return this.sessionBackupDir; } @Override public Map<String, String> getPluginLocations() { return this.pluginLocations; } @Override public Map<String, String> getPluginOptions() { return this.pluginOptions; } @Override public Map<String, String[]> getPluginEnvps() { return this.pluginEnvps; } @Override public String getAuthCookie() { return this.authCookie; } /* When the hash can be stored in a file, need to generate salt randomly. See github Issue #319 */ @Override public String getPasswordSalt() { return ".beaker.N0tebook"; } @Override public String getPasswordHash() { return this.passwordHash; } @Override public String getPassword() { return this.password; } @Override public String getBaseURL() throws UnknownHostException { String initUrl; String hostname = this.publicServer ? InetAddress.getLocalHost().getHostName() : "127.0.0.1"; boolean useHttps = this.publicServer; // XXX should be independently setable if (useHttps) { initUrl = "https://" + hostname + ":" + this.portBase + "/"; } else { initUrl = "http://" + (this.useKerberos ? 
(System.getProperty("user.name") + ".") : "") + hostname + ":" + (portBase + 1) + "/"; } return initUrl; } @Override public String getVersion() { return this.version; } @Override public String getBuildTime() { return this.buildTime; } @Override public String getHash() { return this.hash; } @Override public String getMainPageFileName() { return this.installDir + "/src/main/web/app/template/index_template.html"; } @Override public String getGistServerUrl() { return this.gist_server; } @Override public String getSharingServerUrl() { return this.sharing_server; } @Override public String getPluginPath(String plugin) { String result = null; try { JSONObject plugins = (JSONObject) this.prefs.get("plugins"); JSONObject pprefs = (JSONObject) plugins.get(plugin); result = (String) pprefs.get("path"); } catch (Exception e) { // ignore } return result; } }
use plugin options from prefs file
core/src/main/java/com/twosigma/beaker/core/module/config/DefaultBeakerConfig.java
use plugin options from prefs file
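The commit above ("use plugin options from prefs file") and the getPluginPath accessor suggest that per-plugin settings are read from a "plugins" object inside beaker.pref.json. The following sketch shows that lookup with json-simple, the parser the class itself uses; the file location and the "IPython" plugin name are examples only, not values taken from the commit.

import java.nio.file.Files;
import java.nio.file.Paths;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public final class PluginPrefSketch {
    public static void main(String[] args) throws Exception {
        // Example location; DefaultBeakerConfig reads <dotDir>/config/beaker.pref.json.
        String content = new String(Files.readAllBytes(Paths.get("beaker.pref.json")), "UTF-8");
        JSONObject prefs = (JSONObject) JSONValue.parse(content);

        // Same shape as getPluginPath(): prefs -> "plugins" -> "<plugin name>" -> "path".
        JSONObject plugins = (JSONObject) prefs.get("plugins");
        if (plugins != null) {
            JSONObject plugin = (JSONObject) plugins.get("IPython"); // plugin name is illustrative
            if (plugin != null) {
                System.out.println("path = " + plugin.get("path"));
            }
        }
    }
}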
Java
apache-2.0
868a36d67ba0271931c37af02c9af45533a996d4
0
dzh/jframe,dzh/jframe,dzh/jframe,dzh/jframe
/** * */ package jframe.jedis.service; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import jframe.core.plugin.annotation.InjectPlugin; import jframe.core.plugin.annotation.Injector; import jframe.core.plugin.annotation.Start; import jframe.core.plugin.annotation.Stop; import jframe.core.util.PropsConf; import jframe.jedis.JedisPlugin; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisCluster; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import redis.clients.jedis.Protocol; /** * @author dzh * @date Aug 6, 2014 1:31:26 PM * @since 1.0 */ @Injector public class JedisServiceImpl implements JedisService { static final Logger LOG = LoggerFactory.getLogger(JedisServiceImpl.class); private PropsConf conf = new PropsConf(); // groupId private Map<String, JedisPool> _jedis = new HashMap<String, JedisPool>(); public PropsConf init(File jedis) throws Exception { if (!jedis.exists()) { LOG.error("Not found jedis file {}", jedis.getAbsolutePath()); throw new FileNotFoundException("jedis file not found!" + jedis.getAbsolutePath()); } return init(new FileInputStream(jedis), true); } public PropsConf init(InputStream jedis, boolean closeIn) throws Exception { conf.init(jedis); return conf; } JedisPoolConfig createPoolConfig(PropsConf conf, String id) { JedisPoolConfig config = new JedisPoolConfig(); config.setMaxTotal(conf.getConfInt(id, "redis.conn.maxTotal", "200")); config.setMaxIdle(conf.getConfInt(id, "redis.conn.maxIdle", "100")); config.setMinIdle(conf.getConfInt(id, "redis.conn.minIdle", "1")); config.setMaxWaitMillis(conf.getConfLong(id, "redis.conn.maxWaitMillis", "3000")); config.setTestOnBorrow(true); return config; } @InjectPlugin static JedisPlugin plugin; @Start public void start() { String jedis = plugin.getConfig("file.redis", ""); if ("".equals(jedis)) { LOG.error("jedis.conf not found! {}", jedis); return; } try { start(init(new File(jedis))); } catch (Exception e) { LOG.error(e.getMessage(), e); } } public void start(PropsConf conf) { LOG.info("JedisServiceImpl starting"); String[] hosts = conf.getGroupIds(); for (String h : hosts) { if ("".equals(h)) continue; try { String ip = conf.getConf(h, "ip", "127.0.0.1"); // if ("127.0.0.1".equals(ip)) { // continue; // } int port = conf.getConfInt(h, "port", "6379"); int timeout = conf.getConfInt(h, "timeout", "2000"); String passwd = conf.getConf(h, "passwd").trim(); int database = conf.getConfInt(h, "database", String.valueOf(Protocol.DEFAULT_DATABASE));// 0 JedisPoolConfig config = createPoolConfig(conf, h); _jedis.put(h, new JedisPool(config, ip, port, timeout, "".equals(passwd) ? 
null : passwd, database)); } catch (Exception e) { LOG.error(e.getMessage(), e); continue; } } LOG.info("JedisServiceImpl start successfully"); } @Stop public void stop() { LOG.info("JedisServiceImpl stopping"); Iterator<String> iter = _jedis.keySet().iterator(); while (iter.hasNext()) { try { JedisPool j = _jedis.get(iter.next()); if (j != null) j.destroy(); } catch (Exception e) { LOG.warn(e.getMessage(), e); } } LOG.info("JedisServiceImpl stop successfully"); close(); } /* * (non-Javadoc) * @see dono.pay.service.JedisService#getJedis(java.lang.String) */ @Override public Jedis getJedis(String name) { try { JedisPool pool = _jedis.get(name); if (pool == null) return null; return pool.getResource(); } catch (Exception e) { LOG.error(e.getMessage(), e); } return null; } /* * (non-Javadoc) * @see dono.pay.service.JedisService#getJedisCluster(java.lang.String) */ @Override public JedisCluster getJedisCluster(String name) { // Set<HostAndPort> jedisClusterNodes = new HashSet<HostAndPort>(); // //Jedis Cluster will attempt to discover cluster nodes automatically // jedisClusterNodes.add(new HostAndPort("127.0.0.1", 7379)); // JedisCluster jc = new JedisCluster(jedisClusterNodes); // jc.set("foo", "bar"); // String value = jc.get("foo"); return null; } void close() { if (conf != null) conf.clear(); } /* * (non-Javadoc) * @see dono.pay.service.JedisService#getJedis() */ @Override @Deprecated public Jedis getJedis() { try { JedisPool pool = _jedis.get(conf.getConf(null, "redis.host")); if (pool == null) return null; return pool.getResource(); } catch (Exception e) { LOG.error(e.getMessage(), e); } return null; } /* * (non-Javadoc) * @see * dono.pay.service.JedisService#recycleJedis(redis.clients.jedis.Jedis) */ @Override public void recycleJedis(Jedis jedis) { recycleJedis(null, jedis); } /* * (non-Javadoc) * @see dono.pay.service.JedisService#recycleJedis(java.lang.String, * redis.clients.jedis.Jedis) */ @Override public void recycleJedis(String name, Jedis jedis) { if (conf == null || _jedis == null || name == null) return; JedisPool pool = _jedis.get(name); if (pool == null) { if (LOG.isDebugEnabled()) { LOG.debug("Not found jedis name {}", name); } return; } pool.returnResource(jedis); } }
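For reference, here is a standalone sketch of the pool that the start(PropsConf) loop above builds for a group when only the defaults apply, including the 127.0.0.1 host fallback; it is illustrative only and hard-codes the default values instead of reading them through PropsConf.

import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;
import redis.clients.jedis.Protocol;

public final class DefaultPoolSketch {
    public static void main(String[] args) {
        JedisPoolConfig config = new JedisPoolConfig();
        config.setMaxTotal(200);        // redis.conn.maxTotal default
        config.setMaxIdle(100);         // redis.conn.maxIdle default
        config.setMinIdle(1);           // redis.conn.minIdle default
        config.setMaxWaitMillis(3000);  // redis.conn.maxWaitMillis default
        config.setTestOnBorrow(true);

        // Host falls back to 127.0.0.1 when a group omits "ip"; a blank password is passed as null.
        JedisPool pool = new JedisPool(config, "127.0.0.1", 6379, 2000, null, Protocol.DEFAULT_DATABASE);
        Jedis jedis = pool.getResource();
        try {
            System.out.println(jedis.ping());
        } finally {
            jedis.close();  // returns the connection to its pool on Jedis 2.x
        }
        pool.destroy();
    }
}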
jframe-plugin/jframe-jedis/src/main/java/jframe/jedis/service/JedisServiceImpl.java
/** * */ package jframe.jedis.service; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import jframe.core.plugin.annotation.InjectPlugin; import jframe.core.plugin.annotation.Injector; import jframe.core.plugin.annotation.Start; import jframe.core.plugin.annotation.Stop; import jframe.core.util.PropsConf; import jframe.jedis.JedisPlugin; import redis.clients.jedis.Jedis; import redis.clients.jedis.JedisCluster; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import redis.clients.jedis.Protocol; /** * @author dzh * @date Aug 6, 2014 1:31:26 PM * @since 1.0 */ @Injector public class JedisServiceImpl implements JedisService { static final Logger LOG = LoggerFactory.getLogger(JedisServiceImpl.class); private PropsConf conf = new PropsConf(); // groupId private Map<String, JedisPool> _jedis = new HashMap<String, JedisPool>(); public PropsConf init(File jedis) throws Exception { if (!jedis.exists()) { LOG.error("Not found jedis file {}", jedis.getAbsolutePath()); throw new FileNotFoundException("jedis file not found!" + jedis.getAbsolutePath()); } return init(new FileInputStream(jedis), true); } public PropsConf init(InputStream jedis, boolean closeIn) throws Exception { conf.init(jedis); return conf; } JedisPoolConfig createPoolConfig(PropsConf conf, String id) { JedisPoolConfig config = new JedisPoolConfig(); config.setMaxTotal(conf.getConfInt(id, "redis.conn.maxTotal", "200")); config.setMaxIdle(conf.getConfInt(id, "redis.conn.maxIdle", "100")); config.setMinIdle(conf.getConfInt(id, "redis.conn.minIdle", "1")); config.setMaxWaitMillis(conf.getConfLong(id, "redis.conn.maxWaitMillis", "3000")); config.setTestOnBorrow(true); return config; } @InjectPlugin static JedisPlugin plugin; @Start public void start() { String jedis = plugin.getConfig("file.redis", ""); if ("".equals(jedis)) { LOG.error("jedis.conf not found! {}", jedis); return; } try { start(init(new File(jedis))); } catch (Exception e) { LOG.error(e.getMessage(), e); } } public void start(PropsConf conf) { LOG.info("JedisServiceImpl starting"); String[] hosts = conf.getGroupIds(); for (String h : hosts) { if ("".equals(h)) continue; try { String ip = conf.getConf(h, "ip"); // if ("127.0.0.1".equals(ip)) { // continue; // } int port = conf.getConfInt(h, "port", "6379"); int timeout = conf.getConfInt(h, "timeout", "2000"); String passwd = conf.getConf(h, "passwd").trim(); int database = conf.getConfInt(h, "database", String.valueOf(Protocol.DEFAULT_DATABASE));// 0 JedisPoolConfig config = createPoolConfig(conf, h); _jedis.put(h, new JedisPool(config, ip, port, timeout, "".equals(passwd) ? 
null : passwd, database)); } catch (Exception e) { LOG.error(e.getMessage(), e); continue; } } LOG.info("JedisServiceImpl start successfully"); } @Stop public void stop() { LOG.info("JedisServiceImpl stopping"); Iterator<String> iter = _jedis.keySet().iterator(); while (iter.hasNext()) { try { JedisPool j = _jedis.get(iter.next()); if (j != null) j.destroy(); } catch (Exception e) { LOG.warn(e.getMessage(), e); } } LOG.info("JedisServiceImpl stop successfully"); close(); } /* * (non-Javadoc) * @see dono.pay.service.JedisService#getJedis(java.lang.String) */ @Override public Jedis getJedis(String name) { try { JedisPool pool = _jedis.get(name); if (pool == null) return null; return pool.getResource(); } catch (Exception e) { LOG.error(e.getMessage(), e); } return null; } /* * (non-Javadoc) * @see dono.pay.service.JedisService#getJedisCluster(java.lang.String) */ @Override public JedisCluster getJedisCluster(String name) { // Set<HostAndPort> jedisClusterNodes = new HashSet<HostAndPort>(); // //Jedis Cluster will attempt to discover cluster nodes automatically // jedisClusterNodes.add(new HostAndPort("127.0.0.1", 7379)); // JedisCluster jc = new JedisCluster(jedisClusterNodes); // jc.set("foo", "bar"); // String value = jc.get("foo"); return null; } void close() { if (conf != null) conf.clear(); } /* * (non-Javadoc) * @see dono.pay.service.JedisService#getJedis() */ @Override @Deprecated public Jedis getJedis() { try { JedisPool pool = _jedis.get(conf.getConf(null, "redis.host")); if (pool == null) return null; return pool.getResource(); } catch (Exception e) { LOG.error(e.getMessage(), e); } return null; } /* * (non-Javadoc) * @see * dono.pay.service.JedisService#recycleJedis(redis.clients.jedis.Jedis) */ @Override public void recycleJedis(Jedis jedis) { recycleJedis(null, jedis); } /* * (non-Javadoc) * @see dono.pay.service.JedisService#recycleJedis(java.lang.String, * redis.clients.jedis.Jedis) */ @Override public void recycleJedis(String name, Jedis jedis) { if (conf == null || _jedis == null || name == null) return; JedisPool pool = _jedis.get(name); if (pool == null) { if (LOG.isDebugEnabled()) { LOG.debug("Not found jedis name {}", name); } return; } pool.returnResource(jedis); } }
jedis default ip
jframe-plugin/jframe-jedis/src/main/java/jframe/jedis/service/JedisServiceImpl.java
jedis default ip
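A short usage sketch of the service through the JedisService methods implemented above (getJedis and recycleJedis); the "cache" group id is hypothetical and must match a group defined in the configured redis properties file, and obtaining the injected service instance is left out of scope.

import jframe.jedis.service.JedisService;

import redis.clients.jedis.Jedis;

public final class JedisServiceUsageSketch {
    static void cacheExample(JedisService service) {
        Jedis jedis = service.getJedis("cache");   // "cache" is a hypothetical group id
        if (jedis == null) {
            return;                                // unknown group or pool failure
        }
        try {
            jedis.setex("greeting", 60, "hello");  // store a value with a 60 second TTL
            System.out.println(jedis.get("greeting"));
        } finally {
            service.recycleJedis("cache", jedis);  // always hand the connection back to its pool
        }
    }
}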
Java
apache-2.0
bb939111a026a12880a1e9b75c5579c366177fe0
0
reactivex/rxjava,reactivex/rxjava,NiteshKant/RxJava,artem-zinnatullin/RxJava,ReactiveX/RxJava,NiteshKant/RxJava,artem-zinnatullin/RxJava,ReactiveX/RxJava
/** * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ package io.reactivex; import java.util.NoSuchElementException; import java.util.concurrent.*; import org.reactivestreams.Publisher; import io.reactivex.annotations.*; import io.reactivex.disposables.Disposable; import io.reactivex.exceptions.Exceptions; import io.reactivex.functions.*; import io.reactivex.internal.functions.*; import io.reactivex.internal.fuseable.*; import io.reactivex.internal.observers.*; import io.reactivex.internal.operators.completable.*; import io.reactivex.internal.operators.flowable.*; import io.reactivex.internal.operators.maybe.*; import io.reactivex.internal.operators.mixed.*; import io.reactivex.internal.operators.observable.*; import io.reactivex.internal.operators.single.*; import io.reactivex.internal.util.*; import io.reactivex.observers.TestObserver; import io.reactivex.plugins.RxJavaPlugins; import io.reactivex.schedulers.Schedulers; /** * The {@code Single} class implements the Reactive Pattern for a single value response. * <p> * {@code Single} behaves similarly to {@link Observable} except that it can only emit either a single successful * value or an error (there is no "onComplete" notification as there is for an {@link Observable}). * <p> * The {@code Single} class implements the {@link SingleSource} base interface and the default consumer * type it interacts with is the {@link SingleObserver} via the {@link #subscribe(SingleObserver)} method. * <p> * The {@code Single} operates with the following sequential protocol: * <pre> * <code>onSubscribe (onSuccess | onError)?</code> * </pre> * <p> * Note that {@code onSuccess} and {@code onError} are mutually exclusive events; unlike {@code Observable}, * {@code onSuccess} is never followed by {@code onError}. * <p> * Like {@code Observable}, a running {@code Single} can be stopped through the {@link Disposable} instance * provided to consumers through {@link SingleObserver#onSubscribe}. * <p> * Like an {@code Observable}, a {@code Single} is lazy, can be either "hot" or "cold", synchronous or * asynchronous. {@code Single} instances returned by the methods of this class are <em>cold</em> * and there is a standard <em>hot</em> implementation in the form of a subject: * {@link io.reactivex.subjects.SingleSubject SingleSubject}. * <p> * The documentation for this class makes use of marble diagrams. The following legend explains these diagrams: * <p> * <img width="640" height="301" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.legend.png" alt=""> * <p> * See {@link Flowable} or {@link Observable} for the * implementation of the Reactive Pattern for a stream or vector of values. * <p> * For more information see the <a href="http://reactivex.io/documentation/single.html">ReactiveX * documentation</a>. 
* <p> * Example: * <pre><code> * Disposable d = Single.just("Hello World") * .delay(10, TimeUnit.SECONDS, Schedulers.io()) * .subscribeWith(new DisposableSingleObserver&lt;String&gt;() { * &#64;Override * public void onStart() { * System.out.println("Started"); * } * * &#64;Override * public void onSuccess(String value) { * System.out.println("Success: " + value); * } * * &#64;Override * public void onError(Throwable error) { * error.printStackTrace(); * } * }); * * Thread.sleep(5000); * * d.dispose(); * </code></pre> * <p> * Note that by design, subscriptions via {@link #subscribe(SingleObserver)} can't be cancelled/disposed * from the outside (hence the * {@code void} return of the {@link #subscribe(SingleObserver)} method) and it is the * responsibility of the implementor of the {@code SingleObserver} to allow this to happen. * RxJava supports such usage with the standard * {@link io.reactivex.observers.DisposableSingleObserver DisposableSingleObserver} instance. * For convenience, the {@link #subscribeWith(SingleObserver)} method is provided as well to * allow working with a {@code SingleObserver} (or subclass) instance to be applied with in * a fluent manner (such as in the example above). * @param <T> * the type of the item emitted by the Single * @since 2.0 * @see io.reactivex.observers.DisposableSingleObserver */ public abstract class Single<T> implements SingleSource<T> { /** * Runs multiple SingleSources and signals the events of the first one that signals (cancelling * the rest). * <p> * <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.amb.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code amb} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Iterable sequence of sources. A subscription to each source will * occur in the same order as in this Iterable. * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> amb(final Iterable<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new SingleAmb<T>(null, sources)); } /** * Runs multiple SingleSources and signals the events of the first one that signals (cancelling * the rest). * <p> * <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.ambArray.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code ambArray} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the array of sources. A subscription to each source will * occur in the same order as in this array. * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Single<T> ambArray(final SingleSource<? extends T>... sources) { if (sources.length == 0) { return error(SingleInternalHelper.<T>emptyThrower()); } if (sources.length == 1) { return wrap((SingleSource<T>)sources[0]); } return RxJavaPlugins.onAssembly(new SingleAmb<T>(sources, null)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * an Iterable sequence. 
* <p> * <img width="640" height="319" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.i.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Iterable sequence of SingleSource instances * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @BackpressureSupport(BackpressureKind.FULL) public static <T> Flowable<T> concat(Iterable<? extends SingleSource<? extends T>> sources) { return concat(Flowable.fromIterable(sources)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * an Observable sequence. * <p> * <img width="640" height="319" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.o.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the ObservableSource of SingleSource instances * @return the new Observable instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Observable<T> concat(ObservableSource<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new ObservableConcatMap(sources, SingleInternalHelper.toObservable(), 2, ErrorMode.IMMEDIATE)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * a Publisher sequence. * <p> * <img width="640" height="308" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.p.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer * and the sources {@code Publisher} is expected to honor it as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Publisher of SingleSource instances * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concat(Publisher<? extends SingleSource<? extends T>> sources) { return concat(sources, 2); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * a Publisher sequence and prefetched by the specified amount. 
* <p> * <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.pn.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer * and the sources {@code Publisher} is expected to honor it as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Publisher of SingleSource instances * @param prefetch the number of SingleSources to prefetch from the Publisher * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> concat(Publisher<? extends SingleSource<? extends T>> sources, int prefetch) { ObjectHelper.requireNonNull(sources, "sources is null"); ObjectHelper.verifyPositive(prefetch, "prefetch"); return RxJavaPlugins.onAssembly(new FlowableConcatMapPublisher(sources, SingleInternalHelper.toFlowable(), prefetch, ErrorMode.IMMEDIATE)); } /** * Returns a Flowable that emits the items emitted by two Singles, one after the other. * <p> * <img width="640" height="366" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T> the common value type * @param source1 * a Single to be concatenated * @param source2 * a Single to be concatenated * @return a Flowable that emits items emitted by the two source Singles, one after the other. * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> concat( SingleSource<? extends T> source1, SingleSource<? extends T> source2 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return concat(Flowable.fromArray(source1, source2)); } /** * Returns a Flowable that emits the items emitted by three Singles, one after the other. * <p> * <img width="640" height="366" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.o3.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T> the common value type * @param source1 * a Single to be concatenated * @param source2 * a Single to be concatenated * @param source3 * a Single to be concatenated * @return a Flowable that emits items emitted by the three source Singles, one after the other. * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> concat( SingleSource<? extends T> source1, SingleSource<? 
extends T> source2, SingleSource<? extends T> source3 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return concat(Flowable.fromArray(source1, source2, source3)); } /** * Returns a Flowable that emits the items emitted by four Singles, one after the other. * <p> * <img width="640" height="362" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.o4.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T> the common value type * @param source1 * a Single to be concatenated * @param source2 * a Single to be concatenated * @param source3 * a Single to be concatenated * @param source4 * a Single to be concatenated * @return a Flowable that emits items emitted by the four source Singles, one after the other. * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> concat( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3, SingleSource<? extends T> source4 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return concat(Flowable.fromArray(source1, source2, source3, source4)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided in * an array. * <p> * <img width="640" height="319" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concatArray.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concatArray} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the array of SingleSource instances * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> concatArray(SingleSource<? extends T>... sources) { return RxJavaPlugins.onAssembly(new FlowableConcatMap(Flowable.fromArray(sources), SingleInternalHelper.toFlowable(), 2, ErrorMode.BOUNDARY)); } /** * Concatenates a sequence of SingleSource eagerly into a single stream of values. * <p> * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the * source SingleSources. The operator buffers the value emitted by these SingleSources and then drains them * in order, each one after the previous one completes. 
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The operator honors backpressure from downstream.</dd> * <dt><b>Scheduler:</b></dt> * <dd>This method does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources a sequence of Single that need to be eagerly concatenated * @return the new Flowable instance with the specified concatenation behavior */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concatArrayEager(SingleSource<? extends T>... sources) { return Flowable.fromArray(sources).concatMapEager(SingleInternalHelper.<T>toFlowable()); } /** * Concatenates a Publisher sequence of SingleSources eagerly into a single stream of values. * <p> * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the * emitted source Publishers as they are observed. The operator buffers the values emitted by these * Publishers and then drains them in order, each one after the previous one completes. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>Backpressure is honored towards the downstream and the outer Publisher is * expected to support backpressure. Violating this assumption, the operator will * signal {@link io.reactivex.exceptions.MissingBackpressureException}.</dd> * <dt><b>Scheduler:</b></dt> * <dd>This method does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources a sequence of Publishers that need to be eagerly concatenated * @return the new Publisher instance with the specified concatenation behavior */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concatEager(Publisher<? extends SingleSource<? extends T>> sources) { return Flowable.fromPublisher(sources).concatMapEager(SingleInternalHelper.<T>toFlowable()); } /** * Concatenates a sequence of SingleSources eagerly into a single stream of values. * <p> * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the * source SingleSources. The operator buffers the values emitted by these SingleSources and then drains them * in order, each one after the previous one completes. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>Backpressure is honored towards the downstream.</dd> * <dt><b>Scheduler:</b></dt> * <dd>This method does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources a sequence of SingleSource that need to be eagerly concatenated * @return the new Flowable instance with the specified concatenation behavior */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concatEager(Iterable<? extends SingleSource<? extends T>> sources) { return Flowable.fromIterable(sources).concatMapEager(SingleInternalHelper.<T>toFlowable()); } /** * Provides an API (via a cold Completable) that bridges the reactive world with the callback-style world. 
* <p> * Example: * <pre><code> * Single.&lt;Event&gt;create(emitter -&gt; { * Callback listener = new Callback() { * &#64;Override * public void onEvent(Event e) { * emitter.onSuccess(e); * } * * &#64;Override * public void onFailure(Exception e) { * emitter.onError(e); * } * }; * * AutoCloseable c = api.someMethod(listener); * * emitter.setCancellable(c::close); * * }); * </code></pre> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code create} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param source the emitter that is called when a SingleObserver subscribes to the returned {@code Single} * @return the new Single instance * @see SingleOnSubscribe * @see Cancellable */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> create(SingleOnSubscribe<T> source) { ObjectHelper.requireNonNull(source, "source is null"); return RxJavaPlugins.onAssembly(new SingleCreate<T>(source)); } /** * Calls a {@link Callable} for each individual {@link SingleObserver} to return the actual {@link SingleSource} to * be subscribed to. * <p> * <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.defer.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code defer} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param singleSupplier the {@code Callable} that is called for each individual {@code SingleObserver} and * returns a SingleSource instance to subscribe to * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> defer(final Callable<? extends SingleSource<? extends T>> singleSupplier) { ObjectHelper.requireNonNull(singleSupplier, "singleSupplier is null"); return RxJavaPlugins.onAssembly(new SingleDefer<T>(singleSupplier)); } /** * Signals a Throwable returned by the callback function for each individual SingleObserver. * <p> * <img width="640" height="283" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.error.c.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code error} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param errorSupplier the callable that is called for each individual SingleObserver and * returns a Throwable instance to be emitted. * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> error(final Callable<? extends Throwable> errorSupplier) { ObjectHelper.requireNonNull(errorSupplier, "errorSupplier is null"); return RxJavaPlugins.onAssembly(new SingleError<T>(errorSupplier)); } /** * Returns a Single that invokes a subscriber's {@link SingleObserver#onError onError} method when the * subscriber subscribes to it. 
* <p> * <img width="640" height="283" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.error.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code error} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param exception * the particular Throwable to pass to {@link SingleObserver#onError onError} * @param <T> * the type of the item (ostensibly) emitted by the Single * @return a Single that invokes the subscriber's {@link SingleObserver#onError onError} method when * the subscriber subscribes to it * @see <a href="http://reactivex.io/documentation/operators/empty-never-throw.html">ReactiveX operators documentation: Throw</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> error(final Throwable exception) { ObjectHelper.requireNonNull(exception, "error is null"); return error(Functions.justCallable(exception)); } /** * Returns a {@link Single} that invokes passed function and emits its result for each new SingleObserver that subscribes. * <p> * Allows you to defer execution of passed function until SingleObserver subscribes to the {@link Single}. * It makes passed function "lazy". * Result of the function invocation will be emitted by the {@link Single}. * <p> * <img width="640" height="467" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.fromCallable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromCallable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param callable * function which execution should be deferred, it will be invoked when SingleObserver will subscribe to the {@link Single}. * @param <T> * the type of the item emitted by the {@link Single}. * @return a {@link Single} whose {@link SingleObserver}s' subscriptions trigger an invocation of the given function. */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromCallable(final Callable<? extends T> callable) { ObjectHelper.requireNonNull(callable, "callable is null"); return RxJavaPlugins.onAssembly(new SingleFromCallable<T>(callable)); } /** * Converts a {@link Future} into a {@code Single}. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a Single that emits the return * value of the {@link Future#get} method of that object, by passing the object into the {@code from} * method. * <p> * <em>Important note:</em> This Single is blocking; you cannot dispose it. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromFuture} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param future * the source {@link Future} * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromFuture(Future<? extends T> future) { return toSingle(Flowable.<T>fromFuture(future)); } /** * Converts a {@link Future} into a {@code Single}, with a timeout on the Future. 
* <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a {@code Single} that emits * the return value of the {@link Future#get} method of that object, by passing the object into the * {@code from} method. * <p> * <em>Important note:</em> This {@code Single} is blocking; you cannot dispose it. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromFuture} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param future * the source {@link Future} * @param timeout * the maximum time to wait before calling {@code get} * @param unit * the {@link TimeUnit} of the {@code timeout} argument * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromFuture(Future<? extends T> future, long timeout, TimeUnit unit) { return toSingle(Flowable.<T>fromFuture(future, timeout, unit)); } /** * Converts a {@link Future} into a {@code Single}, with a timeout on the Future. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a {@code Single} that emits * the return value of the {@link Future#get} method of that object, by passing the object into the * {@code from} method. * <p> * <em>Important note:</em> This {@code Single} is blocking; you cannot dispose it. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>You specify the {@link Scheduler} where the blocking wait will happen.</dd> * </dl> * * @param future * the source {@link Future} * @param timeout * the maximum time to wait before calling {@code get} * @param unit * the {@link TimeUnit} of the {@code timeout} argument * @param scheduler * the Scheduler to use for the blocking wait * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public static <T> Single<T> fromFuture(Future<? extends T> future, long timeout, TimeUnit unit, Scheduler scheduler) { return toSingle(Flowable.<T>fromFuture(future, timeout, unit, scheduler)); } /** * Converts a {@link Future}, operating on a specified {@link Scheduler}, into a {@code Single}. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.s.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a {@code Single} that emits * the return value of the {@link Future#get} method of that object, by passing the object into the * {@code from} method. 
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>You specify which {@link Scheduler} this operator will use.</dd> * </dl> * * @param future * the source {@link Future} * @param scheduler * the {@link Scheduler} to wait for the Future on. Use a Scheduler such as * {@link Schedulers#io()} that can block and wait on the Future * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public static <T> Single<T> fromFuture(Future<? extends T> future, Scheduler scheduler) { return toSingle(Flowable.<T>fromFuture(future, scheduler)); } /** * Wraps a specific Publisher into a Single and signals its single element or error. * <p>If the source Publisher is empty, a NoSuchElementException is signalled. If * the source has more than one element, an IndexOutOfBoundsException is signalled. * <p> * The {@link Publisher} must follow the * <a href="https://github.com/reactive-streams/reactive-streams-jvm#reactive-streams">Reactive-Streams specification</a>. * Violating the specification may result in undefined behavior. * <p> * If possible, use {@link #create(SingleOnSubscribe)} to create a * source-like {@code Single} instead. * <p> * Note that even though {@link Publisher} appears to be a functional interface, it * is not recommended to implement it through a lambda as the specification requires * state management that is not achievable with a stateless lambda. * <p> * <img width="640" height="322" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.fromPublisher.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The {@code publisher} is consumed in an unbounded fashion but will be cancelled * if it produced more than one item.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromPublisher} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param publisher the source Publisher instance, not null * @return the new Single instance * @see #create(SingleOnSubscribe) */ @BackpressureSupport(BackpressureKind.UNBOUNDED_IN) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromPublisher(final Publisher<? extends T> publisher) { ObjectHelper.requireNonNull(publisher, "publisher is null"); return RxJavaPlugins.onAssembly(new SingleFromPublisher<T>(publisher)); } /** * Wraps a specific ObservableSource into a Single and signals its single element or error. * <p>If the ObservableSource is empty, a NoSuchElementException is signalled. * If the source has more than one element, an IndexOutOfBoundsException is signalled. * <p> * <img width="640" height="343" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.fromObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param observableSource the source Observable, not null * @param <T> * the type of the item emitted by the {@link Single}. * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromObservable(ObservableSource<? 
extends T> observableSource) { ObjectHelper.requireNonNull(observableSource, "observableSource is null"); return RxJavaPlugins.onAssembly(new ObservableSingleSingle<T>(observableSource, null)); } /** * Returns a {@code Single} that emits a specified item. * <p> * <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.just.png" alt=""> * <p> * To convert any object into a {@code Single} that emits that object, pass that object into the * {@code just} method. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code just} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param item * the item to emit * @param <T> * the type of that item * @return a {@code Single} that emits {@code item} * @see <a href="http://reactivex.io/documentation/operators/just.html">ReactiveX operators documentation: Just</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> just(final T item) { ObjectHelper.requireNonNull(item, "value is null"); return RxJavaPlugins.onAssembly(new SingleJust<T>(item)); } /** * Merges an Iterable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(Iterable)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * @param <T> the common and resulting value type * @param sources the Iterable sequence of SingleSource sources * @return the new Flowable instance * @since 2.0 * @see #mergeDelayError(Iterable) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> merge(Iterable<? extends SingleSource<? extends T>> sources) { return merge(Flowable.fromIterable(sources)); } /** * Merges a Flowable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once. 
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(Publisher)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * @param <T> the common and resulting value type * @param sources the Flowable sequence of SingleSource sources * @return the new Flowable instance * @see #mergeDelayError(Publisher) * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> merge(Publisher<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new FlowableFlatMapPublisher(sources, SingleInternalHelper.toFlowable(), false, Integer.MAX_VALUE, Flowable.bufferSize())); } /** * Flattens a {@code Single} that emits a {@code Single} into a single {@code Single} that emits the item * emitted by the nested {@code Single}, without any transformation. * <p> * <img width="640" height="370" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.oo.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dd>The resulting {@code Single} emits the outer source's or the inner {@code SingleSource}'s {@code Throwable} as is. * Unlike the other {@code merge()} operators, this operator won't and can't produce a {@code CompositeException} because there is * only one possibility for the outer or the inner {@code SingleSource} to emit an {@code onError} signal. * Therefore, there is no need for a {@code mergeDelayError(SingleSource<SingleSource<T>>)} operator. * </dd> * </dl> * * @param <T> the value type of the sources and the output * @param source * a {@code Single} that emits a {@code Single} * @return a {@code Single} that emits the item that is the result of flattening the {@code Single} emitted * by {@code source} * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Single<T> merge(SingleSource<? extends SingleSource<? 
extends T>> source) { ObjectHelper.requireNonNull(source, "source is null"); return RxJavaPlugins.onAssembly(new SingleFlatMap<SingleSource<? extends T>, T>(source, (Function)Functions.identity())); } /** * Flattens two Singles into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by * using the {@code merge} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(SingleSource, SingleSource)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #mergeDelayError(SingleSource, SingleSource) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> merge( SingleSource<? extends T> source1, SingleSource<? extends T> source2 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return merge(Flowable.fromArray(source1, source2)); } /** * Flattens three Singles into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code merge} method. 
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(SingleSource, SingleSource, SingleSource)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #mergeDelayError(SingleSource, SingleSource, SingleSource) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> merge( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return merge(Flowable.fromArray(source1, source2, source3)); } /** * Flattens four Singles into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code merge} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. 
* {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(SingleSource, SingleSource, SingleSource, SingleSource)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @param source4 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #mergeDelayError(SingleSource, SingleSource, SingleSource, SingleSource) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> merge( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3, SingleSource<? extends T> source4 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return merge(Flowable.fromArray(source1, source2, source3, source4)); } /** * Merges an Iterable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once and delaying any error(s) until all sources succeed or fail. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common and resulting value type * @param sources the Iterable sequence of SingleSource sources * @return the new Flowable instance * @see #merge(Iterable) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> mergeDelayError(Iterable<? extends SingleSource<? extends T>> sources) { return mergeDelayError(Flowable.fromIterable(sources)); } /** * Merges a Flowable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once and delaying any error(s) until all sources succeed or fail. 
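     * <p>
     * A minimal usage sketch (the inner sources are illustrative only); the error of a failing
     * source is delivered only after the other sources have terminated:
     * <pre><code>
     * Single.mergeDelayError(Flowable.just(
     *         Single.just(1),
     *         Single.&lt;Integer&gt;error(new IllegalStateException()),
     *         Single.just(3)))
     *     .subscribe(System.out::println, Throwable::printStackTrace);
     * </code></pre>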
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common and resulting value type * @param sources the Flowable sequence of SingleSource sources * @return the new Flowable instance * @see #merge(Publisher) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> mergeDelayError(Publisher<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new FlowableFlatMapPublisher(sources, SingleInternalHelper.toFlowable(), true, Integer.MAX_VALUE, Flowable.bufferSize())); } /** * Flattens two Singles into a single Flowable, without any transformation, delaying * any error(s) until all sources succeed or fail. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by * using the {@code mergeDelayError} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #merge(SingleSource, SingleSource) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> mergeDelayError( SingleSource<? extends T> source1, SingleSource<? extends T> source2 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return mergeDelayError(Flowable.fromArray(source1, source2)); } /** * Flattens three Singles into a single Flowable, without any transformation, delaying * any error(s) until all sources succeed or fail. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code mergeDelayError} method. 
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #merge(SingleSource, SingleSource, SingleSource) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> mergeDelayError( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return mergeDelayError(Flowable.fromArray(source1, source2, source3)); } /** * Flattens four Singles into a single Flowable, without any transformation, delaying * any error(s) until all sources succeed or fail. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code mergeDelayError} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @param source4 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #merge(SingleSource, SingleSource, SingleSource, SingleSource) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> mergeDelayError( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3, SingleSource<? extends T> source4 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return mergeDelayError(Flowable.fromArray(source1, source2, source3, source4)); } /** * Returns a singleton instance of a never-signalling Single (only calls onSubscribe). 
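     * <p>
     * A small illustrative sketch; since {@code never()} won't signal, a timeout is typically
     * applied further down the chain:
     * <pre><code>
     * Single.never()
     *     .timeout(1, TimeUnit.SECONDS)
     *     .subscribe(System.out::println, Throwable::printStackTrace);
     * </code></pre>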
* <p> * <img width="640" height="244" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.never.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code never} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the target value type * @return the singleton never instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Single<T> never() { return RxJavaPlugins.onAssembly((Single<T>) SingleNever.INSTANCE); } /** * Signals success with 0L value after the given delay for each SingleObserver. * <p> * <img width="640" height="292" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.timer.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timer} operates by default on the {@code computation} {@link Scheduler}.</dd> * </dl> * @param delay the delay amount * @param unit the time unit of the delay * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public static Single<Long> timer(long delay, TimeUnit unit) { return timer(delay, unit, Schedulers.computation()); } /** * Signals success with 0L value after the given delay for each SingleObserver. * <p> * <img width="640" height="292" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.timer.s.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify the {@link Scheduler} to signal on.</dd> * </dl> * @param delay the delay amount * @param unit the time unit of the delay * @param scheduler the scheduler where the single 0L will be emitted * @return the new Single instance * @throws NullPointerException * if unit is null, or * if scheduler is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public static Single<Long> timer(final long delay, final TimeUnit unit, final Scheduler scheduler) { ObjectHelper.requireNonNull(unit, "unit is null"); ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleTimer(delay, unit, scheduler)); } /** * Compares two SingleSources and emits true if they emit the same value (compared via Object.equals). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code equals} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the common value type * @param first the first SingleSource instance * @param second the second SingleSource instance * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<Boolean> equals(final SingleSource<? extends T> first, final SingleSource<? extends T> second) { // NOPMD ObjectHelper.requireNonNull(first, "first is null"); ObjectHelper.requireNonNull(second, "second is null"); return RxJavaPlugins.onAssembly(new SingleEquals<T>(first, second)); } /** * <strong>Advanced use only:</strong> creates a Single instance without * any safeguards by using a callback that is called with a SingleObserver. 
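     * <p>
     * A minimal sketch of what a raw {@link SingleSource} callback looks like (error handling and
     * cancellation are omitted here; prefer {@code create(SingleOnSubscribe)} for safe construction):
     * <pre><code>
     * Single.unsafeCreate(observer -&gt; {
     *     observer.onSubscribe(Disposables.empty());
     *     observer.onSuccess("value");
     * });
     * </code></pre>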
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code unsafeCreate} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @param <T> the value type
     * @param onSubscribe the function that is called with the subscribing SingleObserver
     * @return the new Single instance
     * @throws IllegalArgumentException if {@code onSubscribe} is a subclass of {@code Single}; such
     * instances don't need conversion and their use is possibly a port remnant from 1.x; one should use {@link #hide()}
     * instead.
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public static <T> Single<T> unsafeCreate(SingleSource<T> onSubscribe) {
        ObjectHelper.requireNonNull(onSubscribe, "onSubscribe is null");
        if (onSubscribe instanceof Single) {
            throw new IllegalArgumentException("unsafeCreate(Single) should be upgraded");
        }
        return RxJavaPlugins.onAssembly(new SingleFromUnsafeSource<T>(onSubscribe));
    }

    /**
     * Allows using and disposing a resource while running a SingleSource instance generated from
     * that resource (similar to a try-with-resources).
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code using} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @param <T> the value type of the SingleSource generated
     * @param <U> the resource type
     * @param resourceSupplier the Callable called for each SingleObserver to generate a resource Object
     * @param singleFunction the function called with the returned resource
     *            Object from {@code resourceSupplier} and should return a SingleSource instance
     *            to be run by the operator
     * @param disposer the consumer of the generated resource that is called exactly once for
     *            that particular resource when the generated SingleSource terminates
     *            (successfully or with an error) or gets cancelled.
     * @return the new Single instance
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public static <T, U> Single<T> using(Callable<U> resourceSupplier,
            Function<? super U, ? extends SingleSource<? extends T>> singleFunction,
            Consumer<? super U> disposer) {
        return using(resourceSupplier, singleFunction, disposer, true);
    }

    /**
     * Allows using and disposing a resource while running a SingleSource instance generated from
     * that resource (similar to a try-with-resources).
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code using} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @param <T> the value type of the SingleSource generated
     * @param <U> the resource type
     * @param resourceSupplier the Callable called for each SingleObserver to generate a resource Object
     * @param singleFunction the function called with the returned resource
     *            Object from {@code resourceSupplier} and should return a SingleSource instance
     *            to be run by the operator
     * @param disposer the consumer of the generated resource that is called exactly once for
     *            that particular resource when the generated SingleSource terminates
     *            (successfully or with an error) or gets cancelled.
     * @param eager
     *            if true, the disposer is called before the terminal event is signalled;
     *            if false, the disposer is called after the terminal event is delivered to downstream
     * @return the new Single instance
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public static <T, U> Single<T> using(
            final Callable<U> resourceSupplier, final Function<? super U, ? extends SingleSource<? extends T>> singleFunction,
            final Consumer<?
super U> disposer, final boolean eager) { ObjectHelper.requireNonNull(resourceSupplier, "resourceSupplier is null"); ObjectHelper.requireNonNull(singleFunction, "singleFunction is null"); ObjectHelper.requireNonNull(disposer, "disposer is null"); return RxJavaPlugins.onAssembly(new SingleUsing<T, U>(resourceSupplier, singleFunction, disposer, eager)); } /** * Wraps a SingleSource instance into a new Single instance if not already a Single * instance. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code wrap} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param source the source to wrap * @return the Single wrapper or the source cast to Single (if possible) */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> wrap(SingleSource<T> source) { ObjectHelper.requireNonNull(source, "source is null"); if (source instanceof Single) { return RxJavaPlugins.onAssembly((Single<T>)source); } return RxJavaPlugins.onAssembly(new SingleFromUnsafeSource<T>(source)); } /** * Waits until all SingleSource sources provided by the Iterable sequence signal a success * value and calls a zipper function with an array of these values to return a result * to be emitted to downstream. * <p> * If the {@code Iterable} of {@link SingleSource}s is empty a {@link NoSuchElementException} error is signalled after subscription. * <p> * Note on method signature: since Java doesn't allow creating a generic array with {@code new T[]}, the * implementation of this operator has to create an {@code Object[]} instead. Unfortunately, a * {@code Function<Integer[], R>} passed to the method would trigger a {@code ClassCastException}. * * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/zip.png" alt=""> * <p> * If any of the SingleSources signal an error, all other SingleSources get cancelled and the * error emitted to downstream immediately. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the common value type * @param <R> the result value type * @param sources the Iterable sequence of SingleSource instances. An empty sequence will result in an * {@code onError} signal of {@link NoSuchElementException}. * @param zipper the function that receives an array with values from each SingleSource * and should return a value to be emitted to downstream * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T, R> Single<R> zip(final Iterable<? extends SingleSource<? extends T>> sources, Function<? super Object[], ? extends R> zipper) { ObjectHelper.requireNonNull(zipper, "zipper is null"); ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new SingleZipIterable<T, R>(sources, zipper)); } /** * Returns a Single that emits the results of a specified combiner function applied to two items emitted by * two other Singles. 
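     * <p>
     * For illustration, a minimal usage sketch combining two illustrative sources:
     * <pre><code>
     * Single&lt;String&gt; name = Single.just("Jane");
     * Single&lt;Integer&gt; age = Single.just(30);
     *
     * Single.zip(name, age, (n, a) -&gt; n + " is " + a)
     *     .subscribe(System.out::println);
     * </code></pre>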
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, BiFunction<? super T1, ? super T2, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return zipArray(Functions.toFunction(zipper), source1, source2); } /** * Returns a Single that emits the results of a specified combiner function applied to three items emitted * by three other Singles. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3); } /** * Returns a Single that emits the results of a specified combiner function applied to four items * emitted by four other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4); } /** * Returns a Single that emits the results of a specified combiner function applied to five items * emitted by five other Singles. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? 
extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5); } /** * Returns a Single that emits the results of a specified combiner function applied to six items * emitted by six other Singles. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6); } /** * Returns a Single that emits the results of a specified combiner function applied to seven items * emitted by seven other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <T7> the seventh source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param source7 * a seventh source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, T7, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, SingleSource<? extends T7> source7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); ObjectHelper.requireNonNull(source7, "source7 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6, source7); } /** * Returns a Single that emits the results of a specified combiner function applied to eight items * emitted by eight other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <T7> the seventh source Single's value type * @param <T8> the eighth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param source7 * a seventh source Single * @param source8 * an eighth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, T7, T8, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, SingleSource<? extends T7> source7, SingleSource<? extends T8> source8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); ObjectHelper.requireNonNull(source7, "source7 is null"); ObjectHelper.requireNonNull(source8, "source8 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6, source7, source8); } /** * Returns a Single that emits the results of a specified combiner function applied to nine items * emitted by nine other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <T7> the seventh source Single's value type * @param <T8> the eighth source Single's value type * @param <T9> the ninth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param source7 * a seventh source Single * @param source8 * an eighth source Single * @param source9 * a ninth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, SingleSource<? extends T7> source7, SingleSource<? extends T8> source8, SingleSource<? extends T9> source9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); ObjectHelper.requireNonNull(source7, "source7 is null"); ObjectHelper.requireNonNull(source8, "source8 is null"); ObjectHelper.requireNonNull(source9, "source9 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6, source7, source8, source9); } /** * Waits until all SingleSource sources provided via an array signal a success * value and calls a zipper function with an array of these values to return a result * to be emitted to downstream. * <p> * If the array of {@link SingleSource}s is empty a {@link NoSuchElementException} error is signalled immediately. * <p> * Note on method signature: since Java doesn't allow creating a generic array with {@code new T[]}, the * implementation of this operator has to create an {@code Object[]} instead. Unfortunately, a * {@code Function<Integer[], R>} passed to the method would trigger a {@code ClassCastException}. 
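     * <p>
     * Consequently, a minimal sketch of this operator works with the {@code Object[]} elements and
     * casts them back explicitly (the sources and casts below are illustrative only):
     * <pre><code>
     * Single.zipArray(values -&gt; (Integer) values[0] + (Integer) values[1],
     *         Single.just(1), Single.just(2))
     *     .subscribe(System.out::println);
     * </code></pre>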
* * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/zip.png" alt=""> * <p> * If any of the SingleSources signal an error, all other SingleSources get cancelled and the * error emitted to downstream immediately. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zipArray} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the common value type * @param <R> the result value type * @param sources the array of SingleSource instances. An empty sequence will result in an * {@code onError} signal of {@link NoSuchElementException}. * @param zipper the function that receives an array with values from each SingleSource * and should return a value to be emitted to downstream * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T, R> Single<R> zipArray(Function<? super Object[], ? extends R> zipper, SingleSource<? extends T>... sources) { ObjectHelper.requireNonNull(zipper, "zipper is null"); ObjectHelper.requireNonNull(sources, "sources is null"); if (sources.length == 0) { return error(new NoSuchElementException()); } return RxJavaPlugins.onAssembly(new SingleZipArray<T, R>(sources, zipper)); } /** * Signals the event of this or the other SingleSource whichever signals first. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code ambWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param other the other SingleSource to race for the first emission of success or error * @return the new Single instance. A subscription to this provided source will occur after subscribing * to the current source. * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Single<T> ambWith(SingleSource<? extends T> other) { ObjectHelper.requireNonNull(other, "other is null"); return ambArray(this, other); } /** * Calls the specified converter function during assembly time and returns its resulting value. * <p> * This allows fluent conversion to any other type. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code as} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.7 - experimental * @param <R> the resulting object type * @param converter the function that receives the current Single instance and returns a value * @return the converted value * @throws NullPointerException if converter is null * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> R as(@NonNull SingleConverter<T, ? extends R> converter) { return ObjectHelper.requireNonNull(converter, "converter is null").apply(this); } /** * Hides the identity of the current Single, including the Disposable that is sent * to the downstream via {@code onSubscribe()}. * <p> * <img width="640" height="458" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.hide.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code hide} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> hide() { return RxJavaPlugins.onAssembly(new SingleHide<T>(this)); } /** * Transform a Single by applying a particular Transformer function to it. * <p> * This method operates on the Single itself whereas {@link #lift} operates on the Single's SingleObservers. 
     * <p>
     * If the operator you are creating is designed to act on the individual item emitted by a Single, use
     * {@link #lift}. If your operator is designed to transform the source Single as a whole (for instance, by
     * applying a particular set of existing RxJava operators to it) use {@code compose}.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code compose} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <R> the value type of the single returned by the transformer function
     * @param transformer the transformer function, not null
     * @return the source Single, transformed by the transformer function
     * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a>
     */
    @SuppressWarnings("unchecked")
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <R> Single<R> compose(SingleTransformer<? super T, ? extends R> transformer) {
        return wrap(((SingleTransformer<T, R>) ObjectHelper.requireNonNull(transformer, "transformer is null")).apply(this));
    }

    /**
     * Stores the success value or exception from the current Single and replays it to late SingleObservers.
     * <p>
     * The returned Single subscribes to the current Single when the first SingleObserver subscribes.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code cache} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @return the new Single instance
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Single<T> cache() {
        return RxJavaPlugins.onAssembly(new SingleCache<T>(this));
    }

    /**
     * Casts the success value of the current Single into the target type or signals a
     * ClassCastException if not compatible.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code cast} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @param <U> the target type
     * @param clazz the type token to use for casting the success result from the current Single
     * @return the new Single instance
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <U> Single<U> cast(final Class<? extends U> clazz) {
        ObjectHelper.requireNonNull(clazz, "clazz is null");
        return map(Functions.castFunction(clazz));
    }

    /**
     * Returns a Flowable that emits the item emitted by the source Single, then the item emitted by the
     * specified Single.
     * <p>
     * <img width="640" height="335" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concatWith.png" alt="">
     * <dl>
     * <dt><b>Backpressure:</b></dt>
     * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code concatWith} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param other
     *            a Single to be concatenated after the current Single
     * @return a Flowable that emits the item emitted by the source Single, followed by the item emitted by
     *         {@code other}
     * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a>
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Flowable<T> concatWith(SingleSource<? extends T> other) {
        return concat(this, other);
    }

    /**
     * Delays the emission of the success signal from the current Single by the specified amount.
     * An error signal will not be delayed.
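     * <p>
     * A minimal usage sketch (the delay amount is an arbitrary example):
     * <pre><code>
     * Single.just("done")
     *     .delay(500, TimeUnit.MILLISECONDS)
     *     .subscribe(System.out::println);
     * </code></pre>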
* <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd> * </dl> * * @param time the amount of time the success signal should be delayed for * @param unit the time unit * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> delay(long time, TimeUnit unit) { return delay(time, unit, Schedulers.computation(), false); } /** * Delays the emission of the success or error signal from the current Single by the specified amount. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.e.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.5 - experimental * @param time the amount of time the success or error signal should be delayed for * @param unit the time unit * @param delayError if true, both success and error signals are delayed. if false, only success signals are delayed. * @return the new Single instance * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> delay(long time, TimeUnit unit, boolean delayError) { return delay(time, unit, Schedulers.computation(), delayError); } /** * Delays the emission of the success signal from the current Single by the specified amount. * An error signal will not be delayed. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.s.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify the {@link Scheduler} where the non-blocking wait and emission happens</dd> * </dl> * * @param time the amount of time the success signal should be delayed for * @param unit the time unit * @param scheduler the target scheduler to use for the non-blocking wait and emission * @return the new Single instance * @throws NullPointerException * if unit is null, or * if scheduler is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> delay(final long time, final TimeUnit unit, final Scheduler scheduler) { return delay(time, unit, scheduler, false); } /** * Delays the emission of the success or error signal from the current Single by the specified amount. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.se.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify the {@link Scheduler} where the non-blocking wait and emission happens</dd> * </dl> * <p>History: 2.1.5 - experimental * @param time the amount of time the success or error signal should be delayed for * @param unit the time unit * @param scheduler the target scheduler to use for the non-blocking wait and emission * @param delayError if true, both success and error signals are delayed. if false, only success signals are delayed. 
* @return the new Single instance * @throws NullPointerException * if unit is null, or * if scheduler is null * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> delay(final long time, final TimeUnit unit, final Scheduler scheduler, boolean delayError) { ObjectHelper.requireNonNull(unit, "unit is null"); ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleDelay<T>(this, time, unit, scheduler, delayError)); } /** * Delays the actual subscription to the current Single until the given other CompletableSource * completes. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param other the CompletableSource that has to complete before the subscription to the * current Single happens * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> delaySubscription(CompletableSource other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithCompletable<T>(this, other)); } /** * Delays the actual subscription to the current Single until the given other SingleSource * signals success. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the element type of the other source * @param other the SingleSource that has to complete before the subscription to the * current Single happens * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<T> delaySubscription(SingleSource<U> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithSingle<T, U>(this, other)); } /** * Delays the actual subscription to the current Single until the given other ObservableSource * signals its first value or completes. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the element type of the other source * @param other the ObservableSource that has to signal a value or complete before the * subscription to the current Single happens * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<T> delaySubscription(ObservableSource<U> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithObservable<T, U>(this, other)); } /** * Delays the actual subscription to the current Single until the given other Publisher * signals its first value or completes. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <p>The other source is consumed in an unbounded manner (requesting Long.MAX_VALUE from it). 
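     * <p>
     * A minimal usage sketch; the subscription to the current Single is held back until the
     * illustrative trigger Publisher signals its first item:
     * <pre><code>
     * Single.just("ready")
     *     .delaySubscription(Flowable.timer(1, TimeUnit.SECONDS))
     *     .subscribe(System.out::println);
     * </code></pre>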
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The {@code other} publisher is consumed in an unbounded fashion but will be * cancelled after the first item it produced.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the element type of the other source * @param other the Publisher that has to signal a value or complete before the * subscription to the current Single happens * @return the new Single instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<T> delaySubscription(Publisher<U> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithPublisher<T, U>(this, other)); } /** * Delays the actual subscription to the current Single until the given time delay elapsed. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does by default subscribe to the current Single * on the {@code computation} {@link Scheduler} after the delay.</dd> * </dl> * @param time the time amount to wait with the subscription * @param unit the time unit of the waiting * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> delaySubscription(long time, TimeUnit unit) { return delaySubscription(time, unit, Schedulers.computation()); } /** * Delays the actual subscription to the current Single until the given time delay elapsed. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does by default subscribe to the current Single * on the {@link Scheduler} you provided, after the delay.</dd> * </dl> * @param time the time amount to wait with the subscription * @param unit the time unit of the waiting * @param scheduler the scheduler to wait on and subscribe on to the current Single * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> delaySubscription(long time, TimeUnit unit, Scheduler scheduler) { return delaySubscription(Observable.timer(time, unit, scheduler)); } /** * Calls the specified consumer with the success item after this item has been emitted to the downstream. * <p>Note that the {@code doAfterSuccess} action is shared between subscriptions and as such * should be thread-safe. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doAfterSuccess} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.0.1 - experimental * @param onAfterSuccess the Consumer that will be called after emitting an item from upstream to the downstream * @return the new Single instance * @since 2.1 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doAfterSuccess(Consumer<? super T> onAfterSuccess) { ObjectHelper.requireNonNull(onAfterSuccess, "doAfterSuccess is null"); return RxJavaPlugins.onAssembly(new SingleDoAfterSuccess<T>(this, onAfterSuccess)); } /** * Registers an {@link Action} to be called after this Single invokes either onSuccess or onError. 
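     * <p>
     * For illustration, a minimal sketch whose action simply logs the terminal event:
     * <pre><code>
     * Single.just(1)
     *     .doAfterTerminate(() -&gt; System.out.println("terminated"))
     *     .subscribe(System.out::println, Throwable::printStackTrace);
     * </code></pre>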
* * <p>Note that the {@code doAfterTerminate} action is shared between subscriptions and as such * should be thread-safe.</p> * <p> * <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doAfterTerminate.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doAfterTerminate} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * <p>History: 2.0.6 - experimental * @param onAfterTerminate * an {@link Action} to be invoked when the source Single finishes * @return a Single that emits the same items as the source Single, then invokes the * {@link Action} * @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a> * @since 2.1 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doAfterTerminate(Action onAfterTerminate) { ObjectHelper.requireNonNull(onAfterTerminate, "onAfterTerminate is null"); return RxJavaPlugins.onAssembly(new SingleDoAfterTerminate<T>(this, onAfterTerminate)); } /** * Calls the specified action after this Single signals onSuccess or onError or gets disposed by * the downstream. * <p>In case of a race between a terminal event and a dispose call, the provided {@code onFinally} action * is executed once per subscription. * <p>Note that the {@code onFinally} action is shared between subscriptions and as such * should be thread-safe. * <p> * <img width="640" height="291" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doFinally.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doFinally} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.0.1 - experimental * @param onFinally the action called when this Single terminates or gets cancelled * @return the new Single instance * @since 2.1 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doFinally(Action onFinally) { ObjectHelper.requireNonNull(onFinally, "onFinally is null"); return RxJavaPlugins.onAssembly(new SingleDoFinally<T>(this, onFinally)); } /** * Calls the shared consumer with the Disposable sent through the onSubscribe for each * SingleObserver that subscribes to the current Single. * <p> * <img width="640" height="347" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnSubscribe.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnSubscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onSubscribe the consumer called with the Disposable sent via onSubscribe * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnSubscribe(final Consumer<? super Disposable> onSubscribe) { ObjectHelper.requireNonNull(onSubscribe, "onSubscribe is null"); return RxJavaPlugins.onAssembly(new SingleDoOnSubscribe<T>(this, onSubscribe)); } /** * Calls the shared consumer with the success value sent via onSuccess for each * SingleObserver that subscribes to the current Single. 
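     * <p>
     * For illustration, a minimal sketch that logs the value before it reaches the downstream consumer:
     * <pre><code>
     * Single.just(42)
     *     .doOnSuccess(v -&gt; System.out.println("about to deliver " + v))
     *     .subscribe(System.out::println);
     * </code></pre>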
* <p> * <img width="640" height="347" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnSuccess.2.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnSuccess} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onSuccess the consumer called with the success value of onSuccess * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnSuccess(final Consumer<? super T> onSuccess) { ObjectHelper.requireNonNull(onSuccess, "onSuccess is null"); return RxJavaPlugins.onAssembly(new SingleDoOnSuccess<T>(this, onSuccess)); } /** * Calls the shared consumer with the error sent via onError or the value * via onSuccess for each SingleObserver that subscribes to the current Single. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnEvent} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onEvent the consumer called with the success value of onEvent * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnEvent(final BiConsumer<? super T, ? super Throwable> onEvent) { ObjectHelper.requireNonNull(onEvent, "onEvent is null"); return RxJavaPlugins.onAssembly(new SingleDoOnEvent<T>(this, onEvent)); } /** * Calls the shared consumer with the error sent via onError for each * SingleObserver that subscribes to the current Single. * <p> * <img width="640" height="349" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnError.2.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onError the consumer called with the success value of onError * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnError(final Consumer<? super Throwable> onError) { ObjectHelper.requireNonNull(onError, "onError is null"); return RxJavaPlugins.onAssembly(new SingleDoOnError<T>(this, onError)); } /** * Calls the shared {@code Action} if a SingleObserver subscribed to the current Single * disposes the common Disposable it received via onSubscribe. * <p> * <img width="640" height="332" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnDispose.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnDispose} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onDispose the action called when the subscription is disposed * @return the new Single instance * @throws NullPointerException if onDispose is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnDispose(final Action onDispose) { ObjectHelper.requireNonNull(onDispose, "onDispose is null"); return RxJavaPlugins.onAssembly(new SingleDoOnDispose<T>(this, onDispose)); } /** * Filters the success item of the Single via a predicate function and emitting it if the predicate * returns true, completing otherwise. 
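     * <p>
     * For example (values are arbitrary), an even success value passes the filter while an odd one results
     * in an empty {@link Maybe}:
     * <pre><code>
     * Single.just(4)
     *     .filter(v -&gt; v % 2 == 0)
     *     .test()
     *     .assertResult(4);
     *
     * Single.just(5)
     *     .filter(v -&gt; v % 2 == 0)
     *     .test()
     *     .assertResult();
     * </code></pre>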
     * <p>
     * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.filter.png" alt="">
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code filter} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param predicate
     *            a function that evaluates the item emitted by the source Single, returning {@code true}
     *            if it passes the filter
     * @return a Maybe that emits the item emitted by the source Single that the filter
     *         evaluates as {@code true}
     * @see <a href="http://reactivex.io/documentation/operators/filter.html">ReactiveX operators documentation: Filter</a>
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Maybe<T> filter(Predicate<? super T> predicate) {
        ObjectHelper.requireNonNull(predicate, "predicate is null");
        return RxJavaPlugins.onAssembly(new MaybeFilterSingle<T>(this, predicate));
    }

    /**
     * Returns a Single that is based on applying a specified function to the item emitted by the source Single,
     * where that function returns a SingleSource.
     * <p>
     * <img width="640" height="300" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMap.png" alt="">
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <R> the result value type
     * @param mapper
     *            a function that, when applied to the item emitted by the source Single, returns a SingleSource
     * @return the Single returned from {@code mapper} when applied to the item emitted by the source Single
     * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <R> Single<R> flatMap(Function<? super T, ? extends SingleSource<? extends R>> mapper) {
        ObjectHelper.requireNonNull(mapper, "mapper is null");
        return RxJavaPlugins.onAssembly(new SingleFlatMap<T, R>(this, mapper));
    }

    /**
     * Returns a Maybe that is based on applying a specified function to the item emitted by the source Single,
     * where that function returns a MaybeSource.
     * <p>
     * <img width="640" height="191" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapMaybe.png" alt="">
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code flatMapMaybe} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param <R> the result value type
     * @param mapper
     *            a function that, when applied to the item emitted by the source Single, returns a MaybeSource
     * @return the Maybe returned from {@code mapper} when applied to the item emitted by the source Single
     * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a>
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final <R> Maybe<R> flatMapMaybe(final Function<? super T, ? extends MaybeSource<? extends R>> mapper) {
        ObjectHelper.requireNonNull(mapper, "mapper is null");
        return RxJavaPlugins.onAssembly(new SingleFlatMapMaybe<T, R>(this, mapper));
    }

    /**
     * Returns a Flowable that emits items based on applying a specified function to the item emitted by the
     * source Single, where that function returns a Publisher.
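     * <p>
     * A small, illustrative sketch that expands the success value into a range of numbers (the values are
     * made up):
     * <pre><code>
     * Single.just(3)
     *     .flatMapPublisher(v -&gt; Flowable.range(1, v))
     *     .test()
     *     .assertResult(1, 2, 3);
     * </code></pre>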
* <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapPublisher.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer * and the {@code Publisher} returned by the mapper function is expected to honor it as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapPublisher} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function that, when applied to the item emitted by the source Single, returns a * Flowable * @return the Flowable returned from {@code func} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Flowable<R> flatMapPublisher(Function<? super T, ? extends Publisher<? extends R>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapPublisher<T, R>(this, mapper)); } /** * Returns a Flowable that merges each item emitted by the source Single with the values in an * Iterable corresponding to that item that is generated by a selector. * <p> * <img width="640" height="373" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flattenAsFlowable.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The operator honors backpressure from downstream.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code flattenAsFlowable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <U> * the type of item emitted by the resulting Iterable * @param mapper * a function that returns an Iterable sequence of values for when given an item emitted by the * source Single * @return the new Flowable instance * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Flowable<U> flattenAsFlowable(final Function<? super T, ? extends Iterable<? extends U>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapIterableFlowable<T, U>(this, mapper)); } /** * Returns an Observable that maps a success value into an Iterable and emits its items. * <p> * <img width="640" height="373" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flattenAsObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flattenAsObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <U> * the type of item emitted by the resulting Iterable * @param mapper * a function that returns an Iterable sequence of values for when given an item emitted by the * source Single * @return the new Observable instance * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Observable<U> flattenAsObservable(final Function<? super T, ? extends Iterable<? 
extends U>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapIterableObservable<T, U>(this, mapper)); } /** * Returns an Observable that is based on applying a specified function to the item emitted by the source Single, * where that function returns an ObservableSource. * <p> * <img width="640" height="300" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function that, when applied to the item emitted by the source Single, returns an ObservableSource * @return the Observable returned from {@code func} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Observable<R> flatMapObservable(Function<? super T, ? extends ObservableSource<? extends R>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapObservable<T, R>(this, mapper)); } /** * Returns a {@link Completable} that completes based on applying a specified function to the item emitted by the * source {@link Single}, where that function returns a {@link Completable}. * <p> * <img width="640" height="267" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapCompletable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapCompletable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param mapper * a function that, when applied to the item emitted by the source Single, returns a * Completable * @return the Completable returned from {@code func} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Completable flatMapCompletable(final Function<? super T, ? extends CompletableSource> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapCompletable<T>(this, mapper)); } /** * Waits in a blocking fashion until the current Single signals a success value (which is returned) or * an exception (which is propagated). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code blockingGet} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If the source signals an error, the operator wraps a checked {@link Exception} * into {@link RuntimeException} and throws that. 
Otherwise, {@code RuntimeException}s and
     * {@link Error}s are rethrown as they are.</dd>
     * </dl>
     * @return the success value
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final T blockingGet() {
        BlockingMultiObserver<T> observer = new BlockingMultiObserver<T>();
        subscribe(observer);
        return observer.blockingGet();
    }

    /**
     * <strong>This method requires advanced knowledge about building operators, please consider
     * other standard composition methods first;</strong>
     * Returns a {@code Single} which, when subscribed to, invokes the {@link SingleOperator#apply(SingleObserver) apply(SingleObserver)} method
     * of the provided {@link SingleOperator} for each individual downstream {@link Single} and allows the
     * insertion of a custom operator by accessing the downstream's {@link SingleObserver} during this subscription phase
     * and providing a new {@code SingleObserver}, containing the custom operator's intended business logic, that will be
     * used in the subscription process going further upstream.
     * <p>
     * Generally, such a new {@code SingleObserver} will wrap the downstream's {@code SingleObserver} and forwards the
     * {@code onSuccess} and {@code onError} events from the upstream directly or according to the
     * emission pattern the custom operator's business logic requires. In addition, such an operator can intercept the
     * flow control calls of {@code dispose} and {@code isDisposed} that would have traveled upstream and perform
     * additional actions depending on the same business logic requirements.
     * <p>
     * Example:
     * <pre><code>
     * // Step 1: Create the consumer type that will be returned by the SingleOperator.apply():
     *
     * public final class CustomSingleObserver&lt;T&gt; implements SingleObserver&lt;T&gt;, Disposable {
     *
     *     // The downstream's SingleObserver that will receive the onXXX events
     *     final SingleObserver&lt;? super String&gt; downstream;
     *
     *     // The connection to the upstream source that will call this class' onXXX methods
     *     Disposable upstream;
     *
     *     // The constructor takes the downstream subscriber and usually any other parameters
     *     public CustomSingleObserver(SingleObserver&lt;? super String&gt; downstream) {
     *         this.downstream = downstream;
     *     }
     *
     *     // In the subscription phase, the upstream sends a Disposable to this class
     *     // and subsequently this class has to send a Disposable to the downstream.
     *     // Note that relaying the upstream's Disposable directly is not allowed in RxJava
     *     &#64;Override
     *     public void onSubscribe(Disposable s) {
     *         if (upstream != null) {
     *             s.dispose();
     *         } else {
     *             upstream = s;
     *             downstream.onSubscribe(this);
     *         }
     *     }
     *
     *     // The upstream calls this with the success item and the implementation's
     *     // responsibility is to emit an item to the downstream based on the intended
     *     // business logic, or if it can't do so for the particular item,
     *     // signal an error instead
     *     &#64;Override
     *     public void onSuccess(T item) {
     *         String str = item.toString();
     *         if (str.length() &lt; 2) {
     *             downstream.onSuccess(str);
     *         } else {
     *             // Single is usually expected to produce one of the onXXX events
     *             downstream.onError(new NoSuchElementException());
     *         }
     *     }
     *
     *     // Some operators may handle the upstream's error while others
     *     // could just forward it to the downstream.
     *     &#64;Override
     *     public void onError(Throwable throwable) {
     *         downstream.onError(throwable);
     *     }
     *
     *     // Some operators may use their own resources which should be cleaned up if
     *     // the downstream disposes the flow before it completed.
Operators without * // resources can simply forward the dispose to the upstream. * // In some cases, a disposed flag may be set by this method so that other parts * // of this class may detect the dispose and stop sending events * // to the downstream. * &#64;Override * public void dispose() { * upstream.dispose(); * } * * // Some operators may simply forward the call to the upstream while others * // can return the disposed flag set in dispose(). * &#64;Override * public boolean isDisposed() { * return upstream.isDisposed(); * } * } * * // Step 2: Create a class that implements the SingleOperator interface and * // returns the custom consumer type from above in its apply() method. * // Such class may define additional parameters to be submitted to * // the custom consumer type. * * final class CustomSingleOperator&lt;T&gt; implements SingleOperator&lt;String&gt; { * &#64;Override * public SingleObserver&lt;? super String&gt; apply(SingleObserver&lt;? super T&gt; upstream) { * return new CustomSingleObserver&lt;T&gt;(upstream); * } * } * * // Step 3: Apply the custom operator via lift() in a flow by creating an instance of it * // or reusing an existing one. * * Single.just(5) * .lift(new CustomSingleOperator&lt;Integer&gt;()) * .test() * .assertResult("5"); * * Single.just(15) * .lift(new CustomSingleOperator&lt;Integer&gt;()) * .test() * .assertFailure(NoSuchElementException.class); * </code></pre> * <p> * Creating custom operators can be complicated and it is recommended one consults the * <a href="https://github.com/ReactiveX/RxJava/wiki/Writing-operators-for-2.0">RxJava wiki: Writing operators</a> page about * the tools, requirements, rules, considerations and pitfalls of implementing them. * <p> * Note that implementing custom operators via this {@code lift()} method adds slightly more overhead by requiring * an additional allocation and indirection per assembled flows. Instead, extending the abstract {@code Single} * class and creating a {@link SingleTransformer} with it is recommended. * <p> * Note also that it is not possible to stop the subscription phase in {@code lift()} as the {@code apply()} method * requires a non-null {@code SingleObserver} instance to be returned, which is then unconditionally subscribed to * the upstream {@code Single}. For example, if the operator decided there is no reason to subscribe to the * upstream source because of some optimization possibility or a failure to prepare the operator, it still has to * return a {@code SingleObserver} that should immediately dispose the upstream's {@code Disposable} in its * {@code onSubscribe} method. Again, using a {@code SingleTransformer} and extending the {@code Single} is * a better option as {@link #subscribeActual} can decide to not subscribe to its upstream after all. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code lift} does not operate by default on a particular {@link Scheduler}, however, the * {@link SingleOperator} may use a {@code Scheduler} to support its own asynchronous behavior.</dd> * </dl> * * @param <R> the output value type * @param lift the {@link SingleOperator} that receives the downstream's {@code SingleObserver} and should return * a {@code SingleObserver} with custom behavior to be used as the consumer for the current * {@code Single}. 
* @return the new Single instance * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Writing-operators-for-2.0">RxJava wiki: Writing operators</a> * @see #compose(SingleTransformer) */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Single<R> lift(final SingleOperator<? extends R, ? super T> lift) { ObjectHelper.requireNonNull(lift, "onLift is null"); return RxJavaPlugins.onAssembly(new SingleLift<T, R>(this, lift)); } /** * Returns a Single that applies a specified function to the item emitted by the source Single and * emits the result of this function application. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.map.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code map} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function to apply to the item emitted by the Single * @return a Single that emits the item from the source Single, transformed by the specified function * @see <a href="http://reactivex.io/documentation/operators/map.html">ReactiveX operators documentation: Map</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Single<R> map(Function<? super T, ? extends R> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleMap<T, R>(this, mapper)); } /** * Signals true if the current Single signals a success value that is Object-equals with the value * provided. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code contains} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param value the value to compare against the success value of this Single * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<Boolean> contains(Object value) { return contains(value, ObjectHelper.equalsPredicate()); } /** * Signals true if the current Single signals a success value that is equal with * the value provided by calling a bi-predicate. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code contains} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param value the value to compare against the success value of this Single * @param comparer the function that receives the success value of this Single, the value provided * and should return true if they are considered equal * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<Boolean> contains(final Object value, final BiPredicate<Object, Object> comparer) { ObjectHelper.requireNonNull(value, "value is null"); ObjectHelper.requireNonNull(comparer, "comparer is null"); return RxJavaPlugins.onAssembly(new SingleContains<T>(this, value, comparer)); } /** * Flattens this and another Single into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code mergeWith} method. 
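     * <p>
     * An illustrative sketch (the values are arbitrary; with asynchronous sources the emission order of the
     * merged items is generally not guaranteed):
     * <pre><code>
     * Single.just(1)
     *     .mergeWith(Single.just(2))
     *     .test()
     *     .assertResult(1, 2);
     * </code></pre>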
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * a SingleSource to be merged * @return that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> mergeWith(SingleSource<? extends T> other) { return merge(this, other); } /** * Modifies a Single to emit its item (or notify of its error) on a specified {@link Scheduler}, * asynchronously. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.observeOn.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify which {@link Scheduler} this operator will use.</dd> * </dl> * * @param scheduler * the {@link Scheduler} to notify subscribers on * @return the source Single modified so that its subscribers are notified on the specified * {@link Scheduler} * @throws NullPointerException if scheduler is null * @see <a href="http://reactivex.io/documentation/operators/observeon.html">ReactiveX operators documentation: ObserveOn</a> * @see <a href="http://www.grahamlea.com/2014/07/rxjava-threading-examples/">RxJava Threading Examples</a> * @see #subscribeOn */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> observeOn(final Scheduler scheduler) { ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleObserveOn<T>(this, scheduler)); } /** * Instructs a Single to emit an item (returned by a specified function) rather than invoking * {@link SingleObserver#onError onError} if it encounters an error. * <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorReturn.png" alt=""> * <p> * By default, when a Single encounters an error that prevents it from emitting the expected item to its * subscriber, the Single invokes its subscriber's {@link SingleObserver#onError} method, and then quits * without invoking any more of its subscriber's methods. The {@code onErrorReturn} method changes this * behavior. If you pass a function ({@code resumeFunction}) to a Single's {@code onErrorReturn} method, if * the original Single encounters an error, instead of invoking its subscriber's * {@link SingleObserver#onError} method, it will instead emit the return value of {@code resumeFunction}. * <p> * You can use this to prevent errors from propagating or to supply fallback data should errors be * encountered. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorReturn} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param resumeFunction * a function that returns an item that the new Single will emit if the source Single encounters * an error * @return the original Single with appropriately modified behavior * @see <a href="http://reactivex.io/documentation/operators/catch.html">ReactiveX operators documentation: Catch</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorReturn(final Function<Throwable, ? 
extends T> resumeFunction) { ObjectHelper.requireNonNull(resumeFunction, "resumeFunction is null"); return RxJavaPlugins.onAssembly(new SingleOnErrorReturn<T>(this, resumeFunction, null)); } /** * Signals the specified value as success in case the current Single signals an error. * <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorReturnItem.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorReturnItem} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param value the value to signal if the current Single fails * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorReturnItem(final T value) { ObjectHelper.requireNonNull(value, "value is null"); return RxJavaPlugins.onAssembly(new SingleOnErrorReturn<T>(this, null, value)); } /** * Instructs a Single to pass control to another Single rather than invoking * {@link SingleObserver#onError(Throwable)} if it encounters an error. * <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorResumeNext.png" alt=""> * <p> * By default, when a Single encounters an error that prevents it from emitting the expected item to * its {@link SingleObserver}, the Single invokes its SingleObserver's {@code onError} method, and then quits * without invoking any more of its SingleObserver's methods. The {@code onErrorResumeNext} method changes this * behavior. If you pass another Single ({@code resumeSingleInCaseOfError}) to a Single's * {@code onErrorResumeNext} method, if the original Single encounters an error, instead of invoking its * SingleObserver's {@code onError} method, it will instead relinquish control to {@code resumeSingleInCaseOfError} which * will invoke the SingleObserver's {@link SingleObserver#onSuccess onSuccess} method if it is able to do so. In such a case, * because no Single necessarily invokes {@code onError}, the SingleObserver may never know that an error * happened. * <p> * You can use this to prevent errors from propagating or to supply fallback data should errors be * encountered. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorResumeNext} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param resumeSingleInCaseOfError a Single that will take control if source Single encounters an error. * @return the original Single, with appropriately modified behavior. * @see <a href="http://reactivex.io/documentation/operators/catch.html">ReactiveX operators documentation: Catch</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorResumeNext(final Single<? extends T> resumeSingleInCaseOfError) { ObjectHelper.requireNonNull(resumeSingleInCaseOfError, "resumeSingleInCaseOfError is null"); return onErrorResumeNext(Functions.justFunction(resumeSingleInCaseOfError)); } /** * Instructs a Single to pass control to another Single rather than invoking * {@link SingleObserver#onError(Throwable)} if it encounters an error. 
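     * <p>
     * A minimal, illustrative sketch that resumes with a fallback value derived from the error (the fallback
     * logic shown is made up):
     * <pre><code>
     * Single.&lt;Integer&gt;error(new IllegalStateException("boom"))
     *     .onErrorResumeNext(e -&gt; Single.just(e instanceof IllegalStateException ? -1 : 0))
     *     .test()
     *     .assertResult(-1);
     * </code></pre>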
     * <p>
     * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorResumeNext.f.png" alt="">
     * <p>
     * By default, when a Single encounters an error that prevents it from emitting the expected item to
     * its {@link SingleObserver}, the Single invokes its SingleObserver's {@code onError} method, and then quits
     * without invoking any more of its SingleObserver's methods. The {@code onErrorResumeNext} method changes this
     * behavior. If you pass a function that will return another Single ({@code resumeFunctionInCaseOfError}) to a Single's
     * {@code onErrorResumeNext} method, if the original Single encounters an error, instead of invoking its
     * SingleObserver's {@code onError} method, it will relinquish control to the Single returned by
     * {@code resumeFunctionInCaseOfError}, which will invoke the SingleObserver's {@link SingleObserver#onSuccess onSuccess}
     * method if it is able to do so. In such a case,
     * because no Single necessarily invokes {@code onError}, the SingleObserver may never know that an error
     * happened.
     * <p>
     * You can use this to prevent errors from propagating or to supply fallback data should errors be
     * encountered.
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code onErrorResumeNext} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     *
     * @param resumeFunctionInCaseOfError a function that returns a Single that will take control if source Single encounters an error.
     * @return the original Single, with appropriately modified behavior.
     * @see <a href="http://reactivex.io/documentation/operators/catch.html">ReactiveX operators documentation: Catch</a>
     * @since 2.0
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Single<T> onErrorResumeNext(
            final Function<? super Throwable, ? extends SingleSource<? extends T>> resumeFunctionInCaseOfError) {
        ObjectHelper.requireNonNull(resumeFunctionInCaseOfError, "resumeFunctionInCaseOfError is null");
        return RxJavaPlugins.onAssembly(new SingleResumeNext<T>(this, resumeFunctionInCaseOfError));
    }

    /**
     * Nulls out references to the upstream producer and downstream SingleObserver if
     * the sequence is terminated or downstream calls dispose().
     * <dl>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code onTerminateDetach} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * <p>History: 2.1.5 - experimental
     * @return a Single which nulls out references to the upstream producer and downstream SingleObserver if
     * the sequence is terminated or downstream calls dispose()
     * @since 2.2
     */
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Single<T> onTerminateDetach() {
        return RxJavaPlugins.onAssembly(new SingleDetach<T>(this));
    }

    /**
     * Repeatedly re-subscribes to the current Single and emits each success value.
     * <p>
     * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeat.png" alt="">
     * <dl>
     * <dt><b>Backpressure:</b></dt>
     * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd>
     * <dt><b>Scheduler:</b></dt>
     * <dd>{@code repeat} does not operate by default on a particular {@link Scheduler}.</dd>
     * </dl>
     * @return the new Flowable instance
     * @since 2.0
     */
    @BackpressureSupport(BackpressureKind.FULL)
    @CheckReturnValue
    @SchedulerSupport(SchedulerSupport.NONE)
    public final Flowable<T> repeat() {
        return toFlowable().repeat();
    }

    /**
     * Re-subscribes to the current Single at most the given number of times and emits each success value.
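     * <p>
     * An illustrative sketch (the value and count are arbitrary):
     * <pre><code>
     * Single.just("tick")
     *     .repeat(3)
     *     .test()
     *     .assertResult("tick", "tick", "tick");
     * </code></pre>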
* <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeat.n.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param times the number of times to re-subscribe to the current Single * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeat(long times) { return toFlowable().repeat(times); } /** * Re-subscribes to the current Single if * the Publisher returned by the handler function signals a value in response to a * value signalled through the Flowable the handle receives. * <p> * <img width="640" height="1478" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeatWhen.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer. * The {@code Publisher} returned by the handler function is expected to honor backpressure as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeatWhen} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param handler the function that is called with a Flowable that signals a value when the Single * signalled a success value and returns a Publisher that has to signal a value to * trigger a resubscription to the current Single, otherwise the terminal signal of * the Publisher will be the terminal signal of the sequence as well. * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeatWhen(Function<? super Flowable<Object>, ? extends Publisher<?>> handler) { return toFlowable().repeatWhen(handler); } /** * Re-subscribes to the current Single until the given BooleanSupplier returns true. * <p> * <img width="640" height="463" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeatUntil.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeatUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param stop the BooleanSupplier called after the current Single succeeds and if returns false, * the Single is re-subscribed; otherwise the sequence completes. * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeatUntil(BooleanSupplier stop) { return toFlowable().repeatUntil(stop); } /** * Repeatedly re-subscribes to the current Single indefinitely if it fails with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry() { return toSingle(toFlowable().retry()); } /** * Repeatedly re-subscribe at most the specified times to the current Single * if it fails with an onError. 
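     * <p>
     * An illustrative sketch with a source that fails a couple of times before succeeding (the counter-based
     * failure below is made up to demonstrate the re-subscription):
     * <pre><code>
     * AtomicInteger attempts = new AtomicInteger();
     *
     * Single.fromCallable(() -&gt; {
     *     if (attempts.incrementAndGet() &lt; 3) {
     *         throw new IllegalStateException("try again");
     *     }
     *     return "done";
     * })
     * .retry(5)
     * .test()
     * .assertResult("done");
     * </code></pre>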
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param times the number of times to resubscribe if the current Single fails * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(long times) { return toSingle(toFlowable().retry(times)); } /** * Re-subscribe to the current Single if the given predicate returns true when the Single fails * with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param predicate the predicate called with the resubscription count and the failure Throwable * and should return true if a resubscription should happen * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(BiPredicate<? super Integer, ? super Throwable> predicate) { return toSingle(toFlowable().retry(predicate)); } /** * Repeatedly re-subscribe at most times or until the predicate returns false, whichever happens first * if it fails with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.8 - experimental * @param times the number of times to resubscribe if the current Single fails * @param predicate the predicate called with the failure Throwable * and should return true if a resubscription should happen * @return the new Single instance * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(long times, Predicate<? super Throwable> predicate) { return toSingle(toFlowable().retry(times, predicate)); } /** * Re-subscribe to the current Single if the given predicate returns true when the Single fails * with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param predicate the predicate called with the failure Throwable * and should return true if a resubscription should happen * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(Predicate<? super Throwable> predicate) { return toSingle(toFlowable().retry(predicate)); } /** * Re-subscribes to the current Single if and when the Publisher returned by the handler * function signals a value. * <p> * If the Publisher signals an onComplete, the resulting Single will signal a NoSuchElementException. * <p> * Note that the inner {@code Publisher} returned by the handler function should signal * either {@code onNext}, {@code onError} or {@code onComplete} in response to the received * {@code Throwable} to indicate the operator should retry or terminate. If the upstream to * the operator is asynchronous, signalling onNext followed by onComplete immediately may * result in the sequence to be completed immediately. Similarly, if this inner * {@code Publisher} signals {@code onError} or {@code onComplete} while the upstream is * active, the sequence is terminated with the same signal immediately. 
* <p> * The following example demonstrates how to retry an asynchronous source with a delay: * <pre><code> * Single.timer(1, TimeUnit.SECONDS) * .doOnSubscribe(s -&gt; System.out.println("subscribing")) * .map(v -&gt; { throw new RuntimeException(); }) * .retryWhen(errors -&gt; { * AtomicInteger counter = new AtomicInteger(); * return errors * .takeWhile(e -&gt; counter.getAndIncrement() != 3) * .flatMap(e -&gt; { * System.out.println("delay retry by " + counter.get() + " second(s)"); * return Flowable.timer(counter.get(), TimeUnit.SECONDS); * }); * }) * .blockingGet(); * </code></pre> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retryWhen} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param handler the function that receives a Flowable of the error the Single emits and should * return a Publisher that should signal a normal value (in response to the * throwable the Flowable emits) to trigger a resubscription or signal an error to * be the output of the resulting Single * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retryWhen(Function<? super Flowable<Throwable>, ? extends Publisher<?>> handler) { return toSingle(toFlowable().retryWhen(handler)); } /** * Subscribes to a Single but ignore its emission or notification. * <p> * If the Single emits an error, it is wrapped into an * {@link io.reactivex.exceptions.OnErrorNotImplementedException OnErrorNotImplementedException} * and routed to the RxJavaPlugins.onError handler. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Disposable} reference can request the {@link Single} stop work. * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> */ @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe() { return subscribe(Functions.emptyConsumer(), Functions.ON_ERROR_MISSING); } /** * Subscribes to a Single and provides a composite callback to handle the item it emits * or any error notification it issues. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param onCallback * the callback that receives either the success value or the failure Throwable * (whichever is not null) * @return a {@link Disposable} reference can request the {@link Single} stop work. * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> * @throws NullPointerException * if {@code onCallback} is null */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe(final BiConsumer<? super T, ? super Throwable> onCallback) { ObjectHelper.requireNonNull(onCallback, "onCallback is null"); BiConsumerSingleObserver<T> observer = new BiConsumerSingleObserver<T>(onCallback); subscribe(observer); return observer; } /** * Subscribes to a Single and provides a callback to handle the item it emits. * <p> * If the Single emits an error, it is wrapped into an * {@link io.reactivex.exceptions.OnErrorNotImplementedException OnErrorNotImplementedException} * and routed to the RxJavaPlugins.onError handler. 
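     * <p>
     * A minimal, illustrative sketch (the printed message is arbitrary):
     * <pre><code>
     * Disposable d = Single.just("value")
     *     .subscribe(v -&gt; System.out.println("got " + v));
     *
     * // the returned Disposable can be used to stop listening if the value is no longer needed
     * d.dispose();
     * </code></pre>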
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param onSuccess * the {@code Consumer<T>} you have designed to accept the emission from the Single * @return a {@link Disposable} reference can request the {@link Single} stop work. * @throws NullPointerException * if {@code onSuccess} is null * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe(Consumer<? super T> onSuccess) { return subscribe(onSuccess, Functions.ON_ERROR_MISSING); } /** * Subscribes to a Single and provides callbacks to handle the item it emits or any error notification it * issues. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param onSuccess * the {@code Consumer<T>} you have designed to accept the emission from the Single * @param onError * the {@code Consumer<Throwable>} you have designed to accept any error notification from the * Single * @return a {@link Disposable} reference can request the {@link Single} stop work. * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> * @throws NullPointerException * if {@code onSuccess} is null, or * if {@code onError} is null */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe(final Consumer<? super T> onSuccess, final Consumer<? super Throwable> onError) { ObjectHelper.requireNonNull(onSuccess, "onSuccess is null"); ObjectHelper.requireNonNull(onError, "onError is null"); ConsumerSingleObserver<T> observer = new ConsumerSingleObserver<T>(onSuccess, onError); subscribe(observer); return observer; } @SchedulerSupport(SchedulerSupport.NONE) @Override public final void subscribe(SingleObserver<? super T> observer) { ObjectHelper.requireNonNull(observer, "subscriber is null"); observer = RxJavaPlugins.onSubscribe(this, observer); ObjectHelper.requireNonNull(observer, "subscriber returned by the RxJavaPlugins hook is null"); try { subscribeActual(observer); } catch (NullPointerException ex) { throw ex; } catch (Throwable ex) { Exceptions.throwIfFatal(ex); NullPointerException npe = new NullPointerException("subscribeActual failed"); npe.initCause(ex); throw npe; } } /** * Implement this method in subclasses to handle the incoming {@link SingleObserver}s. * <p>There is no need to call any of the plugin hooks on the current {@code Single} instance or * the {@code SingleObserver}; all hooks and basic safeguards have been * applied by {@link #subscribe(SingleObserver)} before this method gets called. * @param observer the SingleObserver to handle, not null */ protected abstract void subscribeActual(@NonNull SingleObserver<? super T> observer); /** * Subscribes a given SingleObserver (subclass) to this Single and returns the given * SingleObserver as is. * <p>Usage example: * <pre><code> * Single&lt;Integer&gt; source = Single.just(1); * CompositeDisposable composite = new CompositeDisposable(); * * DisposableSingleObserver&lt;Integer&gt; ds = new DisposableSingleObserver&lt;&gt;() { * // ... 
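     *     // illustrative implementations of the two abstract methods of DisposableSingleObserver;
     *     // a real observer would put its own logic here
     *     &#64;Override
     *     public void onSuccess(Integer value) {
     *         System.out.println("got " + value);
     *     }
     *
     *     &#64;Override
     *     public void onError(Throwable e) {
     *         e.printStackTrace();
     *     }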
* }; * * composite.add(source.subscribeWith(ds)); * </code></pre> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribeWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <E> the type of the SingleObserver to use and return * @param observer the SingleObserver (subclass) to use and return, not null * @return the input {@code observer} * @throws NullPointerException if {@code observer} is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <E extends SingleObserver<? super T>> E subscribeWith(E observer) { subscribe(observer); return observer; } /** * Asynchronously subscribes subscribers to this Single on the specified {@link Scheduler}. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.subscribeOn.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>You specify which {@link Scheduler} this operator will use.</dd> * </dl> * * @param scheduler * the {@link Scheduler} to perform subscription actions on * @return the source Single modified so that its subscriptions happen on the specified {@link Scheduler} * @see <a href="http://reactivex.io/documentation/operators/subscribeon.html">ReactiveX operators documentation: SubscribeOn</a> * @see <a href="http://www.grahamlea.com/2014/07/rxjava-threading-examples/">RxJava Threading Examples</a> * @see #observeOn */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> subscribeOn(final Scheduler scheduler) { ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleSubscribeOn<T>(this, scheduler)); } /** * Returns a Single that emits the item emitted by the source Single until a Completable terminates. Upon * termination of {@code other}, this will emit a {@link CancellationException} rather than go to * {@link SingleObserver#onSuccess(Object)}. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/takeUntil.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code takeUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * the Completable whose termination will cause {@code takeUntil} to emit the item from the source * Single * @return a Single that emits the item emitted by the source Single until such time as {@code other} terminates. * @see <a href="http://reactivex.io/documentation/operators/takeuntil.html">ReactiveX operators documentation: TakeUntil</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> takeUntil(final CompletableSource other) { ObjectHelper.requireNonNull(other, "other is null"); return takeUntil(new CompletableToFlowable<T>(other)); } /** * Returns a Single that emits the item emitted by the source Single until a Publisher emits an item. Upon * emission of an item from {@code other}, this will emit a {@link CancellationException} rather than go to * {@link SingleObserver#onSuccess(Object)}. 
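     * <p>
     * An illustrative sketch (the timings are arbitrary): the {@code other} Publisher fires before the source
     * can deliver its value, so the downstream receives a {@link CancellationException}:
     * <pre><code>
     * Single.timer(1, TimeUnit.MINUTES)
     *     .takeUntil(Flowable.timer(100, TimeUnit.MILLISECONDS))
     *     .test()
     *     .awaitDone(5, TimeUnit.SECONDS)
     *     .assertFailure(CancellationException.class);
     * </code></pre>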
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/takeUntil.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The {@code other} publisher is consumed in an unbounded fashion but will be * cancelled after the first item it produced.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code takeUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * the Publisher whose first emitted item will cause {@code takeUntil} to emit the item from the source * Single * @param <E> * the type of items emitted by {@code other} * @return a Single that emits the item emitted by the source Single until such time as {@code other} emits * its first item * @see <a href="http://reactivex.io/documentation/operators/takeuntil.html">ReactiveX operators documentation: TakeUntil</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <E> Single<T> takeUntil(final Publisher<E> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleTakeUntil<T, E>(this, other)); } /** * Returns a Single that emits the item emitted by the source Single until a second Single emits an item. Upon * emission of an item from {@code other}, this will emit a {@link CancellationException} rather than go to * {@link SingleObserver#onSuccess(Object)}. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/takeUntil.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code takeUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * the Single whose emitted item will cause {@code takeUntil} to emit the item from the source Single * @param <E> * the type of item emitted by {@code other} * @return a Single that emits the item emitted by the source Single until such time as {@code other} emits its item * @see <a href="http://reactivex.io/documentation/operators/takeuntil.html">ReactiveX operators documentation: TakeUntil</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <E> Single<T> takeUntil(final SingleSource<? extends E> other) { ObjectHelper.requireNonNull(other, "other is null"); return takeUntil(new SingleToFlowable<E>(other)); } /** * Signals a TimeoutException if the current Single doesn't signal a success value within the * specified timeout window. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} signals the TimeoutException on the {@code computation} {@link Scheduler}.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> timeout(long timeout, TimeUnit unit) { return timeout0(timeout, unit, Schedulers.computation(), null); } /** * Signals a TimeoutException if the current Single doesn't signal a success value within the * specified timeout window. 
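     * <p>
     * An illustrative sketch using a {@code TestScheduler} (the one-second timeout and the never-completing
     * source are made up for the example):
     * <pre><code>
     * TestScheduler scheduler = new TestScheduler();
     *
     * TestObserver&lt;String&gt; to = Single.&lt;String&gt;never()
     *     .timeout(1, TimeUnit.SECONDS, scheduler)
     *     .test();
     *
     * scheduler.advanceTimeBy(1, TimeUnit.SECONDS);
     *
     * to.assertFailure(TimeoutException.class);
     * </code></pre>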
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} signals the TimeoutException on the {@link Scheduler} you specify.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @param scheduler the target scheduler where the timeout is awaited and the TimeoutException * signalled * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler) { return timeout0(timeout, unit, scheduler, null); } /** * Runs the current Single and if it doesn't signal within the specified timeout window, it is * cancelled and the other SingleSource subscribed to. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} subscribes to the other SingleSource on the {@link Scheduler} you specify.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @param scheduler the scheduler where the timeout is awaited and the subscription to other happens * @param other the other SingleSource that gets subscribed to if the current Single times out * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler, SingleSource<? extends T> other) { ObjectHelper.requireNonNull(other, "other is null"); return timeout0(timeout, unit, scheduler, other); } /** * Runs the current Single and if it doesn't signal within the specified timeout window, it is * cancelled and the other SingleSource subscribed to. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} subscribes to the other SingleSource on * the {@code computation} {@link Scheduler}.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @param other the other SingleSource that gets subscribed to if the current Single times out * @return the new Single instance * @throws NullPointerException * if other is null, or * if unit is null, or * if scheduler is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> timeout(long timeout, TimeUnit unit, SingleSource<? extends T> other) { ObjectHelper.requireNonNull(other, "other is null"); return timeout0(timeout, unit, Schedulers.computation(), other); } private Single<T> timeout0(final long timeout, final TimeUnit unit, final Scheduler scheduler, final SingleSource<? extends T> other) { ObjectHelper.requireNonNull(unit, "unit is null"); ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleTimeout<T>(this, timeout, unit, scheduler, other)); } /** * Calls the specified converter function with the current Single instance * during assembly time and returns its result. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code to} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <R> the result type * @param convert the function that is called with the current Single instance during * assembly time that should return some value to be the result * * @return the value returned by the convert function */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> R to(Function<? 
super Single<T>, R> convert) { try { return ObjectHelper.requireNonNull(convert, "convert is null").apply(this); } catch (Throwable ex) { Exceptions.throwIfFatal(ex); throw ExceptionHelper.wrapOrThrow(ex); } } /** * Returns a {@link Completable} that discards result of the {@link Single} * and calls {@code onComplete} when this source {@link Single} calls * {@code onSuccess}. Error terminal event is propagated. * <p> * <img width="640" height="436" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toCompletable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toCompletable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Completable} that calls {@code onComplete} on it's subscriber when the source {@link Single} * calls {@code onSuccess}. * @since 2.0 * @deprecated see {@link #ignoreElement()} instead, will be removed in 3.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @Deprecated public final Completable toCompletable() { return RxJavaPlugins.onAssembly(new CompletableFromSingle<T>(this)); } /** * Returns a {@link Completable} that ignores the success value of this {@link Single} * and calls {@code onComplete} instead on the returned {@code Completable}. * <p> * <img width="640" height="436" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.ignoreElement.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code ignoreElement} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Completable} that calls {@code onComplete} on it's observer when the source {@link Single} * calls {@code onSuccess}. * @since 2.1.13 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Completable ignoreElement() { return RxJavaPlugins.onAssembly(new CompletableFromSingle<T>(this)); } /** * Converts this Single into a {@link Flowable}. * <p> * <img width="640" height="462" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toFlowable.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code toFlowable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Flowable} that emits a single item T or an error. */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Flowable<T> toFlowable() { if (this instanceof FuseToFlowable) { return ((FuseToFlowable<T>)this).fuseToFlowable(); } return RxJavaPlugins.onAssembly(new SingleToFlowable<T>(this)); } /** * Returns a {@link Future} representing the single value emitted by this {@code Single}. * <p> * <img width="640" height="467" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/Single.toFuture.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toFuture} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Future} that expects a single item to be emitted by this {@code Single} * @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX documentation: To</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Future<T> toFuture() { return subscribeWith(new FutureSingleObserver<T>()); } /** * Converts this Single into a {@link Maybe}. 
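     * <p>
     * A minimal, illustrative sketch (the value is arbitrary):
     * <pre><code>
     * Maybe&lt;Integer&gt; m = Single.just(1).toMaybe();
     *
     * m.test().assertResult(1);
     * </code></pre>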
* <p> * <img width="640" height="463" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toMaybe.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toMaybe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Maybe} that emits a single item T or an error. */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Maybe<T> toMaybe() { if (this instanceof FuseToMaybe) { return ((FuseToMaybe<T>)this).fuseToMaybe(); } return RxJavaPlugins.onAssembly(new MaybeFromSingle<T>(this)); } /** * Converts this Single into an {@link Observable}. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return an {@link Observable} that emits a single item T or an error. */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Observable<T> toObservable() { if (this instanceof FuseToObservable) { return ((FuseToObservable<T>)this).fuseToObservable(); } return RxJavaPlugins.onAssembly(new SingleToObservable<T>(this)); } /** * Returns a Single which makes sure when a SingleObserver disposes the Disposable, * that call is propagated up on the specified scheduler. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code unsubscribeOn} calls dispose() of the upstream on the {@link Scheduler} you specify.</dd> * </dl> * <p>History: 2.0.9 - experimental * @param scheduler the target scheduler where to execute the cancellation * @return the new Single instance * @throws NullPointerException if scheduler is null * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> unsubscribeOn(final Scheduler scheduler) { ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleUnsubscribeOn<T>(this, scheduler)); } /** * Returns a Single that emits the result of applying a specified function to the pair of items emitted by * the source Single and another specified Single. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zipWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <U> * the type of items emitted by the {@code other} Single * @param <R> * the type of items emitted by the resulting Single * @param other * the other SingleSource * @param zipper * a function that combines the pairs of items from the two SingleSources to generate the items to * be emitted by the resulting Single * @return a Single that pairs up values from the source Single and the {@code other} SingleSource * and emits the results of {@code zipFunction} applied to these pairs * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U, R> Single<R> zipWith(SingleSource<U> other, BiFunction<? super T, ? super U, ? 
extends R> zipper) { return zip(this, other, zipper); } // ------------------------------------------------------------------------- // Fluent test support, super handy and reduces test preparation boilerplate // ------------------------------------------------------------------------- /** * Creates a TestObserver and subscribes * it to this Single. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code test} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new TestObserver instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final TestObserver<T> test() { TestObserver<T> to = new TestObserver<T>(); subscribe(to); return to; } /** * Creates a TestObserver optionally in cancelled state, then subscribes it to this Single. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code test} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param cancelled if true, the TestObserver will be cancelled before subscribing to this * Single. * @return the new TestObserver instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final TestObserver<T> test(boolean cancelled) { TestObserver<T> to = new TestObserver<T>(); if (cancelled) { to.cancel(); } subscribe(to); return to; } private static <T> Single<T> toSingle(Flowable<T> source) { return RxJavaPlugins.onAssembly(new FlowableSingleSingle<T>(source, null)); } }
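// A minimal sketch of the fluent test support documented above, assuming a
// trivial just() source and a hypothetical expected value:
//
//     Single.just(1)
//             .test()
//             .assertResult(1);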
src/main/java/io/reactivex/Single.java
/** * Copyright (c) 2016-present, RxJava Contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See * the License for the specific language governing permissions and limitations under the License. */ package io.reactivex; import java.util.NoSuchElementException; import java.util.concurrent.*; import org.reactivestreams.Publisher; import io.reactivex.annotations.*; import io.reactivex.disposables.Disposable; import io.reactivex.exceptions.Exceptions; import io.reactivex.functions.*; import io.reactivex.internal.functions.*; import io.reactivex.internal.fuseable.*; import io.reactivex.internal.observers.*; import io.reactivex.internal.operators.completable.*; import io.reactivex.internal.operators.flowable.*; import io.reactivex.internal.operators.maybe.*; import io.reactivex.internal.operators.mixed.*; import io.reactivex.internal.operators.observable.*; import io.reactivex.internal.operators.single.*; import io.reactivex.internal.util.*; import io.reactivex.observers.TestObserver; import io.reactivex.plugins.RxJavaPlugins; import io.reactivex.schedulers.Schedulers; /** * The {@code Single} class implements the Reactive Pattern for a single value response. * <p> * {@code Single} behaves similarly to {@link Observable} except that it can only emit either a single successful * value or an error (there is no "onComplete" notification as there is for an {@link Observable}). * <p> * The {@code Single} class implements the {@link SingleSource} base interface and the default consumer * type it interacts with is the {@link SingleObserver} via the {@link #subscribe(SingleObserver)} method. * <p> * The {@code Single} operates with the following sequential protocol: * <pre> * <code>onSubscribe (onSuccess | onError)?</code> * </pre> * <p> * Note that {@code onSuccess} and {@code onError} are mutually exclusive events; unlike {@code Observable}, * {@code onSuccess} is never followed by {@code onError}. * <p> * Like {@code Observable}, a running {@code Single} can be stopped through the {@link Disposable} instance * provided to consumers through {@link SingleObserver#onSubscribe}. * <p> * Like an {@code Observable}, a {@code Single} is lazy, can be either "hot" or "cold", synchronous or * asynchronous. {@code Single} instances returned by the methods of this class are <em>cold</em> * and there is a standard <em>hot</em> implementation in the form of a subject: * {@link io.reactivex.subjects.SingleSubject SingleSubject}. * <p> * The documentation for this class makes use of marble diagrams. The following legend explains these diagrams: * <p> * <img width="640" height="301" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.legend.png" alt=""> * <p> * See {@link Flowable} or {@link Observable} for the * implementation of the Reactive Pattern for a stream or vector of values. * <p> * For more information see the <a href="http://reactivex.io/documentation/single.html">ReactiveX * documentation</a>. 
* <p> * Example: * <pre><code> * Disposable d = Single.just("Hello World") * .delay(10, TimeUnit.SECONDS, Schedulers.io()) * .subscribeWith(new DisposableSingleObserver&lt;String&gt;() { * &#64;Override * public void onStart() { * System.out.println("Started"); * } * * &#64;Override * public void onSuccess(String value) { * System.out.println("Success: " + value); * } * * &#64;Override * public void onError(Throwable error) { * error.printStackTrace(); * } * }); * * Thread.sleep(5000); * * d.dispose(); * </code></pre> * <p> * Note that by design, subscriptions via {@link #subscribe(SingleObserver)} can't be cancelled/disposed * from the outside (hence the * {@code void} return of the {@link #subscribe(SingleObserver)} method) and it is the * responsibility of the implementor of the {@code SingleObserver} to allow this to happen. * RxJava supports such usage with the standard * {@link io.reactivex.observers.DisposableSingleObserver DisposableSingleObserver} instance. * For convenience, the {@link #subscribeWith(SingleObserver)} method is provided as well to * allow working with a {@code SingleObserver} (or subclass) instance to be applied with in * a fluent manner (such as in the example above). * @param <T> * the type of the item emitted by the Single * @since 2.0 * @see io.reactivex.observers.DisposableSingleObserver */ public abstract class Single<T> implements SingleSource<T> { /** * Runs multiple SingleSources and signals the events of the first one that signals (cancelling * the rest). * <p> * <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.amb.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code amb} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Iterable sequence of sources. A subscription to each source will * occur in the same order as in this Iterable. * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> amb(final Iterable<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new SingleAmb<T>(null, sources)); } /** * Runs multiple SingleSources and signals the events of the first one that signals (cancelling * the rest). * <p> * <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.ambArray.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code ambArray} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the array of sources. A subscription to each source will * occur in the same order as in this array. * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Single<T> ambArray(final SingleSource<? extends T>... sources) { if (sources.length == 0) { return error(SingleInternalHelper.<T>emptyThrower()); } if (sources.length == 1) { return wrap((SingleSource<T>)sources[0]); } return RxJavaPlugins.onAssembly(new SingleAmb<T>(sources, null)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * an Iterable sequence. 
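 * <p>
 * A minimal sketch (sources and values are hypothetical):
 * <pre><code>
 * Flowable&lt;Integer&gt; all = Single.concat(
 *         Arrays.asList(Single.just(1), Single.just(2)));
 *
 * all.subscribe(System.out::println); // prints 1, then 2
 * </code></pre>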
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Iterable sequence of SingleSource instances * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @BackpressureSupport(BackpressureKind.FULL) public static <T> Flowable<T> concat(Iterable<? extends SingleSource<? extends T>> sources) { return concat(Flowable.fromIterable(sources)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * an Observable sequence. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the ObservableSource of SingleSource instances * @return the new Observable instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Observable<T> concat(ObservableSource<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new ObservableConcatMap(sources, SingleInternalHelper.toObservable(), 2, ErrorMode.IMMEDIATE)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * a Publisher sequence. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer * and the sources {@code Publisher} is expected to honor it as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Publisher of SingleSource instances * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concat(Publisher<? extends SingleSource<? extends T>> sources) { return concat(sources, 2); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided by * a Publisher sequence and prefetched by the specified amount. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer * and the sources {@code Publisher} is expected to honor it as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the Publisher of SingleSource instances * @param prefetch the number of SingleSources to prefetch from the Publisher * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> concat(Publisher<? extends SingleSource<? 
extends T>> sources, int prefetch) { ObjectHelper.requireNonNull(sources, "sources is null"); ObjectHelper.verifyPositive(prefetch, "prefetch"); return RxJavaPlugins.onAssembly(new FlowableConcatMapPublisher(sources, SingleInternalHelper.toFlowable(), prefetch, ErrorMode.IMMEDIATE)); } /** * Returns a Flowable that emits the items emitted by two Singles, one after the other. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T> the common value type * @param source1 * a Single to be concatenated * @param source2 * a Single to be concatenated * @return a Flowable that emits items emitted by the two source Singles, one after the other. * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> concat( SingleSource<? extends T> source1, SingleSource<? extends T> source2 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return concat(Flowable.fromArray(source1, source2)); } /** * Returns a Flowable that emits the items emitted by three Singles, one after the other. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T> the common value type * @param source1 * a Single to be concatenated * @param source2 * a Single to be concatenated * @param source3 * a Single to be concatenated * @return a Flowable that emits items emitted by the three source Singles, one after the other. * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> concat( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return concat(Flowable.fromArray(source1, source2, source3)); } /** * Returns a Flowable that emits the items emitted by four Singles, one after the other. 
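 * <p>
 * A minimal sketch with four hypothetical sources:
 * <pre><code>
 * Single.concat(Single.just(1), Single.just(2), Single.just(3), Single.just(4))
 *     .subscribe(System.out::println); // 1, 2, 3, 4 in subscription order
 * </code></pre>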
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concat.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T> the common value type * @param source1 * a Single to be concatenated * @param source2 * a Single to be concatenated * @param source3 * a Single to be concatenated * @param source4 * a Single to be concatenated * @return a Flowable that emits items emitted by the four source Singles, one after the other. * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> concat( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3, SingleSource<? extends T> source4 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return concat(Flowable.fromArray(source1, source2, source3, source4)); } /** * Concatenate the single values, in a non-overlapping fashion, of the SingleSources provided in * an array. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concatArray} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources the array of SingleSource instances * @return the new Flowable instance * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> concatArray(SingleSource<? extends T>... sources) { return RxJavaPlugins.onAssembly(new FlowableConcatMap(Flowable.fromArray(sources), SingleInternalHelper.toFlowable(), 2, ErrorMode.BOUNDARY)); } /** * Concatenates a sequence of SingleSource eagerly into a single stream of values. * <p> * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the * source SingleSources. The operator buffers the value emitted by these SingleSources and then drains them * in order, each one after the previous one completes. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The operator honors backpressure from downstream.</dd> * <dt><b>Scheduler:</b></dt> * <dd>This method does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources a sequence of Single that need to be eagerly concatenated * @return the new Flowable instance with the specified concatenation behavior */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concatArrayEager(SingleSource<? extends T>... sources) { return Flowable.fromArray(sources).concatMapEager(SingleInternalHelper.<T>toFlowable()); } /** * Concatenates a Publisher sequence of SingleSources eagerly into a single stream of values. 
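 * <p>
 * A minimal sketch of the eager behavior described below (the delay is hypothetical
 * and only there to make the up-front subscription visible):
 * <pre><code>
 * Flowable&lt;Integer&gt; values = Single.concatEager(Flowable.just(
 *         Single.just(1).delay(1, TimeUnit.SECONDS),
 *         Single.just(2)));
 *
 * values.subscribe(System.out::println); // both sources run at once; output is still 1, then 2
 * </code></pre>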
* <p> * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the * emitted source Publishers as they are observed. The operator buffers the values emitted by these * Publishers and then drains them in order, each one after the previous one completes. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>Backpressure is honored towards the downstream and the outer Publisher is * expected to support backpressure. Violating this assumption, the operator will * signal {@link io.reactivex.exceptions.MissingBackpressureException}.</dd> * <dt><b>Scheduler:</b></dt> * <dd>This method does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources a sequence of Publishers that need to be eagerly concatenated * @return the new Publisher instance with the specified concatenation behavior */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concatEager(Publisher<? extends SingleSource<? extends T>> sources) { return Flowable.fromPublisher(sources).concatMapEager(SingleInternalHelper.<T>toFlowable()); } /** * Concatenates a sequence of SingleSources eagerly into a single stream of values. * <p> * Eager concatenation means that once a subscriber subscribes, this operator subscribes to all of the * source SingleSources. The operator buffers the values emitted by these SingleSources and then drains them * in order, each one after the previous one completes. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>Backpressure is honored towards the downstream.</dd> * <dt><b>Scheduler:</b></dt> * <dd>This method does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param sources a sequence of SingleSource that need to be eagerly concatenated * @return the new Flowable instance with the specified concatenation behavior */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> concatEager(Iterable<? extends SingleSource<? extends T>> sources) { return Flowable.fromIterable(sources).concatMapEager(SingleInternalHelper.<T>toFlowable()); } /** * Provides an API (via a cold Completable) that bridges the reactive world with the callback-style world. * <p> * Example: * <pre><code> * Single.&lt;Event&gt;create(emitter -&gt; { * Callback listener = new Callback() { * &#64;Override * public void onEvent(Event e) { * emitter.onSuccess(e); * } * * &#64;Override * public void onFailure(Exception e) { * emitter.onError(e); * } * }; * * AutoCloseable c = api.someMethod(listener); * * emitter.setCancellable(c::close); * * }); * </code></pre> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code create} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param source the emitter that is called when a SingleObserver subscribes to the returned {@code Single} * @return the new Single instance * @see SingleOnSubscribe * @see Cancellable */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> create(SingleOnSubscribe<T> source) { ObjectHelper.requireNonNull(source, "source is null"); return RxJavaPlugins.onAssembly(new SingleCreate<T>(source)); } /** * Calls a {@link Callable} for each individual {@link SingleObserver} to return the actual {@link SingleSource} to * be subscribed to. 
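 * <p>
 * A minimal sketch (the deferred work shown is hypothetical):
 * <pre><code>
 * Single&lt;Long&gt; timestamp = Single.defer(() -&gt;
 *         Single.just(System.currentTimeMillis()));
 *
 * // each subscription invokes the callable and captures its own timestamp
 * </code></pre>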
* <p> * <img width="640" height="515" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.defer.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code defer} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param singleSupplier the {@code Callable} that is called for each individual {@code SingleObserver} and * returns a SingleSource instance to subscribe to * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> defer(final Callable<? extends SingleSource<? extends T>> singleSupplier) { ObjectHelper.requireNonNull(singleSupplier, "singleSupplier is null"); return RxJavaPlugins.onAssembly(new SingleDefer<T>(singleSupplier)); } /** * Signals a Throwable returned by the callback function for each individual SingleObserver. * <p> * <img width="640" height="283" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.error.c.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code error} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param errorSupplier the callable that is called for each individual SingleObserver and * returns a Throwable instance to be emitted. * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> error(final Callable<? extends Throwable> errorSupplier) { ObjectHelper.requireNonNull(errorSupplier, "errorSupplier is null"); return RxJavaPlugins.onAssembly(new SingleError<T>(errorSupplier)); } /** * Returns a Single that invokes a subscriber's {@link SingleObserver#onError onError} method when the * subscriber subscribes to it. * <p> * <img width="640" height="283" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.error.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code error} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param exception * the particular Throwable to pass to {@link SingleObserver#onError onError} * @param <T> * the type of the item (ostensibly) emitted by the Single * @return a Single that invokes the subscriber's {@link SingleObserver#onError onError} method when * the subscriber subscribes to it * @see <a href="http://reactivex.io/documentation/operators/empty-never-throw.html">ReactiveX operators documentation: Throw</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> error(final Throwable exception) { ObjectHelper.requireNonNull(exception, "error is null"); return error(Functions.justCallable(exception)); } /** * Returns a {@link Single} that invokes passed function and emits its result for each new SingleObserver that subscribes. * <p> * Allows you to defer execution of passed function until SingleObserver subscribes to the {@link Single}. * It makes passed function "lazy". * Result of the function invocation will be emitted by the {@link Single}. * <p> * <img width="640" height="467" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.fromCallable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromCallable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param callable * function which execution should be deferred, it will be invoked when SingleObserver will subscribe to the {@link Single}. * @param <T> * the type of the item emitted by the {@link Single}. 
* @return a {@link Single} whose {@link SingleObserver}s' subscriptions trigger an invocation of the given function. */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromCallable(final Callable<? extends T> callable) { ObjectHelper.requireNonNull(callable, "callable is null"); return RxJavaPlugins.onAssembly(new SingleFromCallable<T>(callable)); } /** * Converts a {@link Future} into a {@code Single}. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a Single that emits the return * value of the {@link Future#get} method of that object, by passing the object into the {@code from} * method. * <p> * <em>Important note:</em> This Single is blocking; you cannot dispose it. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromFuture} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param future * the source {@link Future} * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromFuture(Future<? extends T> future) { return toSingle(Flowable.<T>fromFuture(future)); } /** * Converts a {@link Future} into a {@code Single}, with a timeout on the Future. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a {@code Single} that emits * the return value of the {@link Future#get} method of that object, by passing the object into the * {@code from} method. * <p> * <em>Important note:</em> This {@code Single} is blocking; you cannot dispose it. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromFuture} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param future * the source {@link Future} * @param timeout * the maximum time to wait before calling {@code get} * @param unit * the {@link TimeUnit} of the {@code timeout} argument * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromFuture(Future<? extends T> future, long timeout, TimeUnit unit) { return toSingle(Flowable.<T>fromFuture(future, timeout, unit)); } /** * Converts a {@link Future} into a {@code Single}, with a timeout on the Future. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a {@code Single} that emits * the return value of the {@link Future#get} method of that object, by passing the object into the * {@code from} method. 
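 * <p>
 * A minimal sketch, assuming an {@code ExecutorService} named {@code executor}
 * already exists:
 * <pre><code>
 * Future&lt;String&gt; future = executor.submit(() -&gt; "done");
 *
 * Single&lt;String&gt; single = Single.fromFuture(future, 5, TimeUnit.SECONDS, Schedulers.io());
 * </code></pre>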
* <p> * <em>Important note:</em> This {@code Single} is blocking; you cannot dispose it. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>You specify the {@link Scheduler} where the blocking wait will happen.</dd> * </dl> * * @param future * the source {@link Future} * @param timeout * the maximum time to wait before calling {@code get} * @param unit * the {@link TimeUnit} of the {@code timeout} argument * @param scheduler * the Scheduler to use for the blocking wait * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public static <T> Single<T> fromFuture(Future<? extends T> future, long timeout, TimeUnit unit, Scheduler scheduler) { return toSingle(Flowable.<T>fromFuture(future, timeout, unit, scheduler)); } /** * Converts a {@link Future}, operating on a specified {@link Scheduler}, into a {@code Single}. * <p> * <img width="640" height="315" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.from.Future.s.png" alt=""> * <p> * You can convert any object that supports the {@link Future} interface into a {@code Single} that emits * the return value of the {@link Future#get} method of that object, by passing the object into the * {@code from} method. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>You specify which {@link Scheduler} this operator will use.</dd> * </dl> * * @param future * the source {@link Future} * @param scheduler * the {@link Scheduler} to wait for the Future on. Use a Scheduler such as * {@link Schedulers#io()} that can block and wait on the Future * @param <T> * the type of object that the {@link Future} returns, and also the type of item to be emitted by * the resulting {@code Single} * @return a {@code Single} that emits the item from the source {@link Future} * @see <a href="http://reactivex.io/documentation/operators/from.html">ReactiveX operators documentation: From</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public static <T> Single<T> fromFuture(Future<? extends T> future, Scheduler scheduler) { return toSingle(Flowable.<T>fromFuture(future, scheduler)); } /** * Wraps a specific Publisher into a Single and signals its single element or error. * <p>If the source Publisher is empty, a NoSuchElementException is signalled. If * the source has more than one element, an IndexOutOfBoundsException is signalled. * <p> * The {@link Publisher} must follow the * <a href="https://github.com/reactive-streams/reactive-streams-jvm#reactive-streams">Reactive-Streams specification</a>. * Violating the specification may result in undefined behavior. * <p> * If possible, use {@link #create(SingleOnSubscribe)} to create a * source-like {@code Single} instead. * <p> * Note that even though {@link Publisher} appears to be a functional interface, it * is not recommended to implement it through a lambda as the specification requires * state management that is not achievable with a stateless lambda. 
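 * <p>
 * A minimal sketch using an existing {@link Flowable} (rather than a hand-written
 * {@code Publisher}) as the source:
 * <pre><code>
 * Single&lt;Integer&gt; single = Single.fromPublisher(Flowable.just(1));
 * </code></pre>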
* <p> * <img width="640" height="322" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.fromPublisher.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The {@code publisher} is consumed in an unbounded fashion but will be cancelled * if it produced more than one item.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromPublisher} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param publisher the source Publisher instance, not null * @return the new Single instance * @see #create(SingleOnSubscribe) */ @BackpressureSupport(BackpressureKind.UNBOUNDED_IN) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromPublisher(final Publisher<? extends T> publisher) { ObjectHelper.requireNonNull(publisher, "publisher is null"); return RxJavaPlugins.onAssembly(new SingleFromPublisher<T>(publisher)); } /** * Wraps a specific ObservableSource into a Single and signals its single element or error. * <p>If the ObservableSource is empty, a NoSuchElementException is signalled. * If the source has more than one element, an IndexOutOfBoundsException is signalled. * <p> * <img width="640" height="343" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.fromObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code fromObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param observableSource the source Observable, not null * @param <T> * the type of the item emitted by the {@link Single}. * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> fromObservable(ObservableSource<? extends T> observableSource) { ObjectHelper.requireNonNull(observableSource, "observableSource is null"); return RxJavaPlugins.onAssembly(new ObservableSingleSingle<T>(observableSource, null)); } /** * Returns a {@code Single} that emits a specified item. * <p> * <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.just.png" alt=""> * <p> * To convert any object into a {@code Single} that emits that object, pass that object into the * {@code just} method. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code just} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param item * the item to emit * @param <T> * the type of that item * @return a {@code Single} that emits {@code item} * @see <a href="http://reactivex.io/documentation/operators/just.html">ReactiveX operators documentation: Just</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> just(final T item) { ObjectHelper.requireNonNull(item, "value is null"); return RxJavaPlugins.onAssembly(new SingleJust<T>(item)); } /** * Merges an Iterable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. 
* If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(Iterable)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * @param <T> the common and resulting value type * @param sources the Iterable sequence of SingleSource sources * @return the new Flowable instance * @since 2.0 * @see #mergeDelayError(Iterable) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> merge(Iterable<? extends SingleSource<? extends T>> sources) { return merge(Flowable.fromIterable(sources)); } /** * Merges a Flowable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(Publisher)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * @param <T> the common and resulting value type * @param sources the Flowable sequence of SingleSource sources * @return the new Flowable instance * @see #mergeDelayError(Publisher) * @since 2.0 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> merge(Publisher<? extends SingleSource<? 
extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new FlowableFlatMapPublisher(sources, SingleInternalHelper.toFlowable(), false, Integer.MAX_VALUE, Flowable.bufferSize())); } /** * Flattens a {@code Single} that emits a {@code Single} into a single {@code Single} that emits the item * emitted by the nested {@code Single}, without any transformation. * <p> * <img width="640" height="370" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.oo.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dd>The resulting {@code Single} emits the outer source's or the inner {@code SingleSource}'s {@code Throwable} as is. * Unlike the other {@code merge()} operators, this operator won't and can't produce a {@code CompositeException} because there is * only one possibility for the outer or the inner {@code SingleSource} to emit an {@code onError} signal. * Therefore, there is no need for a {@code mergeDelayError(SingleSource<SingleSource<T>>)} operator. * </dd> * </dl> * * @param <T> the value type of the sources and the output * @param source * a {@code Single} that emits a {@code Single} * @return a {@code Single} that emits the item that is the result of flattening the {@code Single} emitted * by {@code source} * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Single<T> merge(SingleSource<? extends SingleSource<? extends T>> source) { ObjectHelper.requireNonNull(source, "source is null"); return RxJavaPlugins.onAssembly(new SingleFlatMap<SingleSource<? extends T>, T>(source, (Function)Functions.identity())); } /** * Flattens two Singles into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by * using the {@code merge} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. 
* Use {@link #mergeDelayError(SingleSource, SingleSource)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #mergeDelayError(SingleSource, SingleSource) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> merge( SingleSource<? extends T> source1, SingleSource<? extends T> source2 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return merge(Flowable.fromArray(source1, source2)); } /** * Flattens three Singles into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code merge} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(SingleSource, SingleSource, SingleSource)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #mergeDelayError(SingleSource, SingleSource, SingleSource) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> merge( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? 
extends T> source3 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return merge(Flowable.fromArray(source1, source2, source3)); } /** * Flattens four Singles into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code merge} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code merge} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If any of the source {@code SingleSource}s signal a {@code Throwable} via {@code onError}, the resulting * {@code Flowable} terminates with that {@code Throwable} and all other source {@code SingleSource}s are cancelled. * If more than one {@code SingleSource} signals an error, the resulting {@code Flowable} may terminate with the * first one's error or, depending on the concurrency of the sources, may terminate with a * {@code CompositeException} containing two or more of the various error signals. * {@code Throwable}s that didn't make into the composite will be sent (individually) to the global error handler via * {@link RxJavaPlugins#onError(Throwable)} method as {@code UndeliverableException} errors. Similarly, {@code Throwable}s * signaled by source(s) after the returned {@code Flowable} has been cancelled or terminated with a * (composite) error will be sent to the same global error handler. * Use {@link #mergeDelayError(SingleSource, SingleSource, SingleSource, SingleSource)} to merge sources and terminate only when all source {@code SingleSource}s * have completed or failed with an error. * </dd> * </dl> * * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @param source4 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #mergeDelayError(SingleSource, SingleSource, SingleSource, SingleSource) */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> merge( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3, SingleSource<? extends T> source4 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return merge(Flowable.fromArray(source1, source2, source3, source4)); } /** * Merges an Iterable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once and delaying any error(s) until all sources succeed or fail. 
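 * <p>
 * A minimal sketch (sources and values are hypothetical):
 * <pre><code>
 * Single.mergeDelayError(Arrays.asList(
 *         Single.&lt;Integer&gt;error(new IllegalStateException()),
 *         Single.just(42)))
 *     .subscribe(
 *         v -&gt; System.out.println(v),              // 42 is still delivered
 *         e -&gt; System.out.println("error: " + e)); // the error arrives only after all sources terminated
 * </code></pre>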
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common and resulting value type * @param sources the Iterable sequence of SingleSource sources * @return the new Flowable instance * @see #merge(Iterable) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) public static <T> Flowable<T> mergeDelayError(Iterable<? extends SingleSource<? extends T>> sources) { return mergeDelayError(Flowable.fromIterable(sources)); } /** * Merges a Flowable sequence of SingleSource instances into a single Flowable sequence, * running all SingleSources at once and delaying any error(s) until all sources succeed or fail. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common and resulting value type * @param sources the Flowable sequence of SingleSource sources * @return the new Flowable instance * @see #merge(Publisher) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings({ "unchecked", "rawtypes" }) public static <T> Flowable<T> mergeDelayError(Publisher<? extends SingleSource<? extends T>> sources) { ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new FlowableFlatMapPublisher(sources, SingleInternalHelper.toFlowable(), true, Integer.MAX_VALUE, Flowable.bufferSize())); } /** * Flattens two Singles into a single Flowable, without any transformation, delaying * any error(s) until all sources succeed or fail. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by * using the {@code mergeDelayError} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #merge(SingleSource, SingleSource) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> mergeDelayError( SingleSource<? extends T> source1, SingleSource<? 
extends T> source2 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return mergeDelayError(Flowable.fromArray(source1, source2)); } /** * Flattens three Singles into a single Flowable, without any transformation, delaying * any error(s) until all sources succeed or fail. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code mergeDelayError} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #merge(SingleSource, SingleSource, SingleSource) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> mergeDelayError( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return mergeDelayError(Flowable.fromArray(source1, source2, source3)); } /** * Flattens four Singles into a single Flowable, without any transformation, delaying * any error(s) until all sources succeed or fail. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code mergeDelayError} method. * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeDelayError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.9 - experimental * @param <T> the common value type * @param source1 * a SingleSource to be merged * @param source2 * a SingleSource to be merged * @param source3 * a SingleSource to be merged * @param source4 * a SingleSource to be merged * @return a Flowable that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> * @see #merge(SingleSource, SingleSource, SingleSource, SingleSource) * @since 2.2 */ @CheckReturnValue @BackpressureSupport(BackpressureKind.FULL) @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Flowable<T> mergeDelayError( SingleSource<? extends T> source1, SingleSource<? extends T> source2, SingleSource<? extends T> source3, SingleSource<? 
extends T> source4 ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return mergeDelayError(Flowable.fromArray(source1, source2, source3, source4)); } /** * Returns a singleton instance of a never-signalling Single (only calls onSubscribe). * <p> * <img width="640" height="244" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.never.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code never} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the target value type * @return the singleton never instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T> Single<T> never() { return RxJavaPlugins.onAssembly((Single<T>) SingleNever.INSTANCE); } /** * Signals success with 0L value after the given delay for each SingleObserver. * <p> * <img width="640" height="292" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.timer.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timer} operates by default on the {@code computation} {@link Scheduler}.</dd> * </dl> * @param delay the delay amount * @param unit the time unit of the delay * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public static Single<Long> timer(long delay, TimeUnit unit) { return timer(delay, unit, Schedulers.computation()); } /** * Signals success with 0L value after the given delay for each SingleObserver. * <p> * <img width="640" height="292" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.timer.s.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify the {@link Scheduler} to signal on.</dd> * </dl> * @param delay the delay amount * @param unit the time unit of the delay * @param scheduler the scheduler where the single 0L will be emitted * @return the new Single instance * @throws NullPointerException * if unit is null, or * if scheduler is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public static Single<Long> timer(final long delay, final TimeUnit unit, final Scheduler scheduler) { ObjectHelper.requireNonNull(unit, "unit is null"); ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleTimer(delay, unit, scheduler)); } /** * Compares two SingleSources and emits true if they emit the same value (compared via Object.equals). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code equals} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the common value type * @param first the first SingleSource instance * @param second the second SingleSource instance * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<Boolean> equals(final SingleSource<? extends T> first, final SingleSource<? extends T> second) { // NOPMD ObjectHelper.requireNonNull(first, "first is null"); ObjectHelper.requireNonNull(second, "second is null"); return RxJavaPlugins.onAssembly(new SingleEquals<T>(first, second)); } /** * <strong>Advanced use only:</strong> creates a Single instance without * any safeguards by using a callback that is called with a SingleObserver. 
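     * <p>
     * A minimal illustrative sketch (not from the reference documentation); for most use cases
     * {@link #create(SingleOnSubscribe)} is the safer entry point:
     * <pre><code>
     * Single&lt;String&gt; source = Single.unsafeCreate(observer -&gt; {
     *     // a raw SingleSource has to signal onSubscribe itself and emit exactly once
     *     observer.onSubscribe(Disposables.empty());
     *     observer.onSuccess("value");
     * });
     *
     * source.subscribe(v -&gt; System.out.println(v), e -&gt; e.printStackTrace());
     * </code></pre>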
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code unsafeCreate} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param onSubscribe the function that is called with the subscribing SingleObserver * @return the new Single instance * @throws IllegalArgumentException if {@code source} is a subclass of {@code Single}; such * instances don't need conversion and is possibly a port remnant from 1.x or one should use {@link #hide()} * instead. * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> unsafeCreate(SingleSource<T> onSubscribe) { ObjectHelper.requireNonNull(onSubscribe, "onSubscribe is null"); if (onSubscribe instanceof Single) { throw new IllegalArgumentException("unsafeCreate(Single) should be upgraded"); } return RxJavaPlugins.onAssembly(new SingleFromUnsafeSource<T>(onSubscribe)); } /** * Allows using and disposing a resource while running a SingleSource instance generated from * that resource (similar to a try-with-resources). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code using} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type of the SingleSource generated * @param <U> the resource type * @param resourceSupplier the Callable called for each SingleObserver to generate a resource Object * @param singleFunction the function called with the returned resource * Object from {@code resourceSupplier} and should return a SingleSource instance * to be run by the operator * @param disposer the consumer of the generated resource that is called exactly once for * that particular resource when the generated SingleSource terminates * (successfully or with an error) or gets cancelled. * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T, U> Single<T> using(Callable<U> resourceSupplier, Function<? super U, ? extends SingleSource<? extends T>> singleFunction, Consumer<? super U> disposer) { return using(resourceSupplier, singleFunction, disposer, true); } /** * Allows using and disposing a resource while running a SingleSource instance generated from * that resource (similar to a try-with-resources). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code using} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type of the SingleSource generated * @param <U> the resource type * @param resourceSupplier the Callable called for each SingleObserver to generate a resource Object * @param singleFunction the function called with the returned resource * Object from {@code resourceSupplier} and should return a SingleSource instance * to be run by the operator * @param disposer the consumer of the generated resource that is called exactly once for * that particular resource when the generated SingleSource terminates * (successfully or with an error) or gets cancelled. * @param eager * if true, the disposer is called before the terminal event is signalled * if false, the disposer is called after the terminal event is delivered to downstream * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T, U> Single<T> using( final Callable<U> resourceSupplier, final Function<? super U, ? extends SingleSource<? extends T>> singleFunction, final Consumer<? 
super U> disposer, final boolean eager) { ObjectHelper.requireNonNull(resourceSupplier, "resourceSupplier is null"); ObjectHelper.requireNonNull(singleFunction, "singleFunction is null"); ObjectHelper.requireNonNull(disposer, "disposer is null"); return RxJavaPlugins.onAssembly(new SingleUsing<T, U>(resourceSupplier, singleFunction, disposer, eager)); } /** * Wraps a SingleSource instance into a new Single instance if not already a Single * instance. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code wrap} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the value type * @param source the source to wrap * @return the Single wrapper or the source cast to Single (if possible) */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T> Single<T> wrap(SingleSource<T> source) { ObjectHelper.requireNonNull(source, "source is null"); if (source instanceof Single) { return RxJavaPlugins.onAssembly((Single<T>)source); } return RxJavaPlugins.onAssembly(new SingleFromUnsafeSource<T>(source)); } /** * Waits until all SingleSource sources provided by the Iterable sequence signal a success * value and calls a zipper function with an array of these values to return a result * to be emitted to downstream. * <p> * If the {@code Iterable} of {@link SingleSource}s is empty a {@link NoSuchElementException} error is signalled after subscription. * <p> * Note on method signature: since Java doesn't allow creating a generic array with {@code new T[]}, the * implementation of this operator has to create an {@code Object[]} instead. Unfortunately, a * {@code Function<Integer[], R>} passed to the method would trigger a {@code ClassCastException}. * * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/zip.png" alt=""> * <p> * If any of the SingleSources signal an error, all other SingleSources get cancelled and the * error emitted to downstream immediately. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the common value type * @param <R> the result value type * @param sources the Iterable sequence of SingleSource instances. An empty sequence will result in an * {@code onError} signal of {@link NoSuchElementException}. * @param zipper the function that receives an array with values from each SingleSource * and should return a value to be emitted to downstream * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T, R> Single<R> zip(final Iterable<? extends SingleSource<? extends T>> sources, Function<? super Object[], ? extends R> zipper) { ObjectHelper.requireNonNull(zipper, "zipper is null"); ObjectHelper.requireNonNull(sources, "sources is null"); return RxJavaPlugins.onAssembly(new SingleZipIterable<T, R>(sources, zipper)); } /** * Returns a Single that emits the results of a specified combiner function applied to two items emitted by * two other Singles. 
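     * <p>
     * For example, combining the two success values with the zipper function (an illustrative sketch;
     * the values and the printed output are only indicative):
     * <pre><code>
     * Single&lt;String&gt; name = Single.just("Jane");
     * Single&lt;Integer&gt; age = Single.just(30);
     *
     * Single.zip(name, age, (n, a) -&gt; n + " is " + a + " years old")
     *       .subscribe(s -&gt; System.out.println(s)); // prints "Jane is 30 years old"
     * </code></pre>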
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, BiFunction<? super T1, ? super T2, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); return zipArray(Functions.toFunction(zipper), source1, source2); } /** * Returns a Single that emits the results of a specified combiner function applied to three items emitted * by three other Singles. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, Function3<? super T1, ? super T2, ? super T3, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3); } /** * Returns a Single that emits the results of a specified combiner function applied to four items * emitted by four other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, Function4<? super T1, ? super T2, ? super T3, ? super T4, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4); } /** * Returns a Single that emits the results of a specified combiner function applied to five items * emitted by five other Singles. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, Function5<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? 
extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5); } /** * Returns a Single that emits the results of a specified combiner function applied to six items * emitted by six other Singles. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, Function6<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6); } /** * Returns a Single that emits the results of a specified combiner function applied to seven items * emitted by seven other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <T7> the seventh source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param source7 * a seventh source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, T7, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, SingleSource<? extends T7> source7, Function7<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); ObjectHelper.requireNonNull(source7, "source7 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6, source7); } /** * Returns a Single that emits the results of a specified combiner function applied to eight items * emitted by eight other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <T7> the seventh source Single's value type * @param <T8> the eighth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param source7 * a seventh source Single * @param source8 * an eighth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, T7, T8, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, SingleSource<? extends T7> source7, SingleSource<? extends T8> source8, Function8<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); ObjectHelper.requireNonNull(source7, "source7 is null"); ObjectHelper.requireNonNull(source8, "source8 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6, source7, source8); } /** * Returns a Single that emits the results of a specified combiner function applied to nine items * emitted by nine other Singles. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zip} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <T1> the first source Single's value type * @param <T2> the second source Single's value type * @param <T3> the third source Single's value type * @param <T4> the fourth source Single's value type * @param <T5> the fifth source Single's value type * @param <T6> the sixth source Single's value type * @param <T7> the seventh source Single's value type * @param <T8> the eighth source Single's value type * @param <T9> the ninth source Single's value type * @param <R> the result value type * @param source1 * the first source Single * @param source2 * a second source Single * @param source3 * a third source Single * @param source4 * a fourth source Single * @param source5 * a fifth source Single * @param source6 * a sixth source Single * @param source7 * a seventh source Single * @param source8 * an eighth source Single * @param source9 * a ninth source Single * @param zipper * a function that, when applied to the item emitted by each of the source Singles, results in an * item that will be emitted by the resulting Single * @return a Single that emits the zipped results * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public static <T1, T2, T3, T4, T5, T6, T7, T8, T9, R> Single<R> zip( SingleSource<? extends T1> source1, SingleSource<? extends T2> source2, SingleSource<? extends T3> source3, SingleSource<? extends T4> source4, SingleSource<? extends T5> source5, SingleSource<? extends T6> source6, SingleSource<? extends T7> source7, SingleSource<? extends T8> source8, SingleSource<? extends T9> source9, Function9<? super T1, ? super T2, ? super T3, ? super T4, ? super T5, ? super T6, ? super T7, ? super T8, ? super T9, ? extends R> zipper ) { ObjectHelper.requireNonNull(source1, "source1 is null"); ObjectHelper.requireNonNull(source2, "source2 is null"); ObjectHelper.requireNonNull(source3, "source3 is null"); ObjectHelper.requireNonNull(source4, "source4 is null"); ObjectHelper.requireNonNull(source5, "source5 is null"); ObjectHelper.requireNonNull(source6, "source6 is null"); ObjectHelper.requireNonNull(source7, "source7 is null"); ObjectHelper.requireNonNull(source8, "source8 is null"); ObjectHelper.requireNonNull(source9, "source9 is null"); return zipArray(Functions.toFunction(zipper), source1, source2, source3, source4, source5, source6, source7, source8, source9); } /** * Waits until all SingleSource sources provided via an array signal a success * value and calls a zipper function with an array of these values to return a result * to be emitted to downstream. * <p> * If the array of {@link SingleSource}s is empty a {@link NoSuchElementException} error is signalled immediately. * <p> * Note on method signature: since Java doesn't allow creating a generic array with {@code new T[]}, the * implementation of this operator has to create an {@code Object[]} instead. Unfortunately, a * {@code Function<Integer[], R>} passed to the method would trigger a {@code ClassCastException}. 
* * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/zip.png" alt=""> * <p> * If any of the SingleSources signal an error, all other SingleSources get cancelled and the * error emitted to downstream immediately. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zipArray} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <T> the common value type * @param <R> the result value type * @param sources the array of SingleSource instances. An empty sequence will result in an * {@code onError} signal of {@link NoSuchElementException}. * @param zipper the function that receives an array with values from each SingleSource * and should return a value to be emitted to downstream * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public static <T, R> Single<R> zipArray(Function<? super Object[], ? extends R> zipper, SingleSource<? extends T>... sources) { ObjectHelper.requireNonNull(zipper, "zipper is null"); ObjectHelper.requireNonNull(sources, "sources is null"); if (sources.length == 0) { return error(new NoSuchElementException()); } return RxJavaPlugins.onAssembly(new SingleZipArray<T, R>(sources, zipper)); } /** * Signals the event of this or the other SingleSource whichever signals first. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code ambWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param other the other SingleSource to race for the first emission of success or error * @return the new Single instance. A subscription to this provided source will occur after subscribing * to the current source. * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Single<T> ambWith(SingleSource<? extends T> other) { ObjectHelper.requireNonNull(other, "other is null"); return ambArray(this, other); } /** * Calls the specified converter function during assembly time and returns its resulting value. * <p> * This allows fluent conversion to any other type. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code as} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.7 - experimental * @param <R> the resulting object type * @param converter the function that receives the current Single instance and returns a value * @return the converted value * @throws NullPointerException if converter is null * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> R as(@NonNull SingleConverter<T, ? extends R> converter) { return ObjectHelper.requireNonNull(converter, "converter is null").apply(this); } /** * Hides the identity of the current Single, including the Disposable that is sent * to the downstream via {@code onSubscribe()}. * <p> * <img width="640" height="458" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.hide.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code hide} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> hide() { return RxJavaPlugins.onAssembly(new SingleHide<T>(this)); } /** * Transform a Single by applying a particular Transformer function to it. * <p> * This method operates on the Single itself whereas {@link #lift} operates on the Single's SingleObservers. 
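     * <p>
     * A minimal sketch of a reusable transformer (illustrative only; the particular schedulers are an
     * arbitrary choice for the example):
     * <pre><code>
     * SingleTransformer&lt;String, String&gt; applySchedulers = upstream -&gt;
     *         upstream.subscribeOn(Schedulers.io())
     *                 .observeOn(Schedulers.single());
     *
     * Single&lt;String&gt; result = Single.just("data").compose(applySchedulers);
     * </code></pre>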
* <p> * If the operator you are creating is designed to act on the individual item emitted by a Single, use * {@link #lift}. If your operator is designed to transform the source Single as a whole (for instance, by * applying a particular set of existing RxJava operators to it) use {@code compose}. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code compose} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the value type of the single returned by the transformer function * @param transformer the transformer function, not null * @return the source Single, transformed by the transformer function * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a> */ @SuppressWarnings("unchecked") @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Single<R> compose(SingleTransformer<? super T, ? extends R> transformer) { return wrap(((SingleTransformer<T, R>) ObjectHelper.requireNonNull(transformer, "transformer is null")).apply(this)); } /** * Stores the success value or exception from the current Single and replays it to late SingleObservers. * <p> * The returned Single subscribes to the current Single when the first SingleObserver subscribes. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code cache} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> cache() { return RxJavaPlugins.onAssembly(new SingleCache<T>(this)); } /** * Casts the success value of the current Single into the target type or signals a * ClassCastException if not compatible. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code cast} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the target type * @param clazz the type token to use for casting the success result from the current Single * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<U> cast(final Class<? extends U> clazz) { ObjectHelper.requireNonNull(clazz, "clazz is null"); return map(Functions.castFunction(clazz)); } /** * Returns a Flowable that emits the item emitted by the source Single, then the item emitted by the * specified Single. * <p> * <img width="640" height="335" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.concatWith.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code concatWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * a Single to be concatenated after the current * @return a Flowable that emits the item emitted by the source Single, followed by the item emitted by * {@code t1} * @see <a href="http://reactivex.io/documentation/operators/concat.html">ReactiveX operators documentation: Concat</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> concatWith(SingleSource<? extends T> other) { return concat(this, other); } /** * Delays the emission of the success signal from the current Single by the specified amount. * An error signal will not be delayed. 
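     * <p>
     * For example (an illustrative sketch; the timing comment is approximate):
     * <pre><code>
     * Single.just("done")
     *       .delay(2, TimeUnit.SECONDS)
     *       .subscribe(v -&gt; System.out.println(v)); // "done" is delivered roughly 2 seconds later
     * </code></pre>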
* <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd> * </dl> * * @param time the amount of time the success signal should be delayed for * @param unit the time unit * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> delay(long time, TimeUnit unit) { return delay(time, unit, Schedulers.computation(), false); } /** * Delays the emission of the success or error signal from the current Single by the specified amount. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.e.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delay} operates by default on the {@code computation} {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.5 - experimental * @param time the amount of time the success or error signal should be delayed for * @param unit the time unit * @param delayError if true, both success and error signals are delayed. if false, only success signals are delayed. * @return the new Single instance * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> delay(long time, TimeUnit unit, boolean delayError) { return delay(time, unit, Schedulers.computation(), delayError); } /** * Delays the emission of the success signal from the current Single by the specified amount. * An error signal will not be delayed. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.s.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify the {@link Scheduler} where the non-blocking wait and emission happens</dd> * </dl> * * @param time the amount of time the success signal should be delayed for * @param unit the time unit * @param scheduler the target scheduler to use for the non-blocking wait and emission * @return the new Single instance * @throws NullPointerException * if unit is null, or * if scheduler is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> delay(final long time, final TimeUnit unit, final Scheduler scheduler) { return delay(time, unit, scheduler, false); } /** * Delays the emission of the success or error signal from the current Single by the specified amount. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.delay.se.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify the {@link Scheduler} where the non-blocking wait and emission happens</dd> * </dl> * <p>History: 2.1.5 - experimental * @param time the amount of time the success or error signal should be delayed for * @param unit the time unit * @param scheduler the target scheduler to use for the non-blocking wait and emission * @param delayError if true, both success and error signals are delayed. if false, only success signals are delayed. 
* @return the new Single instance * @throws NullPointerException * if unit is null, or * if scheduler is null * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> delay(final long time, final TimeUnit unit, final Scheduler scheduler, boolean delayError) { ObjectHelper.requireNonNull(unit, "unit is null"); ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleDelay<T>(this, time, unit, scheduler, delayError)); } /** * Delays the actual subscription to the current Single until the given other CompletableSource * completes. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param other the CompletableSource that has to complete before the subscription to the * current Single happens * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> delaySubscription(CompletableSource other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithCompletable<T>(this, other)); } /** * Delays the actual subscription to the current Single until the given other SingleSource * signals success. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the element type of the other source * @param other the SingleSource that has to complete before the subscription to the * current Single happens * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<T> delaySubscription(SingleSource<U> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithSingle<T, U>(this, other)); } /** * Delays the actual subscription to the current Single until the given other ObservableSource * signals its first value or completes. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the element type of the other source * @param other the ObservableSource that has to signal a value or complete before the * subscription to the current Single happens * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<T> delaySubscription(ObservableSource<U> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithObservable<T, U>(this, other)); } /** * Delays the actual subscription to the current Single until the given other Publisher * signals its first value or completes. * <p>If the delaying source signals an error, that error is re-emitted and no subscription * to the current Single happens. * <p>The other source is consumed in an unbounded manner (requesting Long.MAX_VALUE from it). 
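     * <p>
     * An illustrative sketch (the one-second trigger is arbitrary):
     * <pre><code>
     * Single&lt;String&gt; delayed = Single.just("value")
     *         .delaySubscription(Flowable.timer(1, TimeUnit.SECONDS));
     * // the upstream Single.just is not subscribed to until the timer Publisher signals
     * </code></pre>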
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The {@code other} publisher is consumed in an unbounded fashion but will be * cancelled after the first item it produced.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <U> the element type of the other source * @param other the Publisher that has to signal a value or complete before the * subscription to the current Single happens * @return the new Single instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Single<T> delaySubscription(Publisher<U> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleDelayWithPublisher<T, U>(this, other)); } /** * Delays the actual subscription to the current Single until the given time delay elapsed. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does by default subscribe to the current Single * on the {@code computation} {@link Scheduler} after the delay.</dd> * </dl> * @param time the time amount to wait with the subscription * @param unit the time unit of the waiting * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> delaySubscription(long time, TimeUnit unit) { return delaySubscription(time, unit, Schedulers.computation()); } /** * Delays the actual subscription to the current Single until the given time delay elapsed. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code delaySubscription} does by default subscribe to the current Single * on the {@link Scheduler} you provided, after the delay.</dd> * </dl> * @param time the time amount to wait with the subscription * @param unit the time unit of the waiting * @param scheduler the scheduler to wait on and subscribe on to the current Single * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> delaySubscription(long time, TimeUnit unit, Scheduler scheduler) { return delaySubscription(Observable.timer(time, unit, scheduler)); } /** * Calls the specified consumer with the success item after this item has been emitted to the downstream. * <p>Note that the {@code doAfterSuccess} action is shared between subscriptions and as such * should be thread-safe. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doAfterSuccess} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.0.1 - experimental * @param onAfterSuccess the Consumer that will be called after emitting an item from upstream to the downstream * @return the new Single instance * @since 2.1 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doAfterSuccess(Consumer<? super T> onAfterSuccess) { ObjectHelper.requireNonNull(onAfterSuccess, "doAfterSuccess is null"); return RxJavaPlugins.onAssembly(new SingleDoAfterSuccess<T>(this, onAfterSuccess)); } /** * Registers an {@link Action} to be called after this Single invokes either onSuccess or onError. 
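     * <p>
     * For example (an illustrative sketch; the output shown in the comment is indicative):
     * <pre><code>
     * Single.just(1)
     *       .doAfterTerminate(() -&gt; System.out.println("after terminate"))
     *       .subscribe(v -&gt; System.out.println(v), e -&gt; e.printStackTrace());
     * // prints 1, then "after terminate"
     * </code></pre>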
* * <p>Note that the {@code doAfterTerminate} action is shared between subscriptions and as such * should be thread-safe.</p> * <p> * <img width="640" height="310" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/doAfterTerminate.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doAfterTerminate} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * <p>History: 2.0.6 - experimental * @param onAfterTerminate * an {@link Action} to be invoked when the source Single finishes * @return a Single that emits the same items as the source Single, then invokes the * {@link Action} * @see <a href="http://reactivex.io/documentation/operators/do.html">ReactiveX operators documentation: Do</a> * @since 2.1 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doAfterTerminate(Action onAfterTerminate) { ObjectHelper.requireNonNull(onAfterTerminate, "onAfterTerminate is null"); return RxJavaPlugins.onAssembly(new SingleDoAfterTerminate<T>(this, onAfterTerminate)); } /** * Calls the specified action after this Single signals onSuccess or onError or gets disposed by * the downstream. * <p>In case of a race between a terminal event and a dispose call, the provided {@code onFinally} action * is executed once per subscription. * <p>Note that the {@code onFinally} action is shared between subscriptions and as such * should be thread-safe. * <p> * <img width="640" height="291" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doFinally.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doFinally} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.0.1 - experimental * @param onFinally the action called when this Single terminates or gets cancelled * @return the new Single instance * @since 2.1 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doFinally(Action onFinally) { ObjectHelper.requireNonNull(onFinally, "onFinally is null"); return RxJavaPlugins.onAssembly(new SingleDoFinally<T>(this, onFinally)); } /** * Calls the shared consumer with the Disposable sent through the onSubscribe for each * SingleObserver that subscribes to the current Single. * <p> * <img width="640" height="347" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnSubscribe.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnSubscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onSubscribe the consumer called with the Disposable sent via onSubscribe * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnSubscribe(final Consumer<? super Disposable> onSubscribe) { ObjectHelper.requireNonNull(onSubscribe, "onSubscribe is null"); return RxJavaPlugins.onAssembly(new SingleDoOnSubscribe<T>(this, onSubscribe)); } /** * Calls the shared consumer with the success value sent via onSuccess for each * SingleObserver that subscribes to the current Single. 
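     * <p>
     * A minimal illustrative sketch (not from the reference documentation):
     * <pre><code>
     * Single.just("payload")
     *       .doOnSuccess(v -&gt; System.out.println("about to deliver: " + v))
     *       .subscribe(v -&gt; System.out.println("delivered: " + v));
     * </code></pre>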
* <p> * <img width="640" height="347" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnSuccess.2.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnSuccess} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onSuccess the consumer called with the success value of onSuccess * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnSuccess(final Consumer<? super T> onSuccess) { ObjectHelper.requireNonNull(onSuccess, "onSuccess is null"); return RxJavaPlugins.onAssembly(new SingleDoOnSuccess<T>(this, onSuccess)); } /** * Calls the shared consumer with the error sent via onError or the value * via onSuccess for each SingleObserver that subscribes to the current Single. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnEvent} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onEvent the consumer called with the success value of onEvent * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnEvent(final BiConsumer<? super T, ? super Throwable> onEvent) { ObjectHelper.requireNonNull(onEvent, "onEvent is null"); return RxJavaPlugins.onAssembly(new SingleDoOnEvent<T>(this, onEvent)); } /** * Calls the shared consumer with the error sent via onError for each * SingleObserver that subscribes to the current Single. * <p> * <img width="640" height="349" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnError.2.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnError} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onError the consumer called with the success value of onError * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnError(final Consumer<? super Throwable> onError) { ObjectHelper.requireNonNull(onError, "onError is null"); return RxJavaPlugins.onAssembly(new SingleDoOnError<T>(this, onError)); } /** * Calls the shared {@code Action} if a SingleObserver subscribed to the current Single * disposes the common Disposable it received via onSubscribe. * <p> * <img width="640" height="332" src="https://raw.githubusercontent.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.doOnDispose.png" alt=""> * </p> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code doOnDispose} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param onDispose the action called when the subscription is disposed * @return the new Single instance * @throws NullPointerException if onDispose is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> doOnDispose(final Action onDispose) { ObjectHelper.requireNonNull(onDispose, "onDispose is null"); return RxJavaPlugins.onAssembly(new SingleDoOnDispose<T>(this, onDispose)); } /** * Filters the success item of the Single via a predicate function and emitting it if the predicate * returns true, completing otherwise. 
* <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.filter.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code filter} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param predicate * a function that evaluates the item emitted by the source Maybe, returning {@code true} * if it passes the filter * @return a Maybe that emit the item emitted by the source Maybe that the filter * evaluates as {@code true} * @see <a href="http://reactivex.io/documentation/operators/filter.html">ReactiveX operators documentation: Filter</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Maybe<T> filter(Predicate<? super T> predicate) { ObjectHelper.requireNonNull(predicate, "predicate is null"); return RxJavaPlugins.onAssembly(new MaybeFilterSingle<T>(this, predicate)); } /** * Returns a Single that is based on applying a specified function to the item emitted by the source Single, * where that function returns a SingleSource. * <p> * <img width="640" height="300" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMap.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMap} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function that, when applied to the item emitted by the source Single, returns a SingleSource * @return the Single returned from {@code mapper} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Single<R> flatMap(Function<? super T, ? extends SingleSource<? extends R>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMap<T, R>(this, mapper)); } /** * Returns a Maybe that is based on applying a specified function to the item emitted by the source Single, * where that function returns a MaybeSource. * <p> * <img width="640" height="191" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapMaybe.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapMaybe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function that, when applied to the item emitted by the source Single, returns a MaybeSource * @return the Maybe returned from {@code mapper} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Maybe<R> flatMapMaybe(final Function<? super T, ? extends MaybeSource<? extends R>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapMaybe<T, R>(this, mapper)); } /** * Returns a Flowable that emits items based on applying a specified function to the item emitted by the * source Single, where that function returns a Publisher. 
* <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapPublisher.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer * and the {@code Publisher} returned by the mapper function is expected to honor it as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapPublisher} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function that, when applied to the item emitted by the source Single, returns a * Flowable * @return the Flowable returned from {@code func} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Flowable<R> flatMapPublisher(Function<? super T, ? extends Publisher<? extends R>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapPublisher<T, R>(this, mapper)); } /** * Returns a Flowable that merges each item emitted by the source Single with the values in an * Iterable corresponding to that item that is generated by a selector. * <p> * <img width="640" height="373" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flattenAsFlowable.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The operator honors backpressure from downstream.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code flattenAsFlowable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <U> * the type of item emitted by the resulting Iterable * @param mapper * a function that returns an Iterable sequence of values for when given an item emitted by the * source Single * @return the new Flowable instance * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Flowable<U> flattenAsFlowable(final Function<? super T, ? extends Iterable<? extends U>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapIterableFlowable<T, U>(this, mapper)); } /** * Returns an Observable that maps a success value into an Iterable and emits its items. * <p> * <img width="640" height="373" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/flattenAsObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flattenAsObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <U> * the type of item emitted by the resulting Iterable * @param mapper * a function that returns an Iterable sequence of values for when given an item emitted by the * source Single * @return the new Observable instance * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U> Observable<U> flattenAsObservable(final Function<? super T, ? extends Iterable<? 
extends U>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapIterableObservable<T, U>(this, mapper)); } /** * Returns an Observable that is based on applying a specified function to the item emitted by the source Single, * where that function returns an ObservableSource. * <p> * <img width="640" height="300" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function that, when applied to the item emitted by the source Single, returns an ObservableSource * @return the Observable returned from {@code func} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Observable<R> flatMapObservable(Function<? super T, ? extends ObservableSource<? extends R>> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapObservable<T, R>(this, mapper)); } /** * Returns a {@link Completable} that completes based on applying a specified function to the item emitted by the * source {@link Single}, where that function returns a {@link Completable}. * <p> * <img width="640" height="267" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.flatMapCompletable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code flatMapCompletable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param mapper * a function that, when applied to the item emitted by the source Single, returns a * Completable * @return the Completable returned from {@code func} when applied to the item emitted by the source Single * @see <a href="http://reactivex.io/documentation/operators/flatmap.html">ReactiveX operators documentation: FlatMap</a> * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Completable flatMapCompletable(final Function<? super T, ? extends CompletableSource> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleFlatMapCompletable<T>(this, mapper)); } /** * Waits in a blocking fashion until the current Single signals a success value (which is returned) or * an exception (which is propagated). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code blockingGet} does not operate by default on a particular {@link Scheduler}.</dd> * <dt><b>Error handling:</b></dt> * <dd>If the source signals an error, the operator wraps a checked {@link Exception} * into {@link RuntimeException} and throws that. 
Otherwise, {@code RuntimeException}s and * {@link Error}s are rethrown as they are.</dd> * </dl> * @return the success value */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final T blockingGet() { BlockingMultiObserver<T> observer = new BlockingMultiObserver<T>(); subscribe(observer); return observer.blockingGet(); } /** * <strong>This method requires advanced knowledge about building operators, please consider * other standard composition methods first;</strong> * Returns a {@code Single} which, when subscribed to, invokes the {@link SingleOperator#apply(SingleObserver) apply(SingleObserver)} method * of the provided {@link SingleOperator} for each individual downstream {@link Single} and allows the * insertion of a custom operator by accessing the downstream's {@link SingleObserver} during this subscription phase * and providing a new {@code SingleObserver}, containing the custom operator's intended business logic, that will be * used in the subscription process going further upstream. * <p> * Generally, such a new {@code SingleObserver} will wrap the downstream's {@code SingleObserver} and forwards the * {@code onSuccess} and {@code onError} events from the upstream directly or according to the * emission pattern the custom operator's business logic requires. In addition, such operator can intercept the * flow control calls of {@code dispose} and {@code isDisposed} that would have traveled upstream and perform * additional actions depending on the same business logic requirements. * <p> * Example: * <pre><code> * // Step 1: Create the consumer type that will be returned by the SingleOperator.apply(): * * public final class CustomSingleObserver&lt;T&gt; implements SingleObserver&lt;T&gt;, Disposable { * * // The downstream's SingleObserver that will receive the onXXX events * final SingleObserver&lt;? super String&gt; downstream; * * // The connection to the upstream source that will call this class' onXXX methods * Disposable upstream; * * // The constructor takes the downstream subscriber and usually any other parameters * public CustomSingleObserver(SingleObserver&lt;? super String&gt; downstream) { * this.downstream = downstream; * } * * // In the subscription phase, the upstream sends a Disposable to this class * // and subsequently this class has to send a Disposable to the downstream. * // Note that relaying the upstream's Disposable directly is not allowed in RxJava * &#64;Override * public void onSubscribe(Disposable s) { * if (upstream != null) { * s.cancel(); * } else { * upstream = s; * downstream.onSubscribe(this); * } * } * * // The upstream calls this with the next item and the implementation's * // responsibility is to emit an item to the downstream based on the intended * // business logic, or if it can't do so for the particular item, * // request more from the upstream * &#64;Override * public void onSuccess(T item) { * String str = item.toString(); * if (str.length() &lt; 2) { * downstream.onSuccess(str); * } else { * // Single is usually expected to produce one of the onXXX events * downstream.onError(new NoSuchElementException()); * } * } * * // Some operators may handle the upstream's error while others * // could just forward it to the downstream. * &#64;Override * public void onError(Throwable throwable) { * downstream.onError(throwable); * } * * // Some operators may use their own resources which should be cleaned up if * // the downstream disposes the flow before it completed. 
Operators without * // resources can simply forward the dispose to the upstream. * // In some cases, a disposed flag may be set by this method so that other parts * // of this class may detect the dispose and stop sending events * // to the downstream. * &#64;Override * public void dispose() { * upstream.dispose(); * } * * // Some operators may simply forward the call to the upstream while others * // can return the disposed flag set in dispose(). * &#64;Override * public boolean isDisposed() { * return upstream.isDisposed(); * } * } * * // Step 2: Create a class that implements the SingleOperator interface and * // returns the custom consumer type from above in its apply() method. * // Such class may define additional parameters to be submitted to * // the custom consumer type. * * final class CustomSingleOperator&lt;T&gt; implements SingleOperator&lt;String&gt; { * &#64;Override * public SingleObserver&lt;? super String&gt; apply(SingleObserver&lt;? super T&gt; upstream) { * return new CustomSingleObserver&lt;T&gt;(upstream); * } * } * * // Step 3: Apply the custom operator via lift() in a flow by creating an instance of it * // or reusing an existing one. * * Single.just(5) * .lift(new CustomSingleOperator&lt;Integer&gt;()) * .test() * .assertResult("5"); * * Single.just(15) * .lift(new CustomSingleOperator&lt;Integer&gt;()) * .test() * .assertFailure(NoSuchElementException.class); * </code></pre> * <p> * Creating custom operators can be complicated and it is recommended one consults the * <a href="https://github.com/ReactiveX/RxJava/wiki/Writing-operators-for-2.0">RxJava wiki: Writing operators</a> page about * the tools, requirements, rules, considerations and pitfalls of implementing them. * <p> * Note that implementing custom operators via this {@code lift()} method adds slightly more overhead by requiring * an additional allocation and indirection per assembled flows. Instead, extending the abstract {@code Single} * class and creating a {@link SingleTransformer} with it is recommended. * <p> * Note also that it is not possible to stop the subscription phase in {@code lift()} as the {@code apply()} method * requires a non-null {@code SingleObserver} instance to be returned, which is then unconditionally subscribed to * the upstream {@code Single}. For example, if the operator decided there is no reason to subscribe to the * upstream source because of some optimization possibility or a failure to prepare the operator, it still has to * return a {@code SingleObserver} that should immediately dispose the upstream's {@code Disposable} in its * {@code onSubscribe} method. Again, using a {@code SingleTransformer} and extending the {@code Single} is * a better option as {@link #subscribeActual} can decide to not subscribe to its upstream after all. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code lift} does not operate by default on a particular {@link Scheduler}, however, the * {@link SingleOperator} may use a {@code Scheduler} to support its own asynchronous behavior.</dd> * </dl> * * @param <R> the output value type * @param lift the {@link SingleOperator} that receives the downstream's {@code SingleObserver} and should return * a {@code SingleObserver} with custom behavior to be used as the consumer for the current * {@code Single}. 
* @return the new Single instance * @see <a href="https://github.com/ReactiveX/RxJava/wiki/Writing-operators-for-2.0">RxJava wiki: Writing operators</a> * @see #compose(SingleTransformer) */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Single<R> lift(final SingleOperator<? extends R, ? super T> lift) { ObjectHelper.requireNonNull(lift, "onLift is null"); return RxJavaPlugins.onAssembly(new SingleLift<T, R>(this, lift)); } /** * Returns a Single that applies a specified function to the item emitted by the source Single and * emits the result of this function application. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.map.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code map} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <R> the result value type * @param mapper * a function to apply to the item emitted by the Single * @return a Single that emits the item from the source Single, transformed by the specified function * @see <a href="http://reactivex.io/documentation/operators/map.html">ReactiveX operators documentation: Map</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> Single<R> map(Function<? super T, ? extends R> mapper) { ObjectHelper.requireNonNull(mapper, "mapper is null"); return RxJavaPlugins.onAssembly(new SingleMap<T, R>(this, mapper)); } /** * Signals true if the current Single signals a success value that is Object-equals with the value * provided. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code contains} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param value the value to compare against the success value of this Single * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<Boolean> contains(Object value) { return contains(value, ObjectHelper.equalsPredicate()); } /** * Signals true if the current Single signals a success value that is equal with * the value provided by calling a bi-predicate. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code contains} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param value the value to compare against the success value of this Single * @param comparer the function that receives the success value of this Single, the value provided * and should return true if they are considered equal * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<Boolean> contains(final Object value, final BiPredicate<Object, Object> comparer) { ObjectHelper.requireNonNull(value, "value is null"); ObjectHelper.requireNonNull(comparer, "comparer is null"); return RxJavaPlugins.onAssembly(new SingleContains<T>(this, value, comparer)); } /** * Flattens this and another Single into a single Flowable, without any transformation. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.merge.png" alt=""> * <p> * You can combine items emitted by multiple Singles so that they appear as a single Flowable, by using * the {@code mergeWith} method. 
* <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code mergeWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * a SingleSource to be merged * @return that emits all of the items emitted by the source Singles * @see <a href="http://reactivex.io/documentation/operators/merge.html">ReactiveX operators documentation: Merge</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> mergeWith(SingleSource<? extends T> other) { return merge(this, other); } /** * Modifies a Single to emit its item (or notify of its error) on a specified {@link Scheduler}, * asynchronously. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.observeOn.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>you specify which {@link Scheduler} this operator will use.</dd> * </dl> * * @param scheduler * the {@link Scheduler} to notify subscribers on * @return the source Single modified so that its subscribers are notified on the specified * {@link Scheduler} * @throws NullPointerException if scheduler is null * @see <a href="http://reactivex.io/documentation/operators/observeon.html">ReactiveX operators documentation: ObserveOn</a> * @see <a href="http://www.grahamlea.com/2014/07/rxjava-threading-examples/">RxJava Threading Examples</a> * @see #subscribeOn */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> observeOn(final Scheduler scheduler) { ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleObserveOn<T>(this, scheduler)); } /** * Instructs a Single to emit an item (returned by a specified function) rather than invoking * {@link SingleObserver#onError onError} if it encounters an error. * <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorReturn.png" alt=""> * <p> * By default, when a Single encounters an error that prevents it from emitting the expected item to its * subscriber, the Single invokes its subscriber's {@link SingleObserver#onError} method, and then quits * without invoking any more of its subscriber's methods. The {@code onErrorReturn} method changes this * behavior. If you pass a function ({@code resumeFunction}) to a Single's {@code onErrorReturn} method, if * the original Single encounters an error, instead of invoking its subscriber's * {@link SingleObserver#onError} method, it will instead emit the return value of {@code resumeFunction}. * <p> * You can use this to prevent errors from propagating or to supply fallback data should errors be * encountered. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorReturn} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param resumeFunction * a function that returns an item that the new Single will emit if the source Single encounters * an error * @return the original Single with appropriately modified behavior * @see <a href="http://reactivex.io/documentation/operators/catch.html">ReactiveX operators documentation: Catch</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorReturn(final Function<Throwable, ? 
extends T> resumeFunction) { ObjectHelper.requireNonNull(resumeFunction, "resumeFunction is null"); return RxJavaPlugins.onAssembly(new SingleOnErrorReturn<T>(this, resumeFunction, null)); } /** * Signals the specified value as success in case the current Single signals an error. * <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorReturnItem.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorReturnItem} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param value the value to signal if the current Single fails * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorReturnItem(final T value) { ObjectHelper.requireNonNull(value, "value is null"); return RxJavaPlugins.onAssembly(new SingleOnErrorReturn<T>(this, null, value)); } /** * Instructs a Single to pass control to another Single rather than invoking * {@link SingleObserver#onError(Throwable)} if it encounters an error. * <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorResumeNext.png" alt=""> * <p> * By default, when a Single encounters an error that prevents it from emitting the expected item to * its {@link SingleObserver}, the Single invokes its SingleObserver's {@code onError} method, and then quits * without invoking any more of its SingleObserver's methods. The {@code onErrorResumeNext} method changes this * behavior. If you pass another Single ({@code resumeSingleInCaseOfError}) to a Single's * {@code onErrorResumeNext} method, if the original Single encounters an error, instead of invoking its * SingleObserver's {@code onError} method, it will instead relinquish control to {@code resumeSingleInCaseOfError} which * will invoke the SingleObserver's {@link SingleObserver#onSuccess onSuccess} method if it is able to do so. In such a case, * because no Single necessarily invokes {@code onError}, the SingleObserver may never know that an error * happened. * <p> * You can use this to prevent errors from propagating or to supply fallback data should errors be * encountered. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorResumeNext} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param resumeSingleInCaseOfError a Single that will take control if source Single encounters an error. * @return the original Single, with appropriately modified behavior. * @see <a href="http://reactivex.io/documentation/operators/catch.html">ReactiveX operators documentation: Catch</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorResumeNext(final Single<? extends T> resumeSingleInCaseOfError) { ObjectHelper.requireNonNull(resumeSingleInCaseOfError, "resumeSingleInCaseOfError is null"); return onErrorResumeNext(Functions.justFunction(resumeSingleInCaseOfError)); } /** * Instructs a Single to pass control to another Single rather than invoking * {@link SingleObserver#onError(Throwable)} if it encounters an error. 
* <p> * <img width="640" height="451" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.onErrorResumeNext.f.png" alt=""> * <p> * By default, when a Single encounters an error that prevents it from emitting the expected item to * its {@link SingleObserver}, the Single invokes its SingleObserver's {@code onError} method, and then quits * without invoking any more of its SingleObserver's methods. The {@code onErrorResumeNext} method changes this * behavior. If you pass a function that will return another Single ({@code resumeFunctionInCaseOfError}) to a Single's * {@code onErrorResumeNext} method, if the original Single encounters an error, instead of invoking its * SingleObserver's {@code onError} method, it will instead relinquish control to {@code resumeSingleInCaseOfError} which * will invoke the SingleObserver's {@link SingleObserver#onSuccess onSuccess} method if it is able to do so. In such a case, * because no Single necessarily invokes {@code onError}, the SingleObserver may never know that an error * happened. * <p> * You can use this to prevent errors from propagating or to supply fallback data should errors be * encountered. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onErrorResumeNext} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param resumeFunctionInCaseOfError a function that returns a Single that will take control if source Single encounters an error. * @return the original Single, with appropriately modified behavior. * @see <a href="http://reactivex.io/documentation/operators/catch.html">ReactiveX operators documentation: Catch</a> * @since .20 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onErrorResumeNext( final Function<? super Throwable, ? extends SingleSource<? extends T>> resumeFunctionInCaseOfError) { ObjectHelper.requireNonNull(resumeFunctionInCaseOfError, "resumeFunctionInCaseOfError is null"); return RxJavaPlugins.onAssembly(new SingleResumeNext<T>(this, resumeFunctionInCaseOfError)); } /** * Nulls out references to the upstream producer and downstream SingleObserver if * the sequence is terminated or downstream calls dispose(). * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code onTerminateDetach} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.5 - experimental * @return a Single which nulls out references to the upstream producer and downstream SingleObserver if * the sequence is terminated or downstream calls dispose() * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> onTerminateDetach() { return RxJavaPlugins.onAssembly(new SingleDetach<T>(this)); } /** * Repeatedly re-subscribes to the current Single and emits each success value. * <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeat.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeat() { return toFlowable().repeat(); } /** * Re-subscribes to the current Single at most the given number of times and emits each success value. 
* <p> * <img width="640" height="457" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeat.n.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeat} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param times the number of times to re-subscribe to the current Single * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeat(long times) { return toFlowable().repeat(times); } /** * Re-subscribes to the current Single if * the Publisher returned by the handler function signals a value in response to a * value signalled through the Flowable the handle receives. * <p> * <img width="640" height="1478" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeatWhen.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer. * The {@code Publisher} returned by the handler function is expected to honor backpressure as well.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeatWhen} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param handler the function that is called with a Flowable that signals a value when the Single * signalled a success value and returns a Publisher that has to signal a value to * trigger a resubscription to the current Single, otherwise the terminal signal of * the Publisher will be the terminal signal of the sequence as well. * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeatWhen(Function<? super Flowable<Object>, ? extends Publisher<?>> handler) { return toFlowable().repeatWhen(handler); } /** * Re-subscribes to the current Single until the given BooleanSupplier returns true. * <p> * <img width="640" height="463" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.repeatUntil.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code repeatUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param stop the BooleanSupplier called after the current Single succeeds and if returns false, * the Single is re-subscribed; otherwise the sequence completes. * @return the new Flowable instance * @since 2.0 */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Flowable<T> repeatUntil(BooleanSupplier stop) { return toFlowable().repeatUntil(stop); } /** * Repeatedly re-subscribes to the current Single indefinitely if it fails with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry() { return toSingle(toFlowable().retry()); } /** * Repeatedly re-subscribe at most the specified times to the current Single * if it fails with an onError. 
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param times the number of times to resubscribe if the current Single fails * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(long times) { return toSingle(toFlowable().retry(times)); } /** * Re-subscribe to the current Single if the given predicate returns true when the Single fails * with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param predicate the predicate called with the resubscription count and the failure Throwable * and should return true if a resubscription should happen * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(BiPredicate<? super Integer, ? super Throwable> predicate) { return toSingle(toFlowable().retry(predicate)); } /** * Repeatedly re-subscribe at most times or until the predicate returns false, whichever happens first * if it fails with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * <p>History: 2.1.8 - experimental * @param times the number of times to resubscribe if the current Single fails * @param predicate the predicate called with the failure Throwable * and should return true if a resubscription should happen * @return the new Single instance * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(long times, Predicate<? super Throwable> predicate) { return toSingle(toFlowable().retry(times, predicate)); } /** * Re-subscribe to the current Single if the given predicate returns true when the Single fails * with an onError. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retry} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param predicate the predicate called with the failure Throwable * and should return true if a resubscription should happen * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retry(Predicate<? super Throwable> predicate) { return toSingle(toFlowable().retry(predicate)); } /** * Re-subscribes to the current Single if and when the Publisher returned by the handler * function signals a value. * <p> * If the Publisher signals an onComplete, the resulting Single will signal a NoSuchElementException. * <p> * Note that the inner {@code Publisher} returned by the handler function should signal * either {@code onNext}, {@code onError} or {@code onComplete} in response to the received * {@code Throwable} to indicate the operator should retry or terminate. If the upstream to * the operator is asynchronous, signalling onNext followed by onComplete immediately may * result in the sequence to be completed immediately. Similarly, if this inner * {@code Publisher} signals {@code onError} or {@code onComplete} while the upstream is * active, the sequence is terminated with the same signal immediately. 
* <p> * The following example demonstrates how to retry an asynchronous source with a delay: * <pre><code> * Single.timer(1, TimeUnit.SECONDS) * .doOnSubscribe(s -&gt; System.out.println("subscribing")) * .map(v -&gt; { throw new RuntimeException(); }) * .retryWhen(errors -&gt; { * AtomicInteger counter = new AtomicInteger(); * return errors * .takeWhile(e -&gt; counter.getAndIncrement() != 3) * .flatMap(e -&gt; { * System.out.println("delay retry by " + counter.get() + " second(s)"); * return Flowable.timer(counter.get(), TimeUnit.SECONDS); * }); * }) * .blockingGet(); * </code></pre> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code retryWhen} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param handler the function that receives a Flowable of the error the Single emits and should * return a Publisher that should signal a normal value (in response to the * throwable the Flowable emits) to trigger a resubscription or signal an error to * be the output of the resulting Single * @return the new Single instance */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> retryWhen(Function<? super Flowable<Throwable>, ? extends Publisher<?>> handler) { return toSingle(toFlowable().retryWhen(handler)); } /** * Subscribes to a Single but ignore its emission or notification. * <p> * If the Single emits an error, it is wrapped into an * {@link io.reactivex.exceptions.OnErrorNotImplementedException OnErrorNotImplementedException} * and routed to the RxJavaPlugins.onError handler. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Disposable} reference can request the {@link Single} stop work. * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> */ @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe() { return subscribe(Functions.emptyConsumer(), Functions.ON_ERROR_MISSING); } /** * Subscribes to a Single and provides a composite callback to handle the item it emits * or any error notification it issues. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param onCallback * the callback that receives either the success value or the failure Throwable * (whichever is not null) * @return a {@link Disposable} reference can request the {@link Single} stop work. * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> * @throws NullPointerException * if {@code onCallback} is null */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe(final BiConsumer<? super T, ? super Throwable> onCallback) { ObjectHelper.requireNonNull(onCallback, "onCallback is null"); BiConsumerSingleObserver<T> observer = new BiConsumerSingleObserver<T>(onCallback); subscribe(observer); return observer; } /** * Subscribes to a Single and provides a callback to handle the item it emits. * <p> * If the Single emits an error, it is wrapped into an * {@link io.reactivex.exceptions.OnErrorNotImplementedException OnErrorNotImplementedException} * and routed to the RxJavaPlugins.onError handler. 
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param onSuccess * the {@code Consumer<T>} you have designed to accept the emission from the Single * @return a {@link Disposable} reference can request the {@link Single} stop work. * @throws NullPointerException * if {@code onSuccess} is null * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe(Consumer<? super T> onSuccess) { return subscribe(onSuccess, Functions.ON_ERROR_MISSING); } /** * Subscribes to a Single and provides callbacks to handle the item it emits or any error notification it * issues. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param onSuccess * the {@code Consumer<T>} you have designed to accept the emission from the Single * @param onError * the {@code Consumer<Throwable>} you have designed to accept any error notification from the * Single * @return a {@link Disposable} reference can request the {@link Single} stop work. * @see <a href="http://reactivex.io/documentation/operators/subscribe.html">ReactiveX operators documentation: Subscribe</a> * @throws NullPointerException * if {@code onSuccess} is null, or * if {@code onError} is null */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Disposable subscribe(final Consumer<? super T> onSuccess, final Consumer<? super Throwable> onError) { ObjectHelper.requireNonNull(onSuccess, "onSuccess is null"); ObjectHelper.requireNonNull(onError, "onError is null"); ConsumerSingleObserver<T> observer = new ConsumerSingleObserver<T>(onSuccess, onError); subscribe(observer); return observer; } @SchedulerSupport(SchedulerSupport.NONE) @Override public final void subscribe(SingleObserver<? super T> observer) { ObjectHelper.requireNonNull(observer, "subscriber is null"); observer = RxJavaPlugins.onSubscribe(this, observer); ObjectHelper.requireNonNull(observer, "subscriber returned by the RxJavaPlugins hook is null"); try { subscribeActual(observer); } catch (NullPointerException ex) { throw ex; } catch (Throwable ex) { Exceptions.throwIfFatal(ex); NullPointerException npe = new NullPointerException("subscribeActual failed"); npe.initCause(ex); throw npe; } } /** * Implement this method in subclasses to handle the incoming {@link SingleObserver}s. * <p>There is no need to call any of the plugin hooks on the current {@code Single} instance or * the {@code SingleObserver}; all hooks and basic safeguards have been * applied by {@link #subscribe(SingleObserver)} before this method gets called. * @param observer the SingleObserver to handle, not null */ protected abstract void subscribeActual(@NonNull SingleObserver<? super T> observer); /** * Subscribes a given SingleObserver (subclass) to this Single and returns the given * SingleObserver as is. * <p>Usage example: * <pre><code> * Single&lt;Integer&gt; source = Single.just(1); * CompositeDisposable composite = new CompositeDisposable(); * * DisposableSingleObserver&lt;Integer&gt; ds = new DisposableSingleObserver&lt;&gt;() { * // ... 
* }; * * composite.add(source.subscribeWith(ds)); * </code></pre> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code subscribeWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <E> the type of the SingleObserver to use and return * @param observer the SingleObserver (subclass) to use and return, not null * @return the input {@code observer} * @throws NullPointerException if {@code observer} is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <E extends SingleObserver<? super T>> E subscribeWith(E observer) { subscribe(observer); return observer; } /** * Asynchronously subscribes subscribers to this Single on the specified {@link Scheduler}. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.subscribeOn.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>You specify which {@link Scheduler} this operator will use.</dd> * </dl> * * @param scheduler * the {@link Scheduler} to perform subscription actions on * @return the source Single modified so that its subscriptions happen on the specified {@link Scheduler} * @see <a href="http://reactivex.io/documentation/operators/subscribeon.html">ReactiveX operators documentation: SubscribeOn</a> * @see <a href="http://www.grahamlea.com/2014/07/rxjava-threading-examples/">RxJava Threading Examples</a> * @see #observeOn */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> subscribeOn(final Scheduler scheduler) { ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleSubscribeOn<T>(this, scheduler)); } /** * Returns a Single that emits the item emitted by the source Single until a Completable terminates. Upon * termination of {@code other}, this will emit a {@link CancellationException} rather than go to * {@link SingleObserver#onSuccess(Object)}. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/takeUntil.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code takeUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * the Completable whose termination will cause {@code takeUntil} to emit the item from the source * Single * @return a Single that emits the item emitted by the source Single until such time as {@code other} terminates. * @see <a href="http://reactivex.io/documentation/operators/takeuntil.html">ReactiveX operators documentation: TakeUntil</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Single<T> takeUntil(final CompletableSource other) { ObjectHelper.requireNonNull(other, "other is null"); return takeUntil(new CompletableToFlowable<T>(other)); } /** * Returns a Single that emits the item emitted by the source Single until a Publisher emits an item. Upon * emission of an item from {@code other}, this will emit a {@link CancellationException} rather than go to * {@link SingleObserver#onSuccess(Object)}. 
* <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/takeUntil.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The {@code other} publisher is consumed in an unbounded fashion but will be * cancelled after the first item it produced.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code takeUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * the Publisher whose first emitted item will cause {@code takeUntil} to emit the item from the source * Single * @param <E> * the type of items emitted by {@code other} * @return a Single that emits the item emitted by the source Single until such time as {@code other} emits * its first item * @see <a href="http://reactivex.io/documentation/operators/takeuntil.html">ReactiveX operators documentation: TakeUntil</a> */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <E> Single<T> takeUntil(final Publisher<E> other) { ObjectHelper.requireNonNull(other, "other is null"); return RxJavaPlugins.onAssembly(new SingleTakeUntil<T, E>(this, other)); } /** * Returns a Single that emits the item emitted by the source Single until a second Single emits an item. Upon * emission of an item from {@code other}, this will emit a {@link CancellationException} rather than go to * {@link SingleObserver#onSuccess(Object)}. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/takeUntil.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code takeUntil} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param other * the Single whose emitted item will cause {@code takeUntil} to emit the item from the source Single * @param <E> * the type of item emitted by {@code other} * @return a Single that emits the item emitted by the source Single until such time as {@code other} emits its item * @see <a href="http://reactivex.io/documentation/operators/takeuntil.html">ReactiveX operators documentation: TakeUntil</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <E> Single<T> takeUntil(final SingleSource<? extends E> other) { ObjectHelper.requireNonNull(other, "other is null"); return takeUntil(new SingleToFlowable<E>(other)); } /** * Signals a TimeoutException if the current Single doesn't signal a success value within the * specified timeout window. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} signals the TimeoutException on the {@code computation} {@link Scheduler}.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> timeout(long timeout, TimeUnit unit) { return timeout0(timeout, unit, Schedulers.computation(), null); } /** * Signals a TimeoutException if the current Single doesn't signal a success value within the * specified timeout window. 
* <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} signals the TimeoutException on the {@link Scheduler} you specify.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @param scheduler the target scheduler where the timeout is awaited and the TimeoutException * signalled * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler) { return timeout0(timeout, unit, scheduler, null); } /** * Runs the current Single and if it doesn't signal within the specified timeout window, it is * cancelled and the other SingleSource subscribed to. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} subscribes to the other SingleSource on the {@link Scheduler} you specify.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @param scheduler the scheduler where the timeout is awaited and the subscription to other happens * @param other the other SingleSource that gets subscribed to if the current Single times out * @return the new Single instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> timeout(long timeout, TimeUnit unit, Scheduler scheduler, SingleSource<? extends T> other) { ObjectHelper.requireNonNull(other, "other is null"); return timeout0(timeout, unit, scheduler, other); } /** * Runs the current Single and if it doesn't signal within the specified timeout window, it is * cancelled and the other SingleSource subscribed to. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code timeout} subscribes to the other SingleSource on * the {@code computation} {@link Scheduler}.</dd> * </dl> * @param timeout the timeout amount * @param unit the time unit * @param other the other SingleSource that gets subscribed to if the current Single times out * @return the new Single instance * @throws NullPointerException * if other is null, or * if unit is null, or * if scheduler is null * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.COMPUTATION) public final Single<T> timeout(long timeout, TimeUnit unit, SingleSource<? extends T> other) { ObjectHelper.requireNonNull(other, "other is null"); return timeout0(timeout, unit, Schedulers.computation(), other); } private Single<T> timeout0(final long timeout, final TimeUnit unit, final Scheduler scheduler, final SingleSource<? extends T> other) { ObjectHelper.requireNonNull(unit, "unit is null"); ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleTimeout<T>(this, timeout, unit, scheduler, other)); } /** * Calls the specified converter function with the current Single instance * during assembly time and returns its result. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code to} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param <R> the result type * @param convert the function that is called with the current Single instance during * assembly time that should return some value to be the result * * @return the value returned by the convert function */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <R> R to(Function<? 
super Single<T>, R> convert) { try { return ObjectHelper.requireNonNull(convert, "convert is null").apply(this); } catch (Throwable ex) { Exceptions.throwIfFatal(ex); throw ExceptionHelper.wrapOrThrow(ex); } } /** * Returns a {@link Completable} that discards result of the {@link Single} * and calls {@code onComplete} when this source {@link Single} calls * {@code onSuccess}. Error terminal event is propagated. * <p> * <img width="640" height="436" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toCompletable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toCompletable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Completable} that calls {@code onComplete} on it's subscriber when the source {@link Single} * calls {@code onSuccess}. * @since 2.0 * @deprecated see {@link #ignoreElement()} instead, will be removed in 3.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @Deprecated public final Completable toCompletable() { return RxJavaPlugins.onAssembly(new CompletableFromSingle<T>(this)); } /** * Returns a {@link Completable} that ignores the success value of this {@link Single} * and calls {@code onComplete} instead on the returned {@code Completable}. * <p> * <img width="640" height="436" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.ignoreElement.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code ignoreElement} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Completable} that calls {@code onComplete} on it's observer when the source {@link Single} * calls {@code onSuccess}. * @since 2.1.13 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Completable ignoreElement() { return RxJavaPlugins.onAssembly(new CompletableFromSingle<T>(this)); } /** * Converts this Single into a {@link Flowable}. * <p> * <img width="640" height="462" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toFlowable.png" alt=""> * <dl> * <dt><b>Backpressure:</b></dt> * <dd>The returned {@code Flowable} honors the backpressure of the downstream consumer.</dd> * <dt><b>Scheduler:</b></dt> * <dd>{@code toFlowable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Flowable} that emits a single item T or an error. */ @BackpressureSupport(BackpressureKind.FULL) @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Flowable<T> toFlowable() { if (this instanceof FuseToFlowable) { return ((FuseToFlowable<T>)this).fuseToFlowable(); } return RxJavaPlugins.onAssembly(new SingleToFlowable<T>(this)); } /** * Returns a {@link Future} representing the single value emitted by this {@code Single}. * <p> * <img width="640" height="467" src="https://github.com/ReactiveX/RxJava/wiki/images/rx-operators/Single.toFuture.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toFuture} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Future} that expects a single item to be emitted by this {@code Single} * @see <a href="http://reactivex.io/documentation/operators/to.html">ReactiveX documentation: To</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final Future<T> toFuture() { return subscribeWith(new FutureSingleObserver<T>()); } /** * Converts this Single into a {@link Maybe}. 
* <p> * <img width="640" height="463" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toMaybe.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toMaybe} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return a {@link Maybe} that emits a single item T or an error. */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Maybe<T> toMaybe() { if (this instanceof FuseToMaybe) { return ((FuseToMaybe<T>)this).fuseToMaybe(); } return RxJavaPlugins.onAssembly(new MaybeFromSingle<T>(this)); } /** * Converts this Single into an {@link Observable}. * <p> * <img width="640" height="305" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.toObservable.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code toObservable} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @return an {@link Observable} that emits a single item T or an error. */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) @SuppressWarnings("unchecked") public final Observable<T> toObservable() { if (this instanceof FuseToObservable) { return ((FuseToObservable<T>)this).fuseToObservable(); } return RxJavaPlugins.onAssembly(new SingleToObservable<T>(this)); } /** * Returns a Single which makes sure when a SingleObserver disposes the Disposable, * that call is propagated up on the specified scheduler. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code unsubscribeOn} calls dispose() of the upstream on the {@link Scheduler} you specify.</dd> * </dl> * <p>History: 2.0.9 - experimental * @param scheduler the target scheduler where to execute the cancellation * @return the new Single instance * @throws NullPointerException if scheduler is null * @since 2.2 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.CUSTOM) public final Single<T> unsubscribeOn(final Scheduler scheduler) { ObjectHelper.requireNonNull(scheduler, "scheduler is null"); return RxJavaPlugins.onAssembly(new SingleUnsubscribeOn<T>(this, scheduler)); } /** * Returns a Single that emits the result of applying a specified function to the pair of items emitted by * the source Single and another specified Single. * <p> * <img width="640" height="380" src="https://raw.github.com/wiki/ReactiveX/RxJava/images/rx-operators/Single.zip.png" alt=""> * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code zipWith} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * * @param <U> * the type of items emitted by the {@code other} Single * @param <R> * the type of items emitted by the resulting Single * @param other * the other SingleSource * @param zipper * a function that combines the pairs of items from the two SingleSources to generate the items to * be emitted by the resulting Single * @return a Single that pairs up values from the source Single and the {@code other} SingleSource * and emits the results of {@code zipFunction} applied to these pairs * @see <a href="http://reactivex.io/documentation/operators/zip.html">ReactiveX operators documentation: Zip</a> */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final <U, R> Single<R> zipWith(SingleSource<U> other, BiFunction<? super T, ? super U, ? 
extends R> zipper) { return zip(this, other, zipper); } // ------------------------------------------------------------------------- // Fluent test support, super handy and reduces test preparation boilerplate // ------------------------------------------------------------------------- /** * Creates a TestObserver and subscribes * it to this Single. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code test} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @return the new TestObserver instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final TestObserver<T> test() { TestObserver<T> to = new TestObserver<T>(); subscribe(to); return to; } /** * Creates a TestObserver optionally in cancelled state, then subscribes it to this Single. * <dl> * <dt><b>Scheduler:</b></dt> * <dd>{@code test} does not operate by default on a particular {@link Scheduler}.</dd> * </dl> * @param cancelled if true, the TestObserver will be cancelled before subscribing to this * Single. * @return the new TestObserver instance * @since 2.0 */ @CheckReturnValue @SchedulerSupport(SchedulerSupport.NONE) public final TestObserver<T> test(boolean cancelled) { TestObserver<T> to = new TestObserver<T>(); if (cancelled) { to.cancel(); } subscribe(to); return to; } private static <T> Single<T> toSingle(Flowable<T> source) { return RxJavaPlugins.onAssembly(new FlowableSingleSingle<T>(source, null)); } }
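The Single.java excerpt above documents operators such as map, onErrorReturn, timeout, blockingGet and test(). The short sketch below is illustrative only and not part of the committed file; it assumes RxJava 2.x and Java 8 lambdas on the classpath and simply shows how a few of those documented operators compose.

import java.util.concurrent.TimeUnit;
import io.reactivex.Single;

// Illustrative usage sketch; the class name is hypothetical.
public class SingleUsageSketch {
    public static void main(String[] args) {
        // map transforms the success value, onErrorReturn supplies a fallback on error,
        // timeout signals a TimeoutException on the computation scheduler if no value
        // arrives in time, and blockingGet waits for the success value.
        String result = Single.just(21)
                .map(v -> v * 2)
                .map(v -> "result=" + v)
                .onErrorReturn(e -> "fallback")
                .timeout(1, TimeUnit.SECONDS)
                .blockingGet();
        System.out.println(result); // prints "result=42"

        // test() subscribes a TestObserver for fluent assertions, as also shown
        // in the lift() documentation above.
        Single.just(5)
                .map(v -> v + 1)
                .test()
                .assertResult(6);
    }
}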
2.x: Add marbles for Single.concat operator (#6137)

* Add marbles for Single.concat operator

* Update URL for Single.concat marble diagram

* Update Single.concat marble's height
src/main/java/io/reactivex/Single.java
2.x: Add marbles for Single.concat operator (#6137)
Java
apache-2.0
916cbe30170c25e085b23f27af0e7ceef08eb364
0
StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals,StyleTang/incubator-rocketmq-externals
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.replicator; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import io.openmessaging.KeyValue; import io.openmessaging.connector.api.data.DataEntryBuilder; import io.openmessaging.connector.api.data.EntryType; import io.openmessaging.connector.api.data.Field; import io.openmessaging.connector.api.data.FieldType; import io.openmessaging.connector.api.data.Schema; import io.openmessaging.connector.api.data.SourceDataEntry; import io.openmessaging.connector.api.source.SourceTask; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.rocketmq.client.consumer.DefaultMQPullConsumer; import org.apache.rocketmq.client.consumer.PullResult; import org.apache.rocketmq.common.message.MessageExt; import org.apache.rocketmq.common.message.MessageQueue; import org.apache.rocketmq.replicator.common.Utils; import org.apache.rocketmq.replicator.config.ConfigUtil; import org.apache.rocketmq.replicator.config.DataType; import org.apache.rocketmq.replicator.config.TaskConfig; import org.apache.rocketmq.replicator.config.TaskTopicInfo; import org.apache.rocketmq.replicator.schema.FieldName; import org.apache.rocketmq.tools.admin.DefaultMQAdminExt; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; public class RmqSourceTask extends SourceTask { private static final Logger log = LoggerFactory.getLogger(RmqSourceTask.class); private final String taskId; private final TaskConfig config; private final DefaultMQPullConsumer consumer; private volatile boolean started = false; private Map<TaskTopicInfo, Long> mqOffsetMap; public RmqSourceTask() { this.config = new TaskConfig(); this.consumer = new DefaultMQPullConsumer(); this.taskId = Utils.createTaskId(Thread.currentThread().getName()); mqOffsetMap = new HashMap<>(); } public Collection<SourceDataEntry> poll() { if (this.config.getDataType() == DataType.COMMON_MESSAGE.ordinal()) { return pollCommonMessage(); } else if (this.config.getDataType() == DataType.TOPIC_CONFIG.ordinal()) { return pollTopicConfig(); } else if (this.config.getDataType() == DataType.BROKER_CONFIG.ordinal()) { return pollBrokerConfig(); } else { return pollSubConfig(); } } public void start(KeyValue config) { ConfigUtil.load(config, this.config); this.consumer.setConsumerGroup(this.taskId); this.consumer.setNamesrvAddr(this.config.getSourceRocketmq()); this.consumer.setInstanceName(Utils.createInstanceName(this.config.getSourceRocketmq())); List<TaskTopicInfo> topicList = JSONObject.parseArray(this.config.getTaskTopicList(), TaskTopicInfo.class); try { if (topicList == null) { throw new 
IllegalStateException("topicList is null"); } this.consumer.start(); for (TaskTopicInfo tti : topicList) { Set<MessageQueue> mqs = consumer.fetchSubscribeMessageQueues(tti.getTopic()); for (MessageQueue mq : mqs) { if (tti.getQueueId() == mq.getQueueId()) { ByteBuffer positionInfo = this.context.positionStorageReader().getPosition( ByteBuffer.wrap(RmqConstants.getPartition( mq.getTopic(), mq.getBrokerName(), String.valueOf(mq.getQueueId())).getBytes(StandardCharsets.UTF_8))); if (null != positionInfo && positionInfo.array().length > 0) { String positionJson = new String(positionInfo.array(), StandardCharsets.UTF_8); JSONObject jsonObject = JSONObject.parseObject(positionJson); this.config.setNextPosition(jsonObject.getLong(RmqConstants.NEXT_POSITION)); } else { this.config.setNextPosition(0L); } mqOffsetMap.put(tti, this.config.getNextPosition()); } } } started = true; } catch (Exception e) { log.error("Consumer of task {} start failed.", this.taskId, e); } log.info("RocketMQ source task started"); } @Override public void stop() { if (started) { if (this.consumer != null) { this.consumer.shutdown(); } started = false; } } public void pause() { } public void resume() { } private Collection<SourceDataEntry> pollCommonMessage() { List<SourceDataEntry> res = new ArrayList<>(); if (started) { try { for (TaskTopicInfo taskTopicConfig : this.mqOffsetMap.keySet()) { PullResult pullResult = consumer.pull(taskTopicConfig, "*", this.mqOffsetMap.get(taskTopicConfig), 32); switch (pullResult.getPullStatus()) { case FOUND: { this.mqOffsetMap.put(taskTopicConfig, pullResult.getNextBeginOffset()); JSONObject jsonObject = new JSONObject(); jsonObject.put(RmqConstants.NEXT_POSITION, pullResult.getNextBeginOffset()); List<MessageExt> msgs = pullResult.getMsgFoundList(); Schema schema = new Schema(); schema.setDataSource(this.config.getSourceRocketmq()); schema.setName(taskTopicConfig.getTopic()); schema.setFields(new ArrayList<>()); schema.getFields().add(new Field(0, FieldName.COMMON_MESSAGE.getKey(), FieldType.STRING)); for (MessageExt msg : msgs) { DataEntryBuilder dataEntryBuilder = new DataEntryBuilder(schema); dataEntryBuilder.timestamp(System.currentTimeMillis()) .queue(this.config.getStoreTopic()).entryType(EntryType.CREATE); dataEntryBuilder.putFiled(FieldName.COMMON_MESSAGE.getKey(), new String(msg.getBody())); SourceDataEntry sourceDataEntry = dataEntryBuilder.buildSourceDataEntry( ByteBuffer.wrap(RmqConstants.getPartition( taskTopicConfig.getTopic(), taskTopicConfig.getBrokerName(), String.valueOf(taskTopicConfig.getQueueId())).getBytes(StandardCharsets.UTF_8)), ByteBuffer.wrap(jsonObject.toJSONString().getBytes(StandardCharsets.UTF_8)) ); sourceDataEntry.setQueueName(taskTopicConfig.getTargetTopic()); res.add(sourceDataEntry); } break; } default: break; } } } catch (Exception e) { log.error("Rocketmq replicator task poll error, current config: {}", JSON.toJSONString(config), e); } } else { if (System.currentTimeMillis() % 1000 == 0) { log.warn("Rocketmq replicator task is not started."); } } return res; } private Collection<SourceDataEntry> pollTopicConfig() { DefaultMQAdminExt srcMQAdminExt; return new ArrayList<>(); } private Collection<SourceDataEntry> pollBrokerConfig() { return new ArrayList<>(); } private Collection<SourceDataEntry> pollSubConfig() { return new ArrayList<>(); } }
rocketmq-replicator/src/main/java/org/apache/rocketmq/replicator/RmqSourceTask.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.rocketmq.replicator; import com.alibaba.fastjson.JSON; import com.alibaba.fastjson.JSONObject; import io.openmessaging.KeyValue; import io.openmessaging.connector.api.data.DataEntryBuilder; import io.openmessaging.connector.api.data.EntryType; import io.openmessaging.connector.api.data.Field; import io.openmessaging.connector.api.data.FieldType; import io.openmessaging.connector.api.data.Schema; import io.openmessaging.connector.api.data.SourceDataEntry; import io.openmessaging.connector.api.source.SourceTask; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.rocketmq.client.consumer.DefaultMQPullConsumer; import org.apache.rocketmq.client.consumer.PullResult; import org.apache.rocketmq.common.message.MessageExt; import org.apache.rocketmq.common.message.MessageQueue; import org.apache.rocketmq.replicator.common.Utils; import org.apache.rocketmq.replicator.config.ConfigUtil; import org.apache.rocketmq.replicator.config.DataType; import org.apache.rocketmq.replicator.config.TaskConfig; import org.apache.rocketmq.replicator.config.TaskTopicInfo; import org.apache.rocketmq.replicator.schema.FieldName; import org.apache.rocketmq.tools.admin.DefaultMQAdminExt; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.nio.ByteBuffer; public class RmqSourceTask extends SourceTask { private static final Logger log = LoggerFactory.getLogger(RmqSourceTask.class); private final String taskId; private final TaskConfig config; private final DefaultMQPullConsumer consumer; private volatile boolean started = false; private Map<TaskTopicInfo, Long> mqOffsetMap; public RmqSourceTask() { this.config = new TaskConfig(); this.consumer = new DefaultMQPullConsumer(); this.taskId = Utils.createTaskId(Thread.currentThread().getName()); mqOffsetMap = new HashMap<>(); } public Collection<SourceDataEntry> poll() { if (this.config.getDataType() == DataType.COMMON_MESSAGE.ordinal()) { return pollCommonMessage(); } else if (this.config.getDataType() == DataType.TOPIC_CONFIG.ordinal()) { return pollTopicConfig(); } else if (this.config.getDataType() == DataType.BROKER_CONFIG.ordinal()) { return pollBrokerConfig(); } else { return pollSubConfig(); } } public void start(KeyValue config) { ConfigUtil.load(config, this.config); this.consumer.setConsumerGroup(this.taskId); this.consumer.setNamesrvAddr(this.config.getSourceRocketmq()); this.consumer.setInstanceName(Utils.createInstanceName(this.config.getSourceRocketmq())); List<TaskTopicInfo> topicList = JSONObject.parseArray(this.config.getTaskTopicList(), TaskTopicInfo.class); try { if (topicList == null) { throw new 
IllegalStateException("topicList is null"); } this.consumer.start(); for (TaskTopicInfo tti : topicList) { Set<MessageQueue> mqs = consumer.fetchSubscribeMessageQueues(tti.getTopic()); for (MessageQueue mq : mqs) { if (tti.getQueueId() == mq.getQueueId()) { ByteBuffer positionInfo = this.context.positionStorageReader().getPosition( ByteBuffer.wrap(RmqConstants.getPartition( mq.getTopic(), mq.getBrokerName(), String.valueOf(mq.getQueueId())).getBytes(StandardCharsets.UTF_8))); if (null != positionInfo && positionInfo.array().length > 0) { String positionJson = new String(positionInfo.array(), StandardCharsets.UTF_8); JSONObject jsonObject = JSONObject.parseObject(positionJson); this.config.setNextPosition(jsonObject.getLong(RmqConstants.NEXT_POSITION)); } else { this.config.setNextPosition(0L); } mqOffsetMap.put(tti, this.config.getNextPosition()); } } } started = true; } catch (Exception e) { log.error("Consumer of task {} start failed.", this.taskId, e); } log.info("RocketMQ source task started"); } @Override public void stop() { if (started) { if (this.consumer != null) { this.consumer.shutdown(); } started = false; } } public void pause() { } public void resume() { } private Collection<SourceDataEntry> pollCommonMessage() { List<SourceDataEntry> res = new ArrayList<>(); if (started) { try { for (TaskTopicInfo taskTopicConfig : this.mqOffsetMap.keySet()) { PullResult pullResult = consumer.pull(taskTopicConfig, "*", this.mqOffsetMap.get(taskTopicConfig), 32); switch (pullResult.getPullStatus()) { case FOUND: { this.mqOffsetMap.put(taskTopicConfig, pullResult.getNextBeginOffset()); JSONObject jsonObject = new JSONObject(); jsonObject.put(RmqConstants.NEXT_POSITION, pullResult.getNextBeginOffset()); List<MessageExt> msgs = pullResult.getMsgFoundList(); Schema schema = new Schema(); schema.setDataSource(this.config.getSourceRocketmq()); schema.setName(taskTopicConfig.getTopic()); schema.setFields(new ArrayList<>()); schema.getFields().add(new Field(0, FieldName.COMMON_MESSAGE.getKey(), FieldType.STRING)); DataEntryBuilder dataEntryBuilder = new DataEntryBuilder(schema); dataEntryBuilder.timestamp(System.currentTimeMillis()) .queue(this.config.getStoreTopic()).entryType(EntryType.CREATE); for (MessageExt msg : msgs) { dataEntryBuilder.putFiled(FieldName.COMMON_MESSAGE.getKey(), new String(msg.getBody())); SourceDataEntry sourceDataEntry = dataEntryBuilder.buildSourceDataEntry( ByteBuffer.wrap(RmqConstants.getPartition( taskTopicConfig.getTopic(), taskTopicConfig.getBrokerName(), String.valueOf(taskTopicConfig.getQueueId())).getBytes(StandardCharsets.UTF_8)), ByteBuffer.wrap(jsonObject.toJSONString().getBytes(StandardCharsets.UTF_8)) ); sourceDataEntry.setQueueName(taskTopicConfig.getTargetTopic()); res.add(sourceDataEntry); } break; } default: break; } } } catch (Exception e) { log.error("Rocketmq replicator task poll error, current config: {}", JSON.toJSONString(config), e); } } else { if (System.currentTimeMillis() % 1000 == 0) { log.warn("Rocketmq replicator task is not started."); } } return res; } private Collection<SourceDataEntry> pollTopicConfig() { DefaultMQAdminExt srcMQAdminExt; return new ArrayList<>(); } private Collection<SourceDataEntry> pollBrokerConfig() { return new ArrayList<>(); } private Collection<SourceDataEntry> pollSubConfig() { return new ArrayList<>(); } }
[Replicator] Fix message duplication problem (#692) Signed-off-by: zhangyang <a8166ffe88d07826448eb0bd87845596df1ab293@163.com>
rocketmq-replicator/src/main/java/org/apache/rocketmq/replicator/RmqSourceTask.java
[Replicator] Fix message duplication problem (#692)
Java
apache-2.0
6a91360ddcceb0ab5e48e3ae62611bfd13f50972
0
Kirezzz/java_programming
package ru.stqa.java.adressbook; import org.testng.annotations.BeforeMethod; import org.testng.annotations.AfterMethod; import org.testng.annotations.Test; import java.util.concurrent.TimeUnit; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.*; public class AddressCreationTests { FirefoxDriver wd; @BeforeMethod public void setUp() throws Exception { wd = new FirefoxDriver(); wd.manage().timeouts().implicitlyWait(60, TimeUnit.SECONDS); wd.get("http://localhost/addressbook/"); login("admin", "secret"); } @Test public void testAddressCreation() { initAddressCreation(); fillAddressForm(new AddressData("First name1", "Middle name1", "Last name1", "address1", "111-222-333", "test1@test.com")); submitAddressCreation(); returnToHomePage(); } private void returnToHomePage() { wd.findElement(By.linkText("home page")).click(); } private void submitAddressCreation() { wd.findElement(By.xpath("//div[@id='content']/form/input[21]")).click(); } private void fillAddressForm(AddressData addressData) { wd.findElement(By.name("firstname")).click(); wd.findElement(By.name("firstname")).clear(); wd.findElement(By.name("firstname")).sendKeys(addressData.getFirstname()); wd.findElement(By.name("middlename")).click(); wd.findElement(By.name("middlename")).clear(); wd.findElement(By.name("middlename")).sendKeys(addressData.getMiddlename()); wd.findElement(By.name("lastname")).click(); wd.findElement(By.name("lastname")).clear(); wd.findElement(By.name("lastname")).sendKeys(addressData.getLastname()); wd.findElement(By.name("address")).click(); wd.findElement(By.name("address")).clear(); wd.findElement(By.name("address")).sendKeys(addressData.getAddress1()); wd.findElement(By.name("home")).click(); wd.findElement(By.name("home")).clear(); wd.findElement(By.name("home")).sendKeys(addressData.getTelhome()); wd.findElement(By.name("email")).click(); wd.findElement(By.name("email")).clear(); wd.findElement(By.name("email")).sendKeys(addressData.getEmail1()); } private void initAddressCreation() { wd.findElement(By.linkText("add new")).click(); } private void login(String username, String password) { wd.findElement(By.name("user")).click(); wd.findElement(By.name("user")).clear(); wd.findElement(By.name("user")).sendKeys(username); wd.findElement(By.name("pass")).click(); wd.findElement(By.name("pass")).clear(); wd.findElement(By.name("pass")).sendKeys(password); wd.findElement(By.xpath("//form[@id='LoginForm']/input[3]")).click(); } @AfterMethod public void tearDown() { wd.quit(); } public static boolean isAlertPresent(FirefoxDriver wd) { try { wd.switchTo().alert(); return true; } catch (NoAlertPresentException e) { return false; } } }
addressbook-web-tests/src/test/java/ru/stqa/java/adressbook/AddressCreationTests.java
package ru.stqa.java.adressbook; import org.testng.annotations.BeforeMethod; import org.testng.annotations.AfterMethod; import org.testng.annotations.Test; import java.util.concurrent.TimeUnit; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.*; public class AddressCreationTests { FirefoxDriver wd; @BeforeMethod public void setUp() throws Exception { wd = new FirefoxDriver(); wd.manage().timeouts().implicitlyWait(60, TimeUnit.SECONDS); wd.get("http://localhost/addressbook/"); login("admin", "secret"); } @Test public void AddressCreationTests() { initAddressCreation(); fillAddressForm(new AddressData("First name1", "Middle name1", "Last name1", "address1", "111-222-333", "test1@test.com")); submitAddressCreation(); returnToHomePage(); } private void returnToHomePage() { wd.findElement(By.linkText("home page")).click(); } private void submitAddressCreation() { wd.findElement(By.xpath("//div[@id='content']/form/input[21]")).click(); } private void fillAddressForm(AddressData addressData) { wd.findElement(By.name("firstname")).click(); wd.findElement(By.name("firstname")).clear(); wd.findElement(By.name("firstname")).sendKeys(addressData.getFirstname()); wd.findElement(By.name("middlename")).click(); wd.findElement(By.name("middlename")).clear(); wd.findElement(By.name("middlename")).sendKeys(addressData.getMiddlename()); wd.findElement(By.name("lastname")).click(); wd.findElement(By.name("lastname")).clear(); wd.findElement(By.name("lastname")).sendKeys(addressData.getLastname()); wd.findElement(By.name("address")).click(); wd.findElement(By.name("address")).clear(); wd.findElement(By.name("address")).sendKeys(addressData.getAddress1()); wd.findElement(By.name("home")).click(); wd.findElement(By.name("home")).clear(); wd.findElement(By.name("home")).sendKeys(addressData.getTelhome()); wd.findElement(By.name("email")).click(); wd.findElement(By.name("email")).clear(); wd.findElement(By.name("email")).sendKeys(addressData.getEmail1()); } private void initAddressCreation() { wd.findElement(By.linkText("add new")).click(); } private void login(String username, String password) { wd.findElement(By.name("user")).click(); wd.findElement(By.name("user")).clear(); wd.findElement(By.name("user")).sendKeys(username); wd.findElement(By.name("pass")).click(); wd.findElement(By.name("pass")).clear(); wd.findElement(By.name("pass")).sendKeys(password); wd.findElement(By.xpath("//form[@id='LoginForm']/input[3]")).click(); } @AfterMethod public void tearDown() { wd.quit(); } public static boolean isAlertPresent(FirefoxDriver wd) { try { wd.switchTo().alert(); return true; } catch (NoAlertPresentException e) { return false; } } }
Fixed the method name in AddressCreationTests
addressbook-web-tests/src/test/java/ru/stqa/java/adressbook/AddressCreationTests.java
Fixed the method name in AddressCreationTests
Java
apache-2.0
e1a46881212950d2cfc450a9180cabed3316f0c2
0
senseobservationsystems/sense-android-library,senseobservationsystems/sense-android-library
package nl.sense.demo; import org.json.JSONArray; import android.os.Bundle; import android.app.Activity; import android.content.ComponentName; import android.util.Log; import android.view.Menu; import nl.sense.demo.R; //import bunch of sense library stuff import nl.sense_os.platform.SensePlatform; import nl.sense_os.platform.ServiceConnectionEventHandler; import nl.sense_os.service.ISenseService; import nl.sense_os.service.ISenseServiceCallback; import android.os.RemoteException; import nl.sense_os.service.commonsense.SenseApi; import nl.sense_os.service.constants.SensePrefs; import nl.sense_os.service.constants.SensePrefs.Main.Ambience; import nl.sense_os.service.constants.SensePrefs.Main.Location; public class MainActivity extends Activity implements ServiceConnectionEventHandler { private static final String TAG = "Sense Demo"; private SensePlatform sensePlatform; /** * Service stub for callbacks from the Sense service. */ private class SenseCallback extends ISenseServiceCallback.Stub { @Override public void onChangeLoginResult(int result) throws RemoteException { switch (result) { case 0: Log.v(TAG, "Change login OK"); loggedIn(); break; case -1: Log.v(TAG, "Login failed! Connectivity problems?"); break; case -2: Log.v(TAG, "Login failed! Invalid username or password."); break; default: Log.w(TAG, "Unexpected login result! Unexpected result: " + result); } } @Override public void onRegisterResult(int result) throws RemoteException { } @Override public void statusReport(final int status) { } } private SenseCallback callback = new SenseCallback(); @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.activity_main, menu); return true; } @Override protected void onStart() { Log.v(TAG, "onStart"); super.onStart(); sensePlatform = new SensePlatform(this, this); } @Override protected void onStop() { Log.v(TAG, "onStop"); super.onStop(); } private void setupSense() { try { sensePlatform.login("foo", "bar", callback); //turn off specific sensors sensePlatform.service().setPrefBool(Ambience.LIGHT, false); sensePlatform.service().setPrefBool(Ambience.CAMERA_LIGHT, false); sensePlatform.service().setPrefBool(Ambience.PRESSURE, false); //turn on specific sensors sensePlatform.service().setPrefBool(Ambience.MIC, true); //NOTE: spectrum might be too heavy for the phone or consume too much energy sensePlatform.service().setPrefBool(Ambience.AUDIO_SPECTRUM, true); sensePlatform.service().setPrefBool(Location.GPS, true); sensePlatform.service().setPrefBool(Location.NETWORK, true); sensePlatform.service().setPrefBool(Location.AUTO_GPS, true); //set how often to sample sensePlatform.service().setPrefString(SensePrefs.Main.SAMPLE_RATE, "0"); //set how often to upload // 0 == eco mode // 1 == normal (5 min) //-1 == often (1 min) //-2 == realtime //NOTE, this setting affects power consumption considerately! sensePlatform.service().setPrefString(SensePrefs.Main.SYNC_RATE, "-2"); } catch (Exception e) { Log.v(TAG, "Exception " + e + " while setting up sense library."); e.printStackTrace(); } } void loggedIn() { try { // turn it on sensePlatform.service().toggleMain(true); sensePlatform.service().toggleAmbience(true); sensePlatform.service().toggleLocation(true); } catch (Exception e) { Log.v(TAG, "Exception " + e + " while starting sense library."); e.printStackTrace(); } } /** An example of how to upload data for a custom sensor. 
*/ void sendData() { //Description of the sensor String name = "position_annotation"; String displayName = "Annotation"; String dataType = "json"; String description = name; //the value to be sent, in json format String value = "{\"latitude\":\"51.903469\",\"longitude\":\"4.459865\",\"comment\":\"What a nice quiet place!\"}"; //json value long timestamp = System.currentTimeMillis(); try { sensePlatform.addDataPoint(name, displayName, description, dataType, value, timestamp); } catch (Exception e) { Log.e(TAG, "Failed to add data point."); e.printStackTrace(); } } /** An example how to get data from a sensor * */ void getData() { try { JSONArray data = sensePlatform.getData("position", true, 10); Log.v(TAG, "Received:" + data); } catch (Exception e) { Log.e(TAG, "Failed to get data."); e.printStackTrace(); } } @Override public void onServiceConnected(ComponentName className, ISenseService service) { setupSense(); sendData(); //TODO: it seems this request is performed too early? getData(); } @Override public void onServiceDisconnected(ComponentName className) { } }
Demo/src/nl/sense/demo/MainActivity.java
package nl.sense.demo; import org.json.JSONArray; import android.os.Bundle; import android.app.Activity; import android.content.ComponentName; import android.util.Log; import android.view.Menu; import nl.sense.demo.R; //import bunch of sense library stuff import nl.sense_os.platform.SensePlatform; import nl.sense_os.platform.ServiceConnectionEventHandler; import nl.sense_os.service.ISenseService; import nl.sense_os.service.commonsense.SenseApi; import nl.sense_os.service.constants.SensePrefs; import nl.sense_os.service.constants.SensePrefs.Main.Ambience; import nl.sense_os.service.constants.SensePrefs.Main.Location; public class MainActivity extends Activity implements ServiceConnectionEventHandler { private static final String TAG = "Sense Demo"; private SensePlatform sensePlatform; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.activity_main, menu); return true; } @Override protected void onStart() { Log.v(TAG, "onStart"); super.onStart(); sensePlatform = new SensePlatform(this, this); } @Override protected void onStop() { Log.v(TAG, "onStop"); super.onStop(); } private void setupSense() { try { sensePlatform.login("foo", "bar"); //turn off specific sensors sensePlatform.service().setPrefBool(Ambience.LIGHT, false); sensePlatform.service().setPrefBool(Ambience.CAMERA_LIGHT, false); sensePlatform.service().setPrefBool(Ambience.PRESSURE, false); //turn on specific sensors sensePlatform.service().setPrefBool(Ambience.MIC, true); //NOTE: spectrum might be too heavy for the phone or consume too much energy sensePlatform.service().setPrefBool(Ambience.AUDIO_SPECTRUM, true); sensePlatform.service().setPrefBool(Location.GPS, true); sensePlatform.service().setPrefBool(Location.NETWORK, true); sensePlatform.service().setPrefBool(Location.AUTO_GPS, true); //set how often to sample sensePlatform.service().setPrefString(SensePrefs.Main.SAMPLE_RATE, "0"); //set how often to upload // 0 == eco mode // 1 == normal (5 min) //-1 == often (1 min) //-2 == realtime //NOTE, this setting affects power consumption considerately! sensePlatform.service().setPrefString(SensePrefs.Main.SYNC_RATE, "-2"); //and turn it on sensePlatform.service().toggleMain(true); sensePlatform.service().toggleAmbience(true); sensePlatform.service().toggleLocation(true); } catch (Exception e) { Log.v(TAG, "Exception " + e + " while setting up sense library."); e.printStackTrace(); } } /** An example of how to upload data for a custom sensor. 
*/ void sendData() { //Description of the sensor String name = "position_annotation"; String displayName = "Annotation"; String dataType = "json"; String description = name; //the value to be sent, in json format String value = "{\"latitude\":\"51.903469\",\"longitude\":\"4.459865\",\"comment\":\"What a nice quiet place!\"}"; //json value long timestamp = System.currentTimeMillis(); try { sensePlatform.addDataPoint(name, displayName, description, dataType, value, timestamp); } catch (Exception e) { Log.e(TAG, "Failed to add data point."); e.printStackTrace(); } } /** An example how to get data from a sensor * */ void getData() { try { JSONArray data = sensePlatform.getData("position", true, 10); Log.v(TAG, "Received:" + data); } catch (Exception e) { Log.e(TAG, "Failed to get data."); e.printStackTrace(); } } @Override public void onServiceConnected(ComponentName className, ISenseService service) { setupSense(); sendData(); //TODO: it seems this request is performed too early? getData(); } @Override public void onServiceDisconnected(ComponentName className) { } }
Update due to library update. Use callback for login.
Demo/src/nl/sense/demo/MainActivity.java
Update due to library update.
Java
apache-2.0
40dad6dff48f1fc26546ad6e6faca27db6d737cd
0
implydata/druid,mghosh4/druid,yaochitc/druid-dev,deltaprojects/druid,potto007/druid-avro,b-slim/druid,gianm/druid,yaochitc/druid-dev,michaelschiff/druid,pjain1/druid,solimant/druid,pjain1/druid,andy256/druid,pdeva/druid,pdeva/druid,michaelschiff/druid,Fokko/druid,tubemogul/druid,leventov/druid,yaochitc/druid-dev,mrijke/druid,praveev/druid,knoguchi/druid,winval/druid,pdeva/druid,Fokko/druid,solimant/druid,dclim/druid,liquidm/druid,rasahner/druid,himanshug/druid,gianm/druid,monetate/druid,deltaprojects/druid,jon-wei/druid,yaochitc/druid-dev,winval/druid,nishantmonu51/druid,jon-wei/druid,druid-io/druid,monetate/druid,solimant/druid,redBorder/druid,winval/druid,dclim/druid,dkhwangbo/druid,pdeva/druid,lizhanhui/data_druid,nishantmonu51/druid,himanshug/druid,deltaprojects/druid,potto007/druid-avro,pdeva/druid,lizhanhui/data_druid,metamx/druid,mghosh4/druid,du00cs/druid,jon-wei/druid,praveev/druid,implydata/druid,monetate/druid,noddi/druid,b-slim/druid,liquidm/druid,nishantmonu51/druid,KurtYoung/druid,lizhanhui/data_druid,andy256/druid,dkhwangbo/druid,metamx/druid,KurtYoung/druid,himanshug/druid,pjain1/druid,solimant/druid,michaelschiff/druid,du00cs/druid,druid-io/druid,guobingkun/druid,b-slim/druid,redBorder/druid,noddi/druid,dclim/druid,Fokko/druid,erikdubbelboer/druid,knoguchi/druid,zxs/druid,michaelschiff/druid,mghosh4/druid,potto007/druid-avro,taochaoqiang/druid,implydata/druid,deltaprojects/druid,praveev/druid,andy256/druid,pjain1/druid,liquidm/druid,potto007/druid-avro,guobingkun/druid,guobingkun/druid,gianm/druid,dclim/druid,b-slim/druid,redBorder/druid,deltaprojects/druid,mrijke/druid,zhihuij/druid,implydata/druid,mrijke/druid,liquidm/druid,liquidm/druid,zxs/druid,Fokko/druid,gianm/druid,jon-wei/druid,andy256/druid,pjain1/druid,mrijke/druid,taochaoqiang/druid,Fokko/druid,andy256/druid,monetate/druid,knoguchi/druid,zhihuij/druid,mghosh4/druid,pjain1/druid,tubemogul/druid,himanshug/druid,erikdubbelboer/druid,guobingkun/druid,mghosh4/druid,lizhanhui/data_druid,mrijke/druid,redBorder/druid,leventov/druid,dkhwangbo/druid,KurtYoung/druid,monetate/druid,winval/druid,druid-io/druid,KurtYoung/druid,michaelschiff/druid,michaelschiff/druid,redBorder/druid,nishantmonu51/druid,druid-io/druid,erikdubbelboer/druid,tubemogul/druid,erikdubbelboer/druid,noddi/druid,implydata/druid,jon-wei/druid,metamx/druid,pjain1/druid,leventov/druid,Fokko/druid,potto007/druid-avro,metamx/druid,himanshug/druid,winval/druid,gianm/druid,liquidm/druid,praveev/druid,nishantmonu51/druid,leventov/druid,noddi/druid,implydata/druid,nishantmonu51/druid,druid-io/druid,guobingkun/druid,metamx/druid,dkhwangbo/druid,michaelschiff/druid,gianm/druid,monetate/druid,du00cs/druid,tubemogul/druid,lizhanhui/data_druid,noddi/druid,zxs/druid,tubemogul/druid,praveev/druid,solimant/druid,nishantmonu51/druid,zxs/druid,leventov/druid,jon-wei/druid,knoguchi/druid,rasahner/druid,erikdubbelboer/druid,knoguchi/druid,jon-wei/druid,rasahner/druid,b-slim/druid,mghosh4/druid,Fokko/druid,zhihuij/druid,zhihuij/druid,yaochitc/druid-dev,monetate/druid,zhihuij/druid,taochaoqiang/druid,zxs/druid,dclim/druid,mghosh4/druid,gianm/druid,du00cs/druid,rasahner/druid,taochaoqiang/druid,taochaoqiang/druid,du00cs/druid,deltaprojects/druid,dkhwangbo/druid,deltaprojects/druid,rasahner/druid,KurtYoung/druid
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.query.timeseries; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.granularity.QueryGranularity; import io.druid.query.CacheStrategy; import io.druid.query.DruidMetrics; import io.druid.query.IntervalChunkingQueryRunnerDecorator; import io.druid.query.Query; import io.druid.query.QueryCacheHelper; import io.druid.query.QueryRunner; import io.druid.query.QueryToolChest; import io.druid.query.Result; import io.druid.query.ResultGranularTimestampComparator; import io.druid.query.ResultMergeQueryRunner; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.MetricManipulationFn; import io.druid.query.aggregation.PostAggregator; import io.druid.query.filter.DimFilter; import org.joda.time.DateTime; import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Iterator; import java.util.List; import java.util.Map; /** */ public class TimeseriesQueryQueryToolChest extends QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery> { private static final byte TIMESERIES_QUERY = 0x0; private static final TypeReference<Object> OBJECT_TYPE_REFERENCE = new TypeReference<Object>() { }; private static final TypeReference<Result<TimeseriesResultValue>> TYPE_REFERENCE = new TypeReference<Result<TimeseriesResultValue>>() { }; private final IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator; @Inject public TimeseriesQueryQueryToolChest(IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator) { this.intervalChunkingQueryRunnerDecorator = intervalChunkingQueryRunnerDecorator; } @Override public QueryRunner<Result<TimeseriesResultValue>> mergeResults( QueryRunner<Result<TimeseriesResultValue>> queryRunner ) { return new ResultMergeQueryRunner<Result<TimeseriesResultValue>>(queryRunner) { @Override protected Ordering<Result<TimeseriesResultValue>> makeOrdering(Query<Result<TimeseriesResultValue>> query) { return ResultGranularTimestampComparator.create( ((TimeseriesQuery) query).getGranularity(), query.isDescending() ); } @Override protected BinaryFn<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> createMergeFn( Query<Result<TimeseriesResultValue>> input ) { TimeseriesQuery query = (TimeseriesQuery) input; return new TimeseriesBinaryFn( query.getGranularity(), query.getAggregatorSpecs() ); } }; } @Override public ServiceMetricEvent.Builder makeMetricBuilder(TimeseriesQuery 
query) { return DruidMetrics.makePartialQueryTimeMetric(query) .setDimension( "numMetrics", String.valueOf(query.getAggregatorSpecs().size()) ) .setDimension( "numComplexMetrics", String.valueOf(DruidMetrics.findNumComplexAggs(query.getAggregatorSpecs())) ); } @Override public TypeReference<Result<TimeseriesResultValue>> getResultTypeReference() { return TYPE_REFERENCE; } @Override public CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> getCacheStrategy(final TimeseriesQuery query) { return new CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery>() { private final List<AggregatorFactory> aggs = query.getAggregatorSpecs(); @Override public byte[] computeCacheKey(TimeseriesQuery query) { final DimFilter dimFilter = query.getDimensionsFilter(); final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey(); final byte[] aggregatorBytes = QueryCacheHelper.computeAggregatorBytes(query.getAggregatorSpecs()); final byte[] granularityBytes = query.getGranularity().cacheKey(); final byte descending = query.isDescending() ? (byte) 1 : 0; final byte skipEmptyBuckets = query.isSkipEmptyBuckets() ? (byte) 1 : 0; return ByteBuffer .allocate(3 + granularityBytes.length + filterBytes.length + aggregatorBytes.length) .put(TIMESERIES_QUERY) .put(descending) .put(skipEmptyBuckets) .put(granularityBytes) .put(filterBytes) .put(aggregatorBytes) .array(); } @Override public TypeReference<Object> getCacheObjectClazz() { return OBJECT_TYPE_REFERENCE; } @Override public Function<Result<TimeseriesResultValue>, Object> prepareForCache() { return new Function<Result<TimeseriesResultValue>, Object>() { @Override public Object apply(final Result<TimeseriesResultValue> input) { TimeseriesResultValue results = input.getValue(); final List<Object> retVal = Lists.newArrayListWithCapacity(1 + aggs.size()); retVal.add(input.getTimestamp().getMillis()); for (AggregatorFactory agg : aggs) { retVal.add(results.getMetric(agg.getName())); } return retVal; } }; } @Override public Function<Object, Result<TimeseriesResultValue>> pullFromCache() { return new Function<Object, Result<TimeseriesResultValue>>() { private final QueryGranularity granularity = query.getGranularity(); @Override public Result<TimeseriesResultValue> apply(@Nullable Object input) { List<Object> results = (List<Object>) input; Map<String, Object> retVal = Maps.newLinkedHashMap(); Iterator<AggregatorFactory> aggsIter = aggs.iterator(); Iterator<Object> resultIter = results.iterator(); DateTime timestamp = granularity.toDateTime(((Number) resultIter.next()).longValue()); while (aggsIter.hasNext() && resultIter.hasNext()) { final AggregatorFactory factory = aggsIter.next(); retVal.put(factory.getName(), factory.deserialize(resultIter.next())); } return new Result<TimeseriesResultValue>( timestamp, new TimeseriesResultValue(retVal) ); } }; } }; } @Override public QueryRunner<Result<TimeseriesResultValue>> preMergeQueryDecoration(QueryRunner<Result<TimeseriesResultValue>> runner) { return intervalChunkingQueryRunnerDecorator.decorate(runner, this); } @Override public Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makePreComputeManipulatorFn( final TimeseriesQuery query, final MetricManipulationFn fn ) { return makeComputeManipulatorFn(query, fn, false); } @Override public Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makePostComputeManipulatorFn( TimeseriesQuery query, MetricManipulationFn fn ) { return makeComputeManipulatorFn(query, fn, true); } private 
Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn( final TimeseriesQuery query, final MetricManipulationFn fn, final boolean calculatePostAggs ) { return new Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>>() { @Override public Result<TimeseriesResultValue> apply(Result<TimeseriesResultValue> result) { final TimeseriesResultValue holder = result.getValue(); final Map<String, Object> values = Maps.newHashMap(holder.getBaseObject()); if (calculatePostAggs) { // put non finalized aggregators for calculating dependent post Aggregators for (AggregatorFactory agg : query.getAggregatorSpecs()) { values.put(agg.getName(), holder.getMetric(agg.getName())); } for (PostAggregator postAgg : query.getPostAggregatorSpecs()) { values.put(postAgg.getName(), postAgg.compute(values)); } } for (AggregatorFactory agg : query.getAggregatorSpecs()) { values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName()))); } return new Result<TimeseriesResultValue>( result.getTimestamp(), new TimeseriesResultValue(values) ); } }; } }
processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java
/* * Licensed to Metamarkets Group Inc. (Metamarkets) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. Metamarkets licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.query.timeseries; import com.fasterxml.jackson.core.type.TypeReference; import com.google.common.base.Function; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Ordering; import com.google.inject.Inject; import com.metamx.common.guava.nary.BinaryFn; import com.metamx.emitter.service.ServiceMetricEvent; import io.druid.granularity.QueryGranularity; import io.druid.query.CacheStrategy; import io.druid.query.DruidMetrics; import io.druid.query.IntervalChunkingQueryRunnerDecorator; import io.druid.query.Query; import io.druid.query.QueryCacheHelper; import io.druid.query.QueryRunner; import io.druid.query.QueryToolChest; import io.druid.query.Result; import io.druid.query.ResultGranularTimestampComparator; import io.druid.query.ResultMergeQueryRunner; import io.druid.query.aggregation.AggregatorFactory; import io.druid.query.aggregation.MetricManipulationFn; import io.druid.query.aggregation.PostAggregator; import io.druid.query.filter.DimFilter; import org.joda.time.DateTime; import javax.annotation.Nullable; import java.nio.ByteBuffer; import java.util.Iterator; import java.util.List; import java.util.Map; /** */ public class TimeseriesQueryQueryToolChest extends QueryToolChest<Result<TimeseriesResultValue>, TimeseriesQuery> { private static final byte TIMESERIES_QUERY = 0x0; private static final TypeReference<Object> OBJECT_TYPE_REFERENCE = new TypeReference<Object>() { }; private static final TypeReference<Result<TimeseriesResultValue>> TYPE_REFERENCE = new TypeReference<Result<TimeseriesResultValue>>() { }; private final IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator; @Inject public TimeseriesQueryQueryToolChest(IntervalChunkingQueryRunnerDecorator intervalChunkingQueryRunnerDecorator) { this.intervalChunkingQueryRunnerDecorator = intervalChunkingQueryRunnerDecorator; } @Override public QueryRunner<Result<TimeseriesResultValue>> mergeResults( QueryRunner<Result<TimeseriesResultValue>> queryRunner ) { return new ResultMergeQueryRunner<Result<TimeseriesResultValue>>(queryRunner) { @Override protected Ordering<Result<TimeseriesResultValue>> makeOrdering(Query<Result<TimeseriesResultValue>> query) { return ResultGranularTimestampComparator.create( ((TimeseriesQuery) query).getGranularity(), query.isDescending() ); } @Override protected BinaryFn<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> createMergeFn( Query<Result<TimeseriesResultValue>> input ) { TimeseriesQuery query = (TimeseriesQuery) input; return new TimeseriesBinaryFn( query.getGranularity(), query.getAggregatorSpecs() ); } }; } @Override public ServiceMetricEvent.Builder makeMetricBuilder(TimeseriesQuery 
query) { return DruidMetrics.makePartialQueryTimeMetric(query) .setDimension( "numMetrics", String.valueOf(query.getAggregatorSpecs().size()) ) .setDimension( "numComplexMetrics", String.valueOf(DruidMetrics.findNumComplexAggs(query.getAggregatorSpecs())) ); } @Override public TypeReference<Result<TimeseriesResultValue>> getResultTypeReference() { return TYPE_REFERENCE; } @Override public CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery> getCacheStrategy(final TimeseriesQuery query) { return new CacheStrategy<Result<TimeseriesResultValue>, Object, TimeseriesQuery>() { private final List<AggregatorFactory> aggs = query.getAggregatorSpecs(); @Override public byte[] computeCacheKey(TimeseriesQuery query) { final DimFilter dimFilter = query.getDimensionsFilter(); final byte[] filterBytes = dimFilter == null ? new byte[]{} : dimFilter.getCacheKey(); final byte[] aggregatorBytes = QueryCacheHelper.computeAggregatorBytes(query.getAggregatorSpecs()); final byte[] granularityBytes = query.getGranularity().cacheKey(); final byte descending = query.isDescending() ? (byte)1 : 0; return ByteBuffer .allocate(2 + granularityBytes.length + filterBytes.length + aggregatorBytes.length) .put(TIMESERIES_QUERY) .put(descending) .put(granularityBytes) .put(filterBytes) .put(aggregatorBytes) .array(); } @Override public TypeReference<Object> getCacheObjectClazz() { return OBJECT_TYPE_REFERENCE; } @Override public Function<Result<TimeseriesResultValue>, Object> prepareForCache() { return new Function<Result<TimeseriesResultValue>, Object>() { @Override public Object apply(final Result<TimeseriesResultValue> input) { TimeseriesResultValue results = input.getValue(); final List<Object> retVal = Lists.newArrayListWithCapacity(1 + aggs.size()); retVal.add(input.getTimestamp().getMillis()); for (AggregatorFactory agg : aggs) { retVal.add(results.getMetric(agg.getName())); } return retVal; } }; } @Override public Function<Object, Result<TimeseriesResultValue>> pullFromCache() { return new Function<Object, Result<TimeseriesResultValue>>() { private final QueryGranularity granularity = query.getGranularity(); @Override public Result<TimeseriesResultValue> apply(@Nullable Object input) { List<Object> results = (List<Object>) input; Map<String, Object> retVal = Maps.newLinkedHashMap(); Iterator<AggregatorFactory> aggsIter = aggs.iterator(); Iterator<Object> resultIter = results.iterator(); DateTime timestamp = granularity.toDateTime(((Number) resultIter.next()).longValue()); while (aggsIter.hasNext() && resultIter.hasNext()) { final AggregatorFactory factory = aggsIter.next(); retVal.put(factory.getName(), factory.deserialize(resultIter.next())); } return new Result<TimeseriesResultValue>( timestamp, new TimeseriesResultValue(retVal) ); } }; } }; } @Override public QueryRunner<Result<TimeseriesResultValue>> preMergeQueryDecoration(QueryRunner<Result<TimeseriesResultValue>> runner) { return intervalChunkingQueryRunnerDecorator.decorate(runner, this); } @Override public Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makePreComputeManipulatorFn( final TimeseriesQuery query, final MetricManipulationFn fn ) { return makeComputeManipulatorFn(query, fn, false); } @Override public Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makePostComputeManipulatorFn( TimeseriesQuery query, MetricManipulationFn fn ) { return makeComputeManipulatorFn(query, fn, true); } private Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>> makeComputeManipulatorFn( 
final TimeseriesQuery query, final MetricManipulationFn fn, final boolean calculatePostAggs ) { return new Function<Result<TimeseriesResultValue>, Result<TimeseriesResultValue>>() { @Override public Result<TimeseriesResultValue> apply(Result<TimeseriesResultValue> result) { final TimeseriesResultValue holder = result.getValue(); final Map<String, Object> values = Maps.newHashMap(holder.getBaseObject()); if (calculatePostAggs) { // put non finalized aggregators for calculating dependent post Aggregators for (AggregatorFactory agg : query.getAggregatorSpecs()) { values.put(agg.getName(), holder.getMetric(agg.getName())); } for (PostAggregator postAgg : query.getPostAggregatorSpecs()) { values.put(postAgg.getName(), postAgg.compute(values)); } } for (AggregatorFactory agg : query.getAggregatorSpecs()) { values.put(agg.getName(), fn.manipulate(agg, holder.getMetric(agg.getName()))); } return new Result<TimeseriesResultValue>( result.getTimestamp(), new TimeseriesResultValue(values) ); } }; } }
Fix caching of skipEmptyBuckets for TimeseriesQuery.
processing/src/main/java/io/druid/query/timeseries/TimeseriesQueryQueryToolChest.java
Fix caching of skipEmptyBuckets for TimeseriesQuery.
Java
apache-2.0
adb3a6dec2ae37a9a243f14eb604ad972d80337e
0
tudarmstadt-lt/GermaNER,tudarmstadt-lt/GermaNER
/******************************************************************************* * Copyright 2014 * FG Language Technology * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.tu.darmstadt.lt.ner.writer; import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngine; import java.io.FileOutputStream; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import org.apache.commons.io.IOUtils; import org.apache.uima.analysis_engine.AnalysisEngine; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.fit.component.JCasConsumer_ImplBase; import org.apache.uima.fit.descriptor.ConfigurationParameter; import org.apache.uima.fit.util.JCasUtil; import org.apache.uima.jcas.JCas; import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence; import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token; import de.tudarmstadt.ukp.dkpro.core.opennlp.OpenNlpSegmenter; /** * This is a helper Class, can be used from NoD. If you use a DKPro tokenizer during training, this * method use the same tokenizer available in DKPro, */ public class SentenceToCRFTestFileWriter extends JCasConsumer_ImplBase { public static final String SENTENCE_ITERATOR = "iterator"; @ConfigurationParameter(name = SENTENCE_ITERATOR, mandatory = true) private List<String> sentences = null; public static final String CRF_TEST_FILE_NAME = "crfFileName"; @ConfigurationParameter(name = CRF_TEST_FILE_NAME, mandatory = true) private String crfFileName = null; public static final String CRF_TEST_FILE_LANG = "crfTestFileLanguage"; @ConfigurationParameter(name = CRF_TEST_FILE_LANG, mandatory = false) private String crfTestFileLanguage = "de"; public static final String LF = System.getProperty("line.separator"); @Override public void process(JCas jcas) throws AnalysisEngineProcessException { try { StringBuilder sb = new StringBuilder(); int index = 0; for (String l: sentences) { Sentence sentence = new Sentence(jcas, index, l.length() + index); sentence.addToIndexes(); index = index + l.length() + 1; sb.append(l + "\n"); } jcas.setDocumentText(sb.toString().trim()); AnalysisEngine pipeline = createEngine(OpenNlpSegmenter.class, OpenNlpSegmenter.PARAM_LANGUAGE, crfTestFileLanguage, OpenNlpSegmenter.PARAM_WRITE_SENTENCE, false); pipeline.process(jcas); // get the token from jcas and convert it to CRF test file format. one token per line, // with // out gold. 
StringBuilder sbCRF = new StringBuilder(); Map<Sentence, Collection<Token>> sentencesTokens = JCasUtil.indexCovered(jcas, Sentence.class, Token.class); List<Sentence> sentences = new ArrayList<Sentence>(sentencesTokens.keySet()); // sort sentences by sentence Collections.sort(sentences, new Comparator<Sentence>() { @Override public int compare(Sentence arg0, Sentence arg1) { return arg0.getBegin() - arg1.getBegin(); } }); for (Sentence sentence : sentences) { for (Token token : sentencesTokens.get(sentence)) { sbCRF.append(token.getCoveredText() + LF); } sbCRF.append(LF); } IOUtils.write(sbCRF.toString(), new FileOutputStream(crfFileName), "UTF-8"); } catch (Exception e) { System.out.println(e); } } }
germanner/src/main/java/de/tu/darmstadt/lt/ner/writer/SentenceToCRFTestFileWriter.java
/******************************************************************************* * Copyright 2014 * FG Language Technology * Technische Universität Darmstadt * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package de.tu.darmstadt.lt.ner.writer; import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Sentence; import de.tudarmstadt.ukp.dkpro.core.api.segmentation.type.Token; import de.tudarmstadt.ukp.dkpro.core.opennlp.OpenNlpSegmenter; import org.apache.commons.io.IOUtils; import org.apache.uima.analysis_engine.AnalysisEngine; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.fit.component.JCasConsumer_ImplBase; import org.apache.uima.fit.descriptor.ConfigurationParameter; import org.apache.uima.fit.util.JCasUtil; import org.apache.uima.jcas.JCas; import java.io.FileOutputStream; import java.util.*; import static org.apache.uima.fit.factory.AnalysisEngineFactory.createEngine; /** * This is a helper Class, can be used from NoD. If you use a DKPro tokenizer during training, this * method use the same tokenizer available in DKPro, */ public class SentenceToCRFTestFileWriter extends JCasConsumer_ImplBase { public static final String SENTENCE_ITERATOR = "iterator"; @ConfigurationParameter(name = SENTENCE_ITERATOR, mandatory = true) private List<String> sentences = null; public static final String CRF_TEST_FILE_NAME = "crfFileName"; @ConfigurationParameter(name = CRF_TEST_FILE_NAME, mandatory = true) private String crfFileName = null; public static final String CRF_TEST_FILE_LANG = "crfTestFileLanguage"; @ConfigurationParameter(name = CRF_TEST_FILE_LANG, mandatory = false) private String crfTestFileLanguage = "de"; public static final String LF = System.getProperty("line.separator"); @Override public void process(JCas jcas) throws AnalysisEngineProcessException { try { StringBuilder sb = new StringBuilder(); int index = 0; for (String l: sentences) { Sentence sentence = new Sentence(jcas, index, l.length() + index); sentence.addToIndexes(); index = index + l.length() + 1; sb.append(l + "\n"); } jcas.setDocumentText(sb.toString().trim()); AnalysisEngine pipeline = createEngine(OpenNlpSegmenter.class, OpenNlpSegmenter.PARAM_LANGUAGE, crfTestFileLanguage, OpenNlpSegmenter.PARAM_CREATE_SENTENCES, false); pipeline.process(jcas); // get the token from jcas and convert it to CRF test file format. one token per line, // with // out gold. 
StringBuilder sbCRF = new StringBuilder(); Map<Sentence, Collection<Token>> sentencesTokens = JCasUtil.indexCovered(jcas, Sentence.class, Token.class); List<Sentence> sentences = new ArrayList<Sentence>(sentencesTokens.keySet()); // sort sentences by sentence Collections.sort(sentences, new Comparator<Sentence>() { @Override public int compare(Sentence arg0, Sentence arg1) { return arg0.getBegin() - arg1.getBegin(); } }); for (Sentence sentence : sentences) { for (Token token : sentencesTokens.get(sentence)) { sbCRF.append(token.getCoveredText() + LF); } sbCRF.append(LF); } IOUtils.write(sbCRF.toString(), new FileOutputStream(crfFileName), "UTF-8"); } catch (Exception e) { System.out.println(e); } } }
upgrade to dkpro 1.7
germanner/src/main/java/de/tu/darmstadt/lt/ner/writer/SentenceToCRFTestFileWriter.java
upgrade to dkpro 1.7
Java
apache-2.0
270ccae3b7583098b14d178b04ce163736d7ab7c
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.application.impl; import com.intellij.ide.IdeEventQueue; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.*; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Disposer; import com.intellij.psi.PsiDocumentManager; import com.intellij.util.ArrayUtil; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; /** * @author peter */ class AppUIExecutorImpl implements AppUIExecutor { private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.application.impl.AppUIExecutorImpl"); private final ModalityState myModality; private final Set<Disposable> myDisposables; private final ConstrainedExecutor[] myConstraints; AppUIExecutorImpl(ModalityState modality) { this(modality, Collections.emptySet(), new ConstrainedExecutor[]{new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return ApplicationManager.getApplication().isDispatchThread() && !ModalityState.current().dominates(modality); } @Override public void doExecute(@NotNull Runnable runnable) { ApplicationManager.getApplication().invokeLater(runnable, modality); } @Override public String toString() { return "onUiThread(" + modality + ")"; } }}); } private AppUIExecutorImpl(ModalityState modality, Set<Disposable> disposables, ConstrainedExecutor[] constraints) { myModality = modality; myConstraints = constraints; myDisposables = disposables; } @NotNull private AppUIExecutor withConstraint(ConstrainedExecutor element) { return new AppUIExecutorImpl(myModality, myDisposables, ArrayUtil.append(myConstraints, element)); } @NotNull @Override public AppUIExecutor later() { Integer edtEventCount = ApplicationManager.getApplication().isDispatchThread() ? IdeEventQueue.getInstance().getEventCount() : null; return withConstraint(new ConstrainedExecutor() { volatile boolean usedOnce; @Override public boolean isCorrectContext() { return edtEventCount == null ? 
ApplicationManager.getApplication().isDispatchThread() : edtEventCount != IdeEventQueue.getInstance().getEventCount() || usedOnce; } @Override public void doExecute(@NotNull Runnable runnable) { ApplicationManager.getApplication().invokeLater(() -> { usedOnce = true; runnable.run(); }, myModality); } @Override public String toString() { return "later"; } }); } @NotNull @Override public AppUIExecutor withDocumentsCommitted(@NotNull Project project) { return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return !PsiDocumentManager.getInstance(project).hasUncommitedDocuments(); } @Override public void doExecute(@NotNull Runnable runnable) { PsiDocumentManager.getInstance(project).performLaterWhenAllCommitted(runnable, myModality); } @Override public String toString() { return "withDocumentsCommitted"; } }).expireWith(project); } @NotNull @Override public AppUIExecutor inSmartMode(@NotNull Project project) { return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return !DumbService.getInstance(project).isDumb(); } @Override public void doExecute(@NotNull Runnable runnable) { DumbService.getInstance(project).smartInvokeLater(runnable, myModality); } @Override public String toString() { return "inSmartMode"; } }).expireWith(project); } @NotNull @Override public AppUIExecutor inTransaction(@NotNull Disposable parentDisposable) { TransactionId id = TransactionGuard.getInstance().getContextTransaction(); return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return TransactionGuard.getInstance().getContextTransaction() != null; } @Override public void doExecute(@NotNull Runnable runnable) { TransactionGuard.getInstance().submitTransaction(parentDisposable, id, runnable); } @Override public String toString() { return "inTransaction"; } }).expireWith(parentDisposable); } @NotNull @Override public AppUIExecutor expireWith(@NotNull Disposable parentDisposable) { if (myDisposables.contains(parentDisposable)) return this; Set<Disposable> disposables = ContainerUtil.newHashSet(myDisposables); disposables.add(parentDisposable); return new AppUIExecutorImpl(myModality, disposables, myConstraints); } @Override public void execute(@NotNull Runnable runnable) { CompletableFuture<Void> future = new CompletableFuture<>(); if (!myDisposables.isEmpty()) { List<Disposable> children = new ArrayList<>(); for (Disposable parent : myDisposables) { Disposable child = new Disposable() { @Override public void dispose() { future.cancel(false); } }; children.add(child); Disposer.register(parent, child); } future.whenComplete((v, t) -> children.forEach(Disposer::dispose)); } checkConstraints(runnable, future, new ArrayList<>()); } private void checkConstraints(@NotNull Runnable runnable, CompletableFuture<Void> future, List<ConstrainedExecutor> log) { Application app = ApplicationManager.getApplication(); if (!app.isDispatchThread()) { app.invokeLater(() -> checkConstraints(runnable, future, log), myModality); return; } if (future.isCancelled()) return; for (ConstrainedExecutor constraint : myConstraints) { if (!constraint.isCorrectContext()) { log.add(constraint); if (log.size() > 1000) { LOG.error("Too many reschedule requests, probably constraints can't be satisfied all together: " + log.subList(100, 120)); } constraint.execute(() -> checkConstraints(runnable, future, log)); return; } } try { runnable.run(); future.complete(null); } catch (Throwable e) { future.completeExceptionally(e); } } private 
abstract static class ConstrainedExecutor { public abstract boolean isCorrectContext(); public abstract void doExecute(Runnable r); public abstract String toString(); void execute(Runnable r) { doExecute(() -> { LOG.assertTrue(isCorrectContext(), this); r.run(); }); } } }
platform/platform-impl/src/com/intellij/openapi/application/impl/AppUIExecutorImpl.java
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.openapi.application.impl; import com.intellij.ide.IdeEventQueue; import com.intellij.openapi.Disposable; import com.intellij.openapi.application.*; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.Disposer; import com.intellij.psi.PsiDocumentManager; import com.intellij.util.ArrayUtil; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.Executor; /** * @author peter */ class AppUIExecutorImpl implements AppUIExecutor { private final ModalityState myModality; private final Set<Disposable> myDisposables; private final ConstrainedExecutor[] myConstraints; AppUIExecutorImpl(ModalityState modality) { this(modality, Collections.emptySet(), new ConstrainedExecutor[]{new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return ApplicationManager.getApplication().isDispatchThread() && !modality.dominates(ModalityState.current()); } @Override public void execute(@NotNull Runnable runnable) { ApplicationManager.getApplication().invokeLater(runnable, modality); } }}); } private AppUIExecutorImpl(ModalityState modality, Set<Disposable> disposables, ConstrainedExecutor[] constraints) { myModality = modality; myConstraints = constraints; myDisposables = disposables; } @NotNull private AppUIExecutor withConstraint(ConstrainedExecutor element) { return new AppUIExecutorImpl(myModality, myDisposables, ArrayUtil.append(myConstraints, element)); } @NotNull @Override public AppUIExecutor later() { Integer edtEventCount = ApplicationManager.getApplication().isDispatchThread() ? IdeEventQueue.getInstance().getEventCount() : null; return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return edtEventCount == null ? 
ApplicationManager.getApplication().isDispatchThread() : edtEventCount != IdeEventQueue.getInstance().getEventCount(); } @Override public void execute(@NotNull Runnable runnable) { ApplicationManager.getApplication().invokeLater(runnable, myModality); } }); } @NotNull @Override public AppUIExecutor withDocumentsCommitted(@NotNull Project project) { return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return !PsiDocumentManager.getInstance(project).hasUncommitedDocuments(); } @Override public void execute(@NotNull Runnable runnable) { PsiDocumentManager.getInstance(project).performLaterWhenAllCommitted(runnable, myModality); } }).expireWith(project); } @NotNull @Override public AppUIExecutor inSmartMode(@NotNull Project project) { return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return !DumbService.getInstance(project).isDumb(); } @Override public void execute(@NotNull Runnable runnable) { DumbService.getInstance(project).smartInvokeLater(runnable, myModality); } }).expireWith(project); } @NotNull @Override public AppUIExecutor inTransaction(@NotNull Disposable parentDisposable) { TransactionId id = TransactionGuard.getInstance().getContextTransaction(); return withConstraint(new ConstrainedExecutor() { @Override public boolean isCorrectContext() { return TransactionGuard.getInstance().getContextTransaction() != null; } @Override public void execute(@NotNull Runnable runnable) { TransactionGuard.getInstance().submitTransaction(parentDisposable, id, runnable); } }).expireWith(parentDisposable); } @NotNull @Override public AppUIExecutor expireWith(@NotNull Disposable parentDisposable) { if (myDisposables.contains(parentDisposable)) return this; Set<Disposable> disposables = ContainerUtil.newHashSet(myDisposables); disposables.add(parentDisposable); return new AppUIExecutorImpl(myModality, disposables, myConstraints); } @Override public void execute(@NotNull Runnable runnable) { CompletableFuture<Void> future = new CompletableFuture<>(); if (!myDisposables.isEmpty()) { List<Disposable> children = new ArrayList<>(); for (Disposable parent : myDisposables) { Disposable child = new Disposable() { @Override public void dispose() { future.cancel(false); } }; children.add(child); Disposer.register(parent, child); } future.whenComplete((v, t) -> children.forEach(Disposer::dispose)); } checkConstraints(runnable, future); } private void checkConstraints(@NotNull Runnable runnable, CompletableFuture<Void> future) { Application app = ApplicationManager.getApplication(); if (!app.isDispatchThread()) { app.invokeLater(() -> checkConstraints(runnable, future), myModality); return; } if (future.isCancelled()) return; for (ConstrainedExecutor constraint : myConstraints) { if (!constraint.isCorrectContext()) { constraint.execute(() -> checkConstraints(runnable, future)); return; } } try { runnable.run(); future.complete(null); } catch (Throwable e) { future.completeExceptionally(e); } } private interface ConstrainedExecutor extends Executor { boolean isCorrectContext(); } }
AppUIExecutorImpl: prevent endless loops

fix one found cause: incorrectly flipped ModalityState#dominates call
add diagnostics when the loop happens in future
adjust "later" to prevent false positives in that diagnostics
platform/platform-impl/src/com/intellij/openapi/application/impl/AppUIExecutorImpl.java
AppUIExecutorImpl: prevent endless loops
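For readers scanning the flattened contents above, here is a minimal sketch of the central fix described in this commit, reconstructed from the old and new contents of this record. The class and method names in the sketch are invented for illustration; the ApplicationManager and ModalityState calls are the ones used in the record itself.

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ModalityState;

// Sketch only: contrasts the old and new dispatch-thread constraint from this record.
final class DominatesCheckSketch {

  // Old check (old_contents): the receiver and argument of dominates() were flipped,
  // so the constraint could stay unsatisfied forever and invokeLater kept rescheduling.
  static boolean isCorrectContextOld(ModalityState modality) {
    return ApplicationManager.getApplication().isDispatchThread()
           && !modality.dominates(ModalityState.current());
  }

  // New check (new_contents): the *current* modality must not dominate the requested one.
  static boolean isCorrectContextNew(ModalityState modality) {
    return ApplicationManager.getApplication().isDispatchThread()
           && !ModalityState.current().dominates(modality);
  }
}

The other two changes in the record follow the same pattern: checkConstraints() now carries a list of unsatisfied constraints and logs an error once it grows past 1000 entries, and the "later" constraint remembers via its usedOnce flag that it has already dispatched once, so the diagnostics do not report false positives.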
Java
apache-2.0
177dc5b2ea9acd715a7fde00e31c54b25b6222ba
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.daemon; import com.intellij.codeInsight.daemon.impl.analysis.XmlPathReferenceInspection; import com.intellij.codeInsight.daemon.impl.analysis.XmlUnboundNsPrefixInspection; import com.intellij.codeInspection.LocalInspectionTool; import com.intellij.codeInspection.htmlInspections.RequiredAttributesInspection; import com.intellij.javaee.ExternalResourceManagerExImpl; import com.intellij.openapi.diagnostic.Logger; import com.intellij.testFramework.PlatformTestUtil; import com.intellij.xml.util.CheckDtdReferencesInspection; import com.intellij.xml.util.CheckXmlFileWithXercesValidatorInspection; import com.intellij.xml.util.XmlDuplicatedIdInspection; import com.intellij.xml.util.XmlInvalidIdInspection; import java.io.File; public class XmlStressTest extends DaemonAnalyzerTestCase { private static final Logger LOG = Logger.getInstance(XmlStressTest.class); public void testSchemaValidator() throws Exception { for (int i = 0; i < 100; i++) { doTest("xml/WsdlValidation.wsdl", false, false); LOG.debug(String.valueOf(i)); } } private static final String BASE_PATH = "/xml/"; @Override protected LocalInspectionTool[] configureLocalInspectionTools() { return new LocalInspectionTool[]{ new RequiredAttributesInspection(), new XmlDuplicatedIdInspection(), new XmlInvalidIdInspection(), new CheckDtdReferencesInspection(), new XmlUnboundNsPrefixInspection(), new XmlPathReferenceInspection() }; } @Override public void setUp() throws Exception { super.setUp(); enableInspectionTool(new CheckXmlFileWithXercesValidatorInspection()); ExternalResourceManagerExImpl.registerResourceTemporarily("http://schemas.xmlsoap.org/wsdl/", getTestDataPath() + BASE_PATH + "wsdl11.xsd", getTestRootDisposable()); ExternalResourceManagerExImpl.registerResourceTemporarily("http://schemas.xmlsoap.org/wsdl/soap/", getTestDataPath() + BASE_PATH + "wsdl11_soapbinding.xsd", getTestRootDisposable()); ExternalResourceManagerExImpl.registerResourceTemporarily("http://schemas.xmlsoap.org/soap/encoding/", getTestDataPath() + BASE_PATH + "soap-encoding.xsd", getTestRootDisposable()); } @Override protected String getTestDataPath() { return PlatformTestUtil.getCommunityPath().replace(File.separatorChar, '/') + "/xml/tests/testData/"; } }
xml/tests/src/com/intellij/codeInsight/daemon/XmlStressTest.java
/* * Copyright 2000-2017 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.daemon; import com.intellij.codeInsight.daemon.impl.analysis.XmlPathReferenceInspection; import com.intellij.codeInsight.daemon.impl.analysis.XmlUnboundNsPrefixInspection; import com.intellij.codeInspection.LocalInspectionTool; import com.intellij.codeInspection.htmlInspections.RequiredAttributesInspection; import com.intellij.javaee.ExternalResourceManagerExImpl; import com.intellij.testFramework.PlatformTestUtil; import com.intellij.xml.util.CheckDtdReferencesInspection; import com.intellij.xml.util.CheckXmlFileWithXercesValidatorInspection; import com.intellij.xml.util.XmlDuplicatedIdInspection; import com.intellij.xml.util.XmlInvalidIdInspection; import java.io.File; public class XmlStressTest extends DaemonAnalyzerTestCase { public void testSchemaValidator() throws Exception { for (int i = 0; i < 100; i++) { doTest("xml/WsdlValidation.wsdl", false, false); System.out.println(i); } } private static final String BASE_PATH = "/xml/"; @Override protected LocalInspectionTool[] configureLocalInspectionTools() { return new LocalInspectionTool[]{ new RequiredAttributesInspection(), new XmlDuplicatedIdInspection(), new XmlInvalidIdInspection(), new CheckDtdReferencesInspection(), new XmlUnboundNsPrefixInspection(), new XmlPathReferenceInspection() }; } @Override public void setUp() throws Exception { super.setUp(); enableInspectionTool(new CheckXmlFileWithXercesValidatorInspection()); ExternalResourceManagerExImpl.registerResourceTemporarily("http://schemas.xmlsoap.org/wsdl/", getTestDataPath() + BASE_PATH + "wsdl11.xsd", getTestRootDisposable()); ExternalResourceManagerExImpl.registerResourceTemporarily("http://schemas.xmlsoap.org/wsdl/soap/", getTestDataPath() + BASE_PATH + "wsdl11_soapbinding.xsd", getTestRootDisposable()); ExternalResourceManagerExImpl.registerResourceTemporarily("http://schemas.xmlsoap.org/soap/encoding/", getTestDataPath() + BASE_PATH + "soap-encoding.xsd", getTestRootDisposable()); } @Override protected String getTestDataPath() { return PlatformTestUtil.getCommunityPath().replace(File.separatorChar, '/') + "/xml/tests/testData/"; } }
removed spam from console
xml/tests/src/com/intellij/codeInsight/daemon/XmlStressTest.java
removed spam from console
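A minimal sketch of this record's change, with an invented class name for illustration: the per-iteration progress output in the stress-test loop moves from System.out to the IntelliJ platform logger at debug level, which is what removes the console spam.

import com.intellij.openapi.diagnostic.Logger;

// Sketch only: the logging change from this record, shown outside its original test class.
final class ConsoleSpamSketch {
  private static final Logger LOG = Logger.getInstance(ConsoleSpamSketch.class);

  static void reportProgress(int i) {
    // old_contents: System.out.println(i);
    LOG.debug(String.valueOf(i)); // new_contents: emitted only when debug logging is enabled
  }
}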
Java
bsd-3-clause
71c166689723a0208c455823713a0663db1b8012
0
mkoistinen/JBookTrader,GabrielDancause/jbooktrader,GabrielDancause/jbooktrader,mkoistinen/JBookTrader,mkoistinen/JBookTrader,GabrielDancause/jbooktrader
package com.jbooktrader.platform.chart; import com.jbooktrader.platform.indicator.*; import com.jbooktrader.platform.marketbook.*; import com.jbooktrader.platform.util.*; import org.jfree.data.time.*; import org.jfree.data.xy.*; import java.util.*; /** * Encapsulates performance chart data. */ public class PerformanceChartData { private final TimeSeries netProfit; private final BarSize barSize; private final List<OHLCDataItem> prices; private Bar priceBar; private final Map<String, Bar> indicatorBars; private final Map<String, List<OHLCDataItem>> indicators; public PerformanceChartData(BarSize barSize) { this.barSize = barSize; netProfit = new TimeSeries("Net Profit"); prices = new ArrayList<OHLCDataItem>(); indicatorBars = new HashMap<String, Bar>(); indicators = new HashMap<String, List<OHLCDataItem>>(); } public List<OHLCDataItem> getPrices() { return prices; } public boolean isEmpty() { return prices.size() == 0; } public void addIndicator(Indicator indicator) { indicators.put(indicator.getName(), new ArrayList<OHLCDataItem>()); } public void updateNetProfit(TimedValue profitAndLoss) { netProfit.addOrUpdate(new Second(new Date(profitAndLoss.getTime())), profitAndLoss.getValue()); } public TimeSeries getProfitAndLossSeries() { return netProfit; } public void updateIndicators(List<Indicator> indicatorsToUpdate, long time) { long frequency = barSize.getSize(); for (Indicator indicator : indicatorsToUpdate) { double value = indicator.getValue(); // Integer division gives us the number of whole periods long completedPeriods = time / frequency; long barTime = (completedPeriods + 1) * frequency; Bar indicatorBar = indicatorBars.get(indicator.getName()); if (indicatorBar == null) { indicatorBar = new Bar(barTime, value); indicatorBars.put(indicator.getName(), indicatorBar); } if (barTime > indicatorBar.getTime()) { Date date = new Date(indicatorBar.getTime()); OHLCDataItem item = new OHLCDataItem(date, indicatorBar.getOpen(), indicatorBar.getHigh(), indicatorBar.getLow(), indicatorBar.getClose(), 0); List<OHLCDataItem> ind = indicators.get(indicator.getName()); ind.add(item); indicatorBar = new Bar(barTime, value); indicatorBars.put(indicator.getName(), indicatorBar); } indicatorBar.setClose(value); indicatorBar.setLow(Math.min(value, indicatorBar.getLow())); indicatorBar.setHigh(Math.max(value, indicatorBar.getHigh())); } } public void updatePrice(MarketSnapshot marketSnapshot) { long frequency = barSize.getSize(); long time = marketSnapshot.getTime(); double price = marketSnapshot.getPrice(); // Integer division gives us the number of whole periods long completedPeriods = time / frequency; long barTime = (completedPeriods + 1) * frequency; if (priceBar == null) { priceBar = new Bar(barTime, price); } if (barTime > priceBar.getTime()) { Date date = new Date(priceBar.getTime()); OHLCDataItem item = new OHLCDataItem(date, priceBar.getOpen(), priceBar.getHigh(), priceBar.getLow(), priceBar.getClose(), 0); prices.add(item); priceBar = new Bar(barTime, price); } priceBar.setClose(price); priceBar.setLow(Math.min(price, priceBar.getLow())); priceBar.setHigh(Math.max(price, priceBar.getHigh())); } public OHLCDataset getPriceDataset() { return new DefaultOHLCDataset("", prices.toArray(new OHLCDataItem[prices.size()])); } public OHLCDataset getIndicatorDataset(Indicator indicator) { List<OHLCDataItem> ind = indicators.get(indicator.getName()); return new DefaultOHLCDataset("", ind.toArray(new OHLCDataItem[ind.size()])); } }
source/com/jbooktrader/platform/chart/PerformanceChartData.java
package com.jbooktrader.platform.chart; import com.jbooktrader.platform.indicator.*; import com.jbooktrader.platform.marketbook.*; import com.jbooktrader.platform.util.*; import org.jfree.data.time.*; import org.jfree.data.xy.*; import java.util.*; /** * Encapsulates performance chart data. */ public class PerformanceChartData { private final TimeSeries netProfit; private final BarSize barSize; private final List<OHLCDataItem> prices; private Bar priceBar; private final Map<String, Bar> indicatorBars; private final Map<String, List<OHLCDataItem>> indicators; public PerformanceChartData(BarSize barSize) { this.barSize = barSize; netProfit = new TimeSeries("Net Profit", Second.class); netProfit.setRangeDescription("Net Profit"); prices = new ArrayList<OHLCDataItem>(); indicatorBars = new HashMap<String, Bar>(); indicators = new HashMap<String, List<OHLCDataItem>>(); } public List<OHLCDataItem> getPrices() { return prices; } public boolean isEmpty() { return prices.size() == 0; } public void addIndicator(Indicator indicator) { indicators.put(indicator.getName(), new ArrayList<OHLCDataItem>()); } public void updateNetProfit(TimedValue profitAndLoss) { netProfit.addOrUpdate(new Second(new Date(profitAndLoss.getTime())), profitAndLoss.getValue()); } public TimeSeries getProfitAndLossSeries() { return netProfit; } public void updateIndicators(List<Indicator> indicatorsToUpdate, long time) { long frequency = barSize.getSize(); for (Indicator indicator : indicatorsToUpdate) { double value = indicator.getValue(); // Integer division gives us the number of whole periods long completedPeriods = time / frequency; long barTime = (completedPeriods + 1) * frequency; Bar indicatorBar = indicatorBars.get(indicator.getName()); if (indicatorBar == null) { indicatorBar = new Bar(barTime, value); indicatorBars.put(indicator.getName(), indicatorBar); } if (barTime > indicatorBar.getTime()) { Date date = new Date(indicatorBar.getTime()); OHLCDataItem item = new OHLCDataItem(date, indicatorBar.getOpen(), indicatorBar.getHigh(), indicatorBar.getLow(), indicatorBar.getClose(), 0); List<OHLCDataItem> ind = indicators.get(indicator.getName()); ind.add(item); indicatorBar = new Bar(barTime, value); indicatorBars.put(indicator.getName(), indicatorBar); } indicatorBar.setClose(value); indicatorBar.setLow(Math.min(value, indicatorBar.getLow())); indicatorBar.setHigh(Math.max(value, indicatorBar.getHigh())); } } public void updatePrice(MarketSnapshot marketSnapshot) { long frequency = barSize.getSize(); long time = marketSnapshot.getTime(); double price = marketSnapshot.getPrice(); // Integer division gives us the number of whole periods long completedPeriods = time / frequency; long barTime = (completedPeriods + 1) * frequency; if (priceBar == null) { priceBar = new Bar(barTime, price); } if (barTime > priceBar.getTime()) { Date date = new Date(priceBar.getTime()); OHLCDataItem item = new OHLCDataItem(date, priceBar.getOpen(), priceBar.getHigh(), priceBar.getLow(), priceBar.getClose(), 0); prices.add(item); priceBar = new Bar(barTime, price); } priceBar.setClose(price); priceBar.setLow(Math.min(price, priceBar.getLow())); priceBar.setHigh(Math.max(price, priceBar.getHigh())); } public OHLCDataset getPriceDataset() { return new DefaultOHLCDataset("", prices.toArray(new OHLCDataItem[prices.size()])); } public OHLCDataset getIndicatorDataset(Indicator indicator) { List<OHLCDataItem> ind = indicators.get(indicator.getName()); return new DefaultOHLCDataset("", ind.toArray(new OHLCDataItem[ind.size()])); } }
Minor refactoring
source/com/jbooktrader/platform/chart/PerformanceChartData.java
Minor refactoring
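The "minor refactoring" is easier to see in isolation; here is a sketch reconstructed from the old and new contents of this record (the wrapper class and method names are invented for illustration):

import org.jfree.data.time.Second;
import org.jfree.data.time.TimeSeries;

// Sketch only: how the net-profit series construction changed in this record.
final class NetProfitSeriesSketch {

  static TimeSeries createOld() {
    TimeSeries netProfit = new TimeSeries("Net Profit", Second.class); // old_contents
    netProfit.setRangeDescription("Net Profit");                       // old_contents
    return netProfit;
  }

  static TimeSeries createNew() {
    // new_contents: the explicit time-period class and the redundant range description
    // are dropped; the simple single-argument constructor is sufficient here.
    return new TimeSeries("Net Profit");
  }
}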
Java
bsd-3-clause
bcdd08491793a379347f72fc60943ef8534b517c
0
Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo,Caleydo/caleydo
/******************************************************************************* * Caleydo - Visualization for Molecular Biology - http://caleydo.org * Copyright (c) The Caleydo Team. All rights reserved. * Licensed under the new BSD license, available at http://caleydo.org/license ******************************************************************************/ package org.caleydo.view.histogram.v2; import gleem.linalg.Vec2f; import java.util.ArrayList; import java.util.List; import javax.media.opengl.GL2; import org.caleydo.core.data.collection.EDimension; import org.caleydo.core.event.EventListenerManager.ListenTo; import org.caleydo.core.util.base.ILabeled; import org.caleydo.core.util.color.Color; import org.caleydo.core.util.color.mapping.UpdateColorMappingEvent; import org.caleydo.core.util.format.Formatter; import org.caleydo.core.util.function.AdvancedDoubleStatistics; import org.caleydo.core.util.function.ArrayDoubleList; import org.caleydo.core.util.function.DoubleFunctions; import org.caleydo.core.util.function.IDoubleFunction; import org.caleydo.core.util.function.IDoubleIterator; import org.caleydo.core.util.function.IDoubleList; import org.caleydo.core.view.opengl.canvas.EDetailLevel; import org.caleydo.core.view.opengl.layout.Column.VAlign; import org.caleydo.core.view.opengl.layout2.GLGraphics; import org.caleydo.core.view.opengl.layout2.PickableGLElement; import org.caleydo.core.view.opengl.layout2.basic.ScrollingDecorator.IHasMinSize; import org.caleydo.core.view.opengl.layout2.renderer.GLRenderers; import com.google.common.primitives.Doubles; /** * renders an box and whiskers plot for numerical data domains * * @author Samuel Gratzl */ public abstract class ABoxAndWhiskersElement extends PickableGLElement implements ILabeled, IHasMinSize { private static final int TICK_STEP = 50; /** * height of the box in percentage of the total height */ private static final float BOX_HEIGHT_PERCENTAGE = 1 / 3.f; private static final float LINE_TAIL_HEIGHT_PERCENTAGE = 0.75f; private final EDetailLevel detailLevel; private final EDimension direction; private final boolean showOutlier; private final boolean showMinMax; private boolean showScale = false; private AdvancedDoubleStatistics stats; /** * the min, max value to use for normalization */ private double min; /** * the min, max value to use for normalization */ private double max; /** * value which is just above the <code>25 quartile - iqr*1.5</code> margin */ private double nearestIQRMin; /** * value which is just below the <code>75 quartile + iqr*1.5</code> margin */ private double nearestIQRMax; private IDoubleList outliers; public ABoxAndWhiskersElement(EDetailLevel detailLevel, EDimension direction, boolean showOutlier, boolean showMinMax) { this.detailLevel = detailLevel; this.direction = direction; this.showOutlier = showOutlier; this.showMinMax = showMinMax; } /** * @param value * @return */ protected double normalize(double value) { return DoubleFunctions.normalize(min, max).apply(value); } /** * @return the direction, see {@link #direction} */ public EDimension getDirection() { return direction; } /** * @return the showScale, see {@link #showScale} */ public boolean isShowScale() { return showScale; } /** * @param showScale * setter, see {@link showScale} */ public void setShowScale(boolean showScale) { this.showScale = showScale; } protected abstract Color getColor(); @Override public String getTooltip() { if (stats == null) return null; StringBuilder b = new StringBuilder(); b.append(getLabel()).append('\n'); 
b.append(String.format("%s:\t%d", "count", stats.getN())); if (stats.getNaNs() > 0) { b.append(String.format("(+%d invalid)\n", stats.getNaNs())); } else b.append('\n'); b.append(String.format("%s:\t%.3f\n", "median", stats.getMedian())); b.append(String.format("%s:\t%.3f\n", "mean", stats.getMean())); b.append(String.format("%s:\t%.3f\n", "median", stats.getMedian())); b.append(String.format("%s:\t%.3f\n", "sd", stats.getSd())); b.append(String.format("%s:\t%.3f\n", "var", stats.getVar())); b.append(String.format("%s:\t%.3f\n", "mad", stats.getMedianAbsoluteDeviation())); b.append(String.format("%s:\t%.3f\n", "min", stats.getMin())); b.append(String.format("%s:\t%.3f", "max", stats.getMax())); return b.toString(); } public final void setData(IDoubleList list) { setData(list, Double.NaN, Double.NaN); } public void setData(IDoubleList list, double min, double max) { this.stats = AdvancedDoubleStatistics.of(list); updateIQRMatches(list); this.min = Double.isNaN(min) ? stats.getMin() : min; this.max = Double.isNaN(max) ? stats.getMax() : max; repaint(); } public final void setData(AdvancedDoubleStatistics stats) { setData(stats, Double.NaN, Double.NaN); } public void setData(AdvancedDoubleStatistics stats, double min, double max) { this.stats = stats; if (stats != null) { updateIQRMatches(null); this.min = Double.isNaN(min) ? stats.getMin() : min; this.max = Double.isNaN(max) ? stats.getMax() : max; } repaint(); } private void updateIQRMatches(IDoubleList l) { final double lowerIQRBounds = stats.getQuartile25() - stats.getIQR() * 1.5; final double upperIQRBounds = stats.getQuartile75() + stats.getIQR() * 1.5; if (l == null) { // invalid raw data nearestIQRMin = lowerIQRBounds; nearestIQRMax = upperIQRBounds; outliers = null; return; } nearestIQRMin = upperIQRBounds; nearestIQRMax = lowerIQRBounds; // values which are out of the iqr bounds List<Double> outliers = new ArrayList<>(); // find the values which are at the within iqr bounds for (IDoubleIterator it = l.iterator(); it.hasNext();) { double v = it.nextPrimitive(); if (Double.isNaN(v)) continue; if (v > lowerIQRBounds && v < nearestIQRMin) nearestIQRMin = v; if (v < upperIQRBounds && v > nearestIQRMax) nearestIQRMax = v; // optionally compute the outliers if (showOutlier && (v < lowerIQRBounds || v > upperIQRBounds)) outliers.add(v); } this.outliers = new ArrayDoubleList(Doubles.toArray(outliers)); } /** * @param renderBackground * setter, see {@link renderBackground} */ public void setRenderBackground(boolean renderBackground) { if (renderBackground) setRenderer(GLRenderers.fillRect(Color.WHITE)); else setRenderer(null); } @Override protected void renderImpl(GLGraphics g, float w, float h) { super.renderImpl(g, w, h); if (stats == null) return; if (direction.isHorizontal()) { g.save().gl.glRotatef(90, 0, 0, 1); renderBoxAndWhiskers(g, h, w); g.restore(); } else renderBoxAndWhiskers(g, w, h); if (showScale && detailLevel == EDetailLevel.HIGH) renderScale(g, w, h); } private void renderBoxAndWhiskers(GLGraphics g, float w, float h) { final float hi = h * BOX_HEIGHT_PERCENTAGE; final float y = (h - hi) * 0.5f; final float center = h / 2; IDoubleFunction normalize = DoubleFunctions.normalize(min, max); { final float firstQuantrileBoundary = (float) (normalize.apply(stats.getQuartile25())) * w; final float thirdQuantrileBoundary = (float) (normalize.apply(stats.getQuartile75())) * w; g.color(getColor()) .fillRect(firstQuantrileBoundary, y, thirdQuantrileBoundary - firstQuantrileBoundary, hi); 
g.color(Color.BLACK).drawRect(firstQuantrileBoundary, y, thirdQuantrileBoundary - firstQuantrileBoundary, hi); final float iqrMin = (float) normalize.apply(nearestIQRMin) * w; final float iqrMax = (float) normalize.apply(nearestIQRMax) * w; // Median float median = (float) normalize.apply(stats.getMedian()) * w; g.color(0.2f, 0.2f, 0.2f).drawLine(median, y, median, y + hi); // Whiskers g.color(0, 0, 0); // line to whiskers g.drawLine(iqrMin, center, firstQuantrileBoundary, center); g.drawLine(iqrMax, center, thirdQuantrileBoundary, center); float h_whiskers = hi * LINE_TAIL_HEIGHT_PERCENTAGE; g.drawLine(iqrMin, center - h_whiskers * 0.5f, iqrMin, center + h_whiskers * 0.5f); g.drawLine(iqrMax, center - h_whiskers * 0.5f, iqrMax, center + h_whiskers * 0.5f); } renderOutliers(g, w, hi, center, normalize); if (showMinMax) { g.gl.glPushAttrib(GL2.GL_POINT_BIT); g.gl.glPointSize(2f); g.color(0f, 0f, 0f, 1f); float min = (float) normalize.apply(stats.getMin()) * w; float max = (float) normalize.apply(stats.getMax()) * w; g.drawPoint(min, center); g.drawPoint(max, center); g.gl.glPopAttrib(); } } private void renderOutliers(GLGraphics g, float w, final float hi, final float center, IDoubleFunction normalize) { if (!showOutlier || outliers == null) return; g.color(0.2f, 0.2f, 0.2f, outlierAlhpa(outliers.size())); g.gl.glPushAttrib(GL2.GL_POINT_BIT); g.gl.glPointSize(2f); for (IDoubleIterator it = outliers.iterator(); it.hasNext();) { float v = (float) normalize.apply(it.nextPrimitive()) * w; g.drawPoint(v, center); } g.gl.glPopAttrib(); } private float outlierAlhpa(int size) { if (size < 10) return 1.0f; float v = 5.0f / size; if (v < 0.05f) return 0.05f; if (v > 1) return 1; return v; } private void renderScale(GLGraphics g, float w, float h) { float hi = h * 0.85f; g.color(Color.BLACK).drawLine(0, hi, w, hi); final int ticks = numberOfTicks(w); float delta = w / ticks; float x = delta; final float v_delta = 1.f / ticks; IDoubleFunction f = DoubleFunctions.unnormalize(min, max); final int textHeight = 10; g.drawText(Formatter.formatNumber(f.apply(0)), 1, hi, delta, textHeight); for (int i = 1; i < ticks; ++i) { g.drawText(Formatter.formatNumber(f.apply(v_delta * i)), x - delta * 0.5f, hi, delta, textHeight, VAlign.CENTER); } g.drawText(Formatter.formatNumber(f.apply(1)), w - delta - 1, hi, delta - 1, textHeight, VAlign.RIGHT); } /** * @param w * @return */ private static int numberOfTicks(float w) { return Math.max(Math.round(w / TICK_STEP), 1); } @ListenTo private void onColorMappingUpdate(UpdateColorMappingEvent event) { repaint(); } @Override public final Vec2f getMinSize() { switch (detailLevel) { case HIGH: return new Vec2f(300, 90); case MEDIUM: return new Vec2f(100, 30); default: return new Vec2f(40, 15); } } }
org.caleydo.view.histogram/src/org/caleydo/view/histogram/v2/ABoxAndWhiskersElement.java
/******************************************************************************* * Caleydo - Visualization for Molecular Biology - http://caleydo.org * Copyright (c) The Caleydo Team. All rights reserved. * Licensed under the new BSD license, available at http://caleydo.org/license ******************************************************************************/ package org.caleydo.view.histogram.v2; import gleem.linalg.Vec2f; import java.util.ArrayList; import java.util.List; import javax.media.opengl.GL2; import org.caleydo.core.data.collection.EDimension; import org.caleydo.core.event.EventListenerManager.ListenTo; import org.caleydo.core.util.base.ILabeled; import org.caleydo.core.util.color.Color; import org.caleydo.core.util.color.mapping.UpdateColorMappingEvent; import org.caleydo.core.util.format.Formatter; import org.caleydo.core.util.function.AdvancedDoubleStatistics; import org.caleydo.core.util.function.ArrayDoubleList; import org.caleydo.core.util.function.DoubleFunctions; import org.caleydo.core.util.function.IDoubleFunction; import org.caleydo.core.util.function.IDoubleIterator; import org.caleydo.core.util.function.IDoubleList; import org.caleydo.core.view.opengl.canvas.EDetailLevel; import org.caleydo.core.view.opengl.layout.Column.VAlign; import org.caleydo.core.view.opengl.layout2.GLGraphics; import org.caleydo.core.view.opengl.layout2.PickableGLElement; import org.caleydo.core.view.opengl.layout2.basic.ScrollingDecorator.IHasMinSize; import org.caleydo.core.view.opengl.layout2.renderer.GLRenderers; import com.google.common.primitives.Doubles; /** * renders an box and whiskers plot for numerical data domains * * @author Samuel Gratzl */ public abstract class ABoxAndWhiskersElement extends PickableGLElement implements ILabeled, IHasMinSize { private static final int TICK_STEP = 50; /** * height of the box in percentage of the total height */ private static final float BOX_HEIGHT_PERCENTAGE = 1 / 3.f; private static final float LINE_TAIL_HEIGHT_PERCENTAGE = 0.75f; private final EDetailLevel detailLevel; private final EDimension direction; private final boolean showOutlier; private final boolean showMinMax; private boolean showScale = false; private AdvancedDoubleStatistics stats; /** * the min, max value to use for normalization */ private double min; /** * the min, max value to use for normalization */ private double max; /** * value which is just above the <code>25 quartile - iqr*1.5</code> margin */ private double nearestIQRMin; /** * value which is just below the <code>75 quartile + iqr*1.5</code> margin */ private double nearestIQRMax; private IDoubleList outliers; public ABoxAndWhiskersElement(EDetailLevel detailLevel, EDimension direction, boolean showOutlier, boolean showMinMax) { this.detailLevel = detailLevel; this.direction = direction; this.showOutlier = showOutlier; this.showMinMax = showMinMax; } /** * @param value * @return */ protected double normalize(double value) { return DoubleFunctions.normalize(min, max).apply(value); } /** * @return the direction, see {@link #direction} */ public EDimension getDirection() { return direction; } /** * @return the showScale, see {@link #showScale} */ public boolean isShowScale() { return showScale; } /** * @param showScale * setter, see {@link showScale} */ public void setShowScale(boolean showScale) { this.showScale = showScale; } protected abstract Color getColor(); @Override public String getTooltip() { if (stats == null) return null; StringBuilder b = new StringBuilder(); b.append(getLabel()).append('\n'); 
b.append(String.format("%s:\t%.3f\n", "mean", stats.getMean())); b.append(String.format("%s:\t%.3f\n", "median", stats.getMedian())); b.append(String.format("%s:\t%.3f\n", "sd", stats.getSd())); b.append(String.format("%s:\t%.3f\n", "var", stats.getVar())); b.append(String.format("%s:\t%.3f\n", "mad", stats.getMedianAbsoluteDeviation())); b.append(String.format("%s:\t%.3f\n", "min", stats.getMin())); b.append(String.format("%s:\t%.3f", "max", stats.getMax())); return b.toString(); } public final void setData(IDoubleList list) { setData(list, Double.NaN, Double.NaN); } public void setData(IDoubleList list, double min, double max) { this.stats = AdvancedDoubleStatistics.of(list); updateIQRMatches(list); this.min = Double.isNaN(min) ? stats.getMin() : min; this.max = Double.isNaN(max) ? stats.getMax() : max; repaint(); } public final void setData(AdvancedDoubleStatistics stats) { setData(stats, Double.NaN, Double.NaN); } public void setData(AdvancedDoubleStatistics stats, double min, double max) { this.stats = stats; if (stats != null) { updateIQRMatches(null); this.min = Double.isNaN(min) ? stats.getMin() : min; this.max = Double.isNaN(max) ? stats.getMax() : max; } repaint(); } private void updateIQRMatches(IDoubleList l) { final double lowerIQRBounds = stats.getQuartile25() - stats.getIQR() * 1.5; final double upperIQRBounds = stats.getQuartile75() + stats.getIQR() * 1.5; if (l == null) { // invalid raw data nearestIQRMin = lowerIQRBounds; nearestIQRMax = upperIQRBounds; outliers = null; return; } nearestIQRMin = upperIQRBounds; nearestIQRMax = lowerIQRBounds; // values which are out of the iqr bounds List<Double> outliers = new ArrayList<>(); // find the values which are at the within iqr bounds for (IDoubleIterator it = l.iterator(); it.hasNext();) { double v = it.nextPrimitive(); if (Double.isNaN(v)) continue; if (v > lowerIQRBounds && v < nearestIQRMin) nearestIQRMin = v; if (v < upperIQRBounds && v > nearestIQRMax) nearestIQRMax = v; // optionally compute the outliers if (showOutlier && (v < lowerIQRBounds || v > upperIQRBounds)) outliers.add(v); } this.outliers = new ArrayDoubleList(Doubles.toArray(outliers)); } /** * @param renderBackground * setter, see {@link renderBackground} */ public void setRenderBackground(boolean renderBackground) { if (renderBackground) setRenderer(GLRenderers.fillRect(Color.WHITE)); else setRenderer(null); } @Override protected void renderImpl(GLGraphics g, float w, float h) { super.renderImpl(g, w, h); if (stats == null) return; if (direction.isHorizontal()) { g.save().gl.glRotatef(90, 0, 0, 1); renderBoxAndWhiskers(g, h, w); g.restore(); } else renderBoxAndWhiskers(g, w, h); if (showScale && detailLevel == EDetailLevel.HIGH) renderScale(g, w, h); } private void renderBoxAndWhiskers(GLGraphics g, float w, float h) { final float hi = h * BOX_HEIGHT_PERCENTAGE; final float y = (h - hi) * 0.5f; final float center = h / 2; IDoubleFunction normalize = DoubleFunctions.normalize(min, max); { final float firstQuantrileBoundary = (float) (normalize.apply(stats.getQuartile25())) * w; final float thirdQuantrileBoundary = (float) (normalize.apply(stats.getQuartile75())) * w; g.color(getColor()) .fillRect(firstQuantrileBoundary, y, thirdQuantrileBoundary - firstQuantrileBoundary, hi); g.color(Color.BLACK).drawRect(firstQuantrileBoundary, y, thirdQuantrileBoundary - firstQuantrileBoundary, hi); final float iqrMin = (float) normalize.apply(nearestIQRMin) * w; final float iqrMax = (float) normalize.apply(nearestIQRMax) * w; // Median float median = (float) 
normalize.apply(stats.getMedian()) * w; g.color(0.2f, 0.2f, 0.2f).drawLine(median, y, median, y + hi); // Whiskers g.color(0, 0, 0); // line to whiskers g.drawLine(iqrMin, center, firstQuantrileBoundary, center); g.drawLine(iqrMax, center, thirdQuantrileBoundary, center); float h_whiskers = hi * LINE_TAIL_HEIGHT_PERCENTAGE; g.drawLine(iqrMin, center - h_whiskers * 0.5f, iqrMin, center + h_whiskers * 0.5f); g.drawLine(iqrMax, center - h_whiskers * 0.5f, iqrMax, center + h_whiskers * 0.5f); } renderOutliers(g, w, hi, center, normalize); if (showMinMax) { g.gl.glPushAttrib(GL2.GL_POINT_BIT); g.gl.glPointSize(2f); g.color(0f, 0f, 0f, 1f); float min = (float) normalize.apply(stats.getMin()) * w; float max = (float) normalize.apply(stats.getMax()) * w; g.drawPoint(min, center); g.drawPoint(max, center); g.gl.glPopAttrib(); } } private void renderOutliers(GLGraphics g, float w, final float hi, final float center, IDoubleFunction normalize) { if (!showOutlier || outliers == null) return; g.color(0.2f, 0.2f, 0.2f, outlierAlhpa(outliers.size())); g.gl.glPushAttrib(GL2.GL_POINT_BIT); g.gl.glPointSize(2f); for (IDoubleIterator it = outliers.iterator(); it.hasNext();) { float v = (float) normalize.apply(it.nextPrimitive()) * w; g.drawPoint(v, center); } g.gl.glPopAttrib(); } private float outlierAlhpa(int size) { if (size < 10) return 1.0f; float v = 5.0f / size; if (v < 0.05f) return 0.05f; if (v > 1) return 1; return v; } private void renderScale(GLGraphics g, float w, float h) { float hi = h * 0.85f; g.color(Color.BLACK).drawLine(0, hi, w, hi); final int ticks = numberOfTicks(w); float delta = w / ticks; float x = delta; final float v_delta = 1.f / ticks; IDoubleFunction f = DoubleFunctions.unnormalize(min, max); final int textHeight = 10; g.drawText(Formatter.formatNumber(f.apply(0)), 1, hi, delta, textHeight); for (int i = 1; i < ticks; ++i) { g.drawText(Formatter.formatNumber(f.apply(v_delta * i)), x - delta * 0.5f, hi, delta, textHeight, VAlign.CENTER); } g.drawText(Formatter.formatNumber(f.apply(1)), w - delta - 1, hi, delta - 1, textHeight, VAlign.RIGHT); } /** * @param w * @return */ private static int numberOfTicks(float w) { return Math.max(Math.round(w / TICK_STEP), 1); } @ListenTo private void onColorMappingUpdate(UpdateColorMappingEvent event) { repaint(); } @Override public final Vec2f getMinSize() { switch (detailLevel) { case HIGH: return new Vec2f(300, 90); case MEDIUM: return new Vec2f(100, 30); default: return new Vec2f(40, 15); } } }
fix #1790 box plot: show number of items
org.caleydo.view.histogram/src/org/caleydo/view/histogram/v2/ABoxAndWhiskersElement.java
fix #1790 box plot: show number of items
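A minimal sketch of the tooltip change behind "show number of items", reconstructed from this record's old and new contents; the wrapper class and method names are invented, while AdvancedDoubleStatistics and its getN()/getNaNs() accessors are the ones used in the record itself.

import org.caleydo.core.util.function.AdvancedDoubleStatistics;

// Sketch only: the count line that getTooltip() now prepends to the statistics.
final class TooltipCountSketch {

  static String countLine(AdvancedDoubleStatistics stats) {
    StringBuilder b = new StringBuilder();
    b.append(String.format("%s:\t%d", "count", stats.getN()));
    if (stats.getNaNs() > 0) {
      b.append(String.format("(+%d invalid)\n", stats.getNaNs())); // invalid = NaN entries
    } else {
      b.append('\n');
    }
    // the existing mean/median/sd/var/mad/min/max lines follow here in the real tooltip
    return b.toString();
  }
}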
Java
mit
dbdbecc6c23fe7b87c65d1fd19918c18b93f7845
0
McJty/RFTools
package mcjty.rftools.blocks.storagemonitor; import mcjty.lib.base.StyleConfig; import mcjty.lib.container.GenericGuiContainer; import mcjty.lib.entity.GenericEnergyStorageTileEntity; import mcjty.lib.gui.Window; import mcjty.lib.gui.events.BlockRenderEvent; import mcjty.lib.gui.events.DefaultSelectionEvent; import mcjty.lib.gui.layout.HorizontalAlignment; import mcjty.lib.gui.layout.HorizontalLayout; import mcjty.lib.gui.layout.PositionalLayout; import mcjty.lib.gui.layout.VerticalLayout; import mcjty.lib.gui.widgets.*; import mcjty.lib.gui.widgets.Button; import mcjty.lib.gui.widgets.Label; import mcjty.lib.gui.widgets.Panel; import mcjty.lib.gui.widgets.TextField; import mcjty.lib.network.Argument; import mcjty.lib.network.clientinfo.PacketGetInfoFromServer; import mcjty.lib.varia.BlockPosTools; import mcjty.lib.varia.Logging; import mcjty.rftools.RFTools; import mcjty.rftools.craftinggrid.GuiCraftingGrid; import mcjty.rftools.craftinggrid.PacketRequestGridSync; import mcjty.rftools.network.RFToolsMessages; import net.minecraft.block.Block; import net.minecraft.client.gui.FontRenderer; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumFacing; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.BlockPos; import net.minecraft.util.text.TextFormatting; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.commons.lang3.tuple.Pair; import org.lwjgl.input.Keyboard; import org.lwjgl.input.Mouse; import java.awt.*; import java.io.IOException; import java.util.*; import java.util.List; public class GuiStorageScanner extends GenericGuiContainer<StorageScannerTileEntity> { private static final int STORAGE_MONITOR_WIDTH = 256; private static final int STORAGE_MONITOR_HEIGHT = 244; private static final ResourceLocation iconLocation = new ResourceLocation(RFTools.MODID, "textures/gui/storagescanner.png"); private static final ResourceLocation guielements = new ResourceLocation(RFTools.MODID, "textures/gui/guielements.png"); private WidgetList storageList; private WidgetList itemList; private EnergyBar energyBar; private ScrollableLabel radiusLabel; private Button scanButton; private Button topButton; private Button upButton; private Button downButton; private Button bottomButton; private TextField searchField; private ImageChoiceLabel exportToStarred; private GuiCraftingGrid craftingGrid; private long prevTime = -1; private int listDirty = 0; // From server: all the positions with inventories public static List<InventoriesInfoPacketClient.InventoryInfo> fromServer_inventories = new ArrayList<>(); // From server: all the positions with inventories matching the search public static Set<BlockPos> fromServer_foundInventories = new HashSet<>(); // From server: the contents of an inventory public static List<ItemStack> fromServer_inventory = new ArrayList<>(); public GuiStorageScanner(StorageScannerTileEntity storageScannerTileEntity, StorageScannerContainer storageScannerContainer) { super(RFTools.instance, RFToolsMessages.INSTANCE, storageScannerTileEntity, storageScannerContainer, RFTools.GUI_MANUAL_MAIN, "stomon"); GenericEnergyStorageTileEntity.setCurrentRF(storageScannerTileEntity.getEnergyStored(EnumFacing.DOWN)); craftingGrid = new GuiCraftingGrid(); xSize = STORAGE_MONITOR_WIDTH; ySize = STORAGE_MONITOR_HEIGHT; } @Override public void initGui() { super.initGui(); int maxEnergyStored = tileEntity.getMaxEnergyStored(EnumFacing.DOWN); energyBar = new EnergyBar(mc, 
this).setFilledRectThickness(1).setVertical().setDesiredWidth(10).setDesiredHeight(56).setMaxValue(maxEnergyStored).setShowText(false); energyBar.setValue(GenericEnergyStorageTileEntity.getCurrentRF()); upButton = new Button(mc, this).setText("U").setTooltips("Move inventory up") .addButtonEvent(widget -> { moveUp(); }); topButton = new Button(mc, this).setText("T").setTooltips("Move inventory to the top") .addButtonEvent(widget -> { moveTop(); }); downButton = new Button(mc, this).setText("D").setTooltips("Move inventory down") .addButtonEvent(widget -> { moveDown(); }); bottomButton = new Button(mc, this).setText("B").setTooltips("Move inventory to the bottom") .addButtonEvent(widget -> { moveBottom(); }); Panel energyPanel = new Panel(mc, this).setLayout(new VerticalLayout().setVerticalMargin(0).setSpacing(1)) .setDesiredWidth(10); energyPanel .addChild(energyBar) .addChild(topButton) .addChild(upButton) .addChild(downButton) .addChild(bottomButton); exportToStarred = new ImageChoiceLabel(mc, this) .setLayoutHint(new PositionalLayout.PositionalHint(12, 223, 13, 13)) .addChoiceEvent((parent, newChoice) -> changeExportMode()); exportToStarred.addChoice("No", "Export to current container", guielements, 131, 19); exportToStarred.addChoice("Yes", "Export to first routable container", guielements, 115, 19); storageList = new WidgetList(mc, this).addSelectionEvent(new DefaultSelectionEvent() { @Override public void select(Widget parent, int index) { getInventoryOnServer(); } @Override public void doubleClick(Widget parent, int index) { hilightSelectedContainer(index); } }).setPropagateEventsToChildren(true); Slider storageListSlider = new Slider(mc, this).setDesiredWidth(10).setVertical().setScrollable(storageList); Panel storagePanel = new Panel(mc, this).setLayout(new HorizontalLayout().setSpacing(1).setHorizontalMargin(1)) .setLayoutHint(new PositionalLayout.PositionalHint(3, 4, 130, 86+54)) .setDesiredHeight(86+54) .addChild(energyPanel) .addChild(storageList).addChild(storageListSlider); itemList = new WidgetList(mc, this).setPropagateEventsToChildren(true) .setInvisibleSelection(true); Slider itemListSlider = new Slider(mc, this).setDesiredWidth(10).setVertical().setScrollable(itemList); Panel itemPanel = new Panel(mc, this) .setLayout(new HorizontalLayout().setSpacing(1).setHorizontalMargin(1)) .setLayoutHint(new PositionalLayout.PositionalHint(136, 4, 256-138-4, 86+54)) .addChild(itemList).addChild(itemListSlider); scanButton = new Button(mc, this) .setText("Scan") .setDesiredWidth(50) .setDesiredHeight(14) .addButtonEvent(parent -> RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new InventoriesInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), true)))) .setTooltips("Start/stop a scan of", "all storage units", "in radius"); radiusLabel = new ScrollableLabel(mc, this) .addValueEvent((parent, newValue) -> changeRadius(newValue)) .setRealMinimum(1) .setRealMaximum(20); radiusLabel.setRealValue(tileEntity.getRadius()); searchField = new TextField(mc, this).addTextEvent((parent, newText) -> { storageList.clearHilightedRows(); fromServer_foundInventories.clear(); startSearch(newText); }); Panel searchPanel = new Panel(mc, this) .setLayoutHint(new PositionalLayout.PositionalHint(8, 142, 256-11, 18)) .setLayout(new HorizontalLayout()).setDesiredHeight(18) .addChild(new Label(mc, this).setText("Search:")) .addChild(searchField); Slider radiusSlider = new Slider(mc, this) .setHorizontal() .setTooltips("Radius of scan") 
.setMinimumKnobSize(12) .setDesiredHeight(14) .setScrollable(radiusLabel); Panel scanPanel = new Panel(mc, this) .setLayoutHint(new PositionalLayout.PositionalHint(8, 162, 74, 54)) .setFilledRectThickness(-2) .setFilledBackground(StyleConfig.colorListBackground) .setLayout(new VerticalLayout().setVerticalMargin(6).setSpacing(1)) .addChild(scanButton) .addChild(radiusSlider) .addChild(radiusLabel); if (tileEntity.isDummy()) { scanButton.setEnabled(false); radiusLabel.setVisible(false); radiusSlider.setVisible(false); } Widget toplevel = new Panel(mc, this).setBackground(iconLocation).setLayout(new PositionalLayout()) .addChild(storagePanel) .addChild(itemPanel) .addChild(searchPanel) .addChild(scanPanel) .addChild(exportToStarred); toplevel.setBounds(new Rectangle(guiLeft, guiTop, xSize, ySize)); window = new Window(this, toplevel); Keyboard.enableRepeatEvents(true); fromServer_foundInventories.clear(); fromServer_inventory.clear(); if (tileEntity.isDummy()) { fromServer_inventories.clear(); } else { tileEntity.requestRfFromServer(RFTools.MODID); } BlockPos pos = tileEntity.getCraftingGridContainerPos(); craftingGrid.initGui(modBase, network, mc, this, pos, tileEntity.getCraftingGridProvider(), guiLeft, guiTop, xSize, ySize); network.sendToServer(new PacketRequestGridSync(pos)); } @Override protected void mouseClicked(int x, int y, int button) throws IOException { super.mouseClicked(x, y, button); craftingGrid.getWindow().mouseClicked(x, y, button); } @Override public void handleMouseInput() throws IOException { super.handleMouseInput(); craftingGrid.getWindow().handleMouseInput(); } @Override protected void mouseReleased(int x, int y, int state) { super.mouseReleased(x, y, state); craftingGrid.getWindow().mouseMovedOrUp(x, y, state); } @Override protected void keyTyped(char typedChar, int keyCode) throws IOException { super.keyTyped(typedChar, keyCode); craftingGrid.getWindow().keyTyped(typedChar, keyCode); } private void moveUp() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_UP, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(storageList.getSelected()-1); listDirty = 0; } private void moveTop() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_TOP, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(1); listDirty = 0; } private void moveDown() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_DOWN, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(storageList.getSelected()+1); listDirty = 0; } private void moveBottom() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_BOTTOM, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(storageList.getChildCount()-1); listDirty = 0; } private void changeExportMode() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_TOGGLEEXPORT); } private void hilightSelectedContainer(int index) { if (index == -1) { return; } if (index == 0) { // Starred return; } InventoriesInfoPacketClient.InventoryInfo c = fromServer_inventories.get(index-1); if (c != null) { RFTools.instance.clientInfo.hilightBlock(c.getPos(), System.currentTimeMillis() + 1000 * StorageScannerConfiguration.hilightTime); Logging.message(mc.thePlayer, "The inventory is now highlighted"); mc.thePlayer.closeScreen(); } } private void 
changeRadius(int r) { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_SETRADIUS, new Argument("r", r)); } private void startSearch(String text) { if (!text.isEmpty()) { RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new SearchItemsInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), text))); } } private void getInventoryOnServer() { BlockPos c = getSelectedContainerPos(); if (c != null) { RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new GetContentsInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), c))); } } private BlockPos getSelectedContainerPos() { int selected = storageList.getSelected(); if (selected != -1) { if (selected == 0) { return new BlockPos(-1, -1, -1); } selected--; if (selected < fromServer_inventories.size()) { InventoriesInfoPacketClient.InventoryInfo info = fromServer_inventories.get(selected); if (info == null) { return null; } else { return info.getPos(); } } } return null; } private void requestListsIfNeeded() { listDirty--; if (listDirty <= 0) { RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new InventoriesInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), false))); getInventoryOnServer(); listDirty = 20; } } private void updateContentsList() { itemList.removeChildren(); Pair<Panel,Integer> currentPos = MutablePair.of(null, 0); int numcolumns = 5; int spacing = 3; // Collections.sort(fromServer_inventory, (o1, o2) -> o1.stackSize == o2.stackSize ? 0 : o1.stackSize < o2.stackSize ? -1 : 1); Collections.sort(fromServer_inventory, (o1, o2) -> o1.getDisplayName().compareTo(o2.getDisplayName())); String filterText = searchField.getText().toLowerCase(); for (ItemStack item : fromServer_inventory) { String displayName = item.getDisplayName(); if (filterText.isEmpty() || displayName.toLowerCase().contains(filterText)) { currentPos = addItemToList(item, itemList, currentPos, numcolumns, spacing); } } } private Pair<Panel,Integer> addItemToList(ItemStack item, WidgetList itemList, Pair<Panel,Integer> currentPos, int numcolumns, int spacing) { Panel panel = currentPos.getKey(); if (panel == null || currentPos.getValue() >= numcolumns) { panel = new Panel(mc, this).setLayout(new HorizontalLayout().setSpacing(spacing).setHorizontalMargin(1)) .setDesiredHeight(12).setUserObject(new Integer(-1)).setDesiredHeight(16); currentPos = MutablePair.of(panel, 0); itemList.addChild(panel); } BlockRender blockRender = new BlockRender(mc, this) .setRenderItem(item) .setUserObject(1) // Mark as a special stack in the renderer (for tooltip) .setOffsetX(-1) .setOffsetY(-1) .setHilightOnHover(true); blockRender.addSelectionEvent(new BlockRenderEvent() { @Override public void select(Widget widget) { BlockRender br = (BlockRender) widget; Object item = br.getRenderItem(); if (item != null) { boolean shift = Keyboard.isKeyDown(Keyboard.KEY_LSHIFT) || Keyboard.isKeyDown(Keyboard.KEY_RSHIFT); requestItem((ItemStack) item, shift ? 
1 : -1); } } @Override public void doubleClick(Widget widget) { } }); panel.addChild(blockRender); currentPos.setValue(currentPos.getValue() + 1); return currentPos; } private void requestItem(ItemStack stack, int amount) { network.sendToServer(new PacketRequestItem(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), getSelectedContainerPos(), stack, amount)); getInventoryOnServer(); } private void changeRoutable(BlockPos c) { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_TOGGLEROUTABLE, new Argument("pos", c)); listDirty = 0; } private void updateStorageList() { storageList.removeChildren(); addStorageLine(null, "<Starred>", false); for (InventoriesInfoPacketClient.InventoryInfo c : fromServer_inventories) { String displayName = c.getName(); boolean routable = c.isRoutable(); addStorageLine(c, displayName, routable); } storageList.clearHilightedRows(); int i = 0; for (InventoriesInfoPacketClient.InventoryInfo c : fromServer_inventories) { if (fromServer_foundInventories.contains(c.getPos())) { storageList.addHilightedRow(i+1); } i++; } } private void addStorageLine(InventoriesInfoPacketClient.InventoryInfo c, String displayName, boolean routable) { Panel panel = new Panel(mc, this).setLayout(new HorizontalLayout()); panel.addChild(new BlockRender(mc, this).setRenderItem(c == null ? null : c.getBlock())); AbstractWidget label = new Label(mc, this).setColor(StyleConfig.colorTextInListNormal) .setText(displayName) .setDynamic(true) .setHorizontalAlignment(HorizontalAlignment.ALIGH_LEFT) .setDesiredWidth(58); if (c == null) { label.setTooltips(TextFormatting.GREEN + "All routable inventories"); } else { label.setTooltips(TextFormatting.GREEN + "Block at: " + TextFormatting.WHITE + BlockPosTools.toString(c.getPos()), TextFormatting.GREEN + "Name: " + TextFormatting.WHITE + displayName, "(doubleclick to highlight)"); } panel.addChild(label); if (c != null) { ImageChoiceLabel choiceLabel = new ImageChoiceLabel(mc, this) .addChoiceEvent((parent, newChoice) -> changeRoutable(c.getPos())).setDesiredWidth(13); choiceLabel.addChoice("No", "Not routable", guielements, 131, 19); choiceLabel.addChoice("Yes", "Routable", guielements, 115, 19); choiceLabel.setCurrentChoice(routable ? 1 : 0); panel.addChild(choiceLabel); } storageList.addChild(panel); } @Override protected void drawGuiContainerBackgroundLayer(float v, int i, int i2) { updateStorageList(); updateContentsList(); requestListsIfNeeded(); int selected = storageList.getSelected(); if (selected <= 0 || storageList.getChildCount() <= 2) { upButton.setEnabled(false); downButton.setEnabled(false); topButton.setEnabled(false); bottomButton.setEnabled(false); } else if (selected == 1) { topButton.setEnabled(false); upButton.setEnabled(false); downButton.setEnabled(true); bottomButton.setEnabled(true); } else if (selected == storageList.getChildCount()-1) { topButton.setEnabled(true); upButton.setEnabled(true); downButton.setEnabled(false); bottomButton.setEnabled(false); } else { topButton.setEnabled(true); upButton.setEnabled(true); downButton.setEnabled(true); bottomButton.setEnabled(true); } if (!tileEntity.isDummy()) { tileEntity.requestRfFromServer(RFTools.MODID); int currentRF = GenericEnergyStorageTileEntity.getCurrentRF(); energyBar.setValue(currentRF); exportToStarred.setCurrentChoice(tileEntity.isExportToCurrent() ? 
0 : 1); } else { if (System.currentTimeMillis() - lastTime > 300) { lastTime = System.currentTimeMillis(); RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new ScannerInfoPacketServer(tileEntity.getDimension(), tileEntity.getPos()))); } energyBar.setValue(ScannerInfoPacketClient.rfReceived); exportToStarred.setCurrentChoice(ScannerInfoPacketClient.exportToCurrentReceived ? 0 : 1); } drawWindow(); } @Override protected void drawGuiContainerForegroundLayer(int i1, int i2) { int x = Mouse.getEventX() * width / mc.displayWidth; int y = height - Mouse.getEventY() * height / mc.displayHeight - 1; List<String> tooltips = craftingGrid.getWindow().getTooltips(); if (tooltips != null) { drawHoveringText(tooltips, window.getTooltipItems(), x - guiLeft, y - guiTop, mc.fontRendererObj); } super.drawGuiContainerForegroundLayer(i1, i2); } @Override public void drawScreen(int mouseX, int mouseY, float partialTicks) { super.drawScreen(mouseX, mouseY, partialTicks); int x = Mouse.getEventX() * width / mc.displayWidth; int y = height - Mouse.getEventY() * height / mc.displayHeight - 1; Widget widget = window.getToplevel().getWidgetAtPosition(x, y); if (widget instanceof BlockRender) { BlockRender blockRender = (BlockRender) widget; Object renderItem = blockRender.getRenderItem(); ItemStack itemStack; if (renderItem instanceof ItemStack) { itemStack = (ItemStack) renderItem; } else if (renderItem instanceof Block) { itemStack = new ItemStack((Block) renderItem); } else if (renderItem instanceof Item) { itemStack = new ItemStack((Item) renderItem); } else { itemStack = null; } if (itemStack != null) { boolean custom = blockRender.getUserObject() instanceof Integer; customRenderToolTip(itemStack, mouseX, mouseY, custom); } } } private void customRenderToolTip(ItemStack stack, int x, int y, boolean custom) { List<String> list = stack.getTooltip(this.mc.thePlayer, this.mc.gameSettings.advancedItemTooltips); for (int i = 0; i < list.size(); ++i) { if (i == 0) { list.set(i, stack.getRarity().rarityColor + list.get(i)); } else { list.set(i, TextFormatting.GRAY + list.get(i)); } } if (custom) { List<String> newlist = new ArrayList<>(); newlist.add(TextFormatting.GREEN + "Click: "+ TextFormatting.WHITE + "full stack"); newlist.add(TextFormatting.GREEN + "Shift + click: "+ TextFormatting.WHITE + "single item"); newlist.add(""); newlist.addAll(list); list = newlist; } FontRenderer font = stack.getItem().getFontRenderer(stack); this.drawHoveringText(list, x, y, (font == null ? fontRendererObj : font)); } private static long lastTime = 0; @Override protected void drawWindow() { super.drawWindow(); craftingGrid.draw(); } }
src/main/java/mcjty/rftools/blocks/storagemonitor/GuiStorageScanner.java
package mcjty.rftools.blocks.storagemonitor; import mcjty.lib.base.StyleConfig; import mcjty.lib.container.GenericGuiContainer; import mcjty.lib.entity.GenericEnergyStorageTileEntity; import mcjty.lib.gui.Window; import mcjty.lib.gui.events.BlockRenderEvent; import mcjty.lib.gui.events.DefaultSelectionEvent; import mcjty.lib.gui.layout.HorizontalAlignment; import mcjty.lib.gui.layout.HorizontalLayout; import mcjty.lib.gui.layout.PositionalLayout; import mcjty.lib.gui.layout.VerticalLayout; import mcjty.lib.gui.widgets.*; import mcjty.lib.gui.widgets.Button; import mcjty.lib.gui.widgets.Label; import mcjty.lib.gui.widgets.Panel; import mcjty.lib.gui.widgets.TextField; import mcjty.lib.network.Argument; import mcjty.lib.network.clientinfo.PacketGetInfoFromServer; import mcjty.lib.varia.BlockPosTools; import mcjty.lib.varia.Logging; import mcjty.rftools.RFTools; import mcjty.rftools.craftinggrid.GuiCraftingGrid; import mcjty.rftools.craftinggrid.PacketRequestGridSync; import mcjty.rftools.network.RFToolsMessages; import net.minecraft.block.Block; import net.minecraft.client.gui.FontRenderer; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.util.EnumFacing; import net.minecraft.util.ResourceLocation; import net.minecraft.util.math.BlockPos; import net.minecraft.util.text.TextFormatting; import org.apache.commons.lang3.tuple.MutablePair; import org.apache.commons.lang3.tuple.Pair; import org.lwjgl.input.Keyboard; import org.lwjgl.input.Mouse; import java.awt.*; import java.io.IOException; import java.util.*; import java.util.List; public class GuiStorageScanner extends GenericGuiContainer<StorageScannerTileEntity> { private static final int STORAGE_MONITOR_WIDTH = 256; private static final int STORAGE_MONITOR_HEIGHT = 244; private static final ResourceLocation iconLocation = new ResourceLocation(RFTools.MODID, "textures/gui/storagescanner.png"); private static final ResourceLocation guielements = new ResourceLocation(RFTools.MODID, "textures/gui/guielements.png"); private WidgetList storageList; private WidgetList itemList; private EnergyBar energyBar; private ScrollableLabel radiusLabel; private Button scanButton; private Button topButton; private Button upButton; private Button downButton; private Button bottomButton; private TextField searchField; private ImageChoiceLabel exportToStarred; private GuiCraftingGrid craftingGrid; private long prevTime = -1; private int listDirty = 0; // From server: all the positions with inventories public static List<InventoriesInfoPacketClient.InventoryInfo> fromServer_inventories = new ArrayList<>(); // From server: all the positions with inventories matching the search public static Set<BlockPos> fromServer_foundInventories = new HashSet<>(); // From server: the contents of an inventory public static List<ItemStack> fromServer_inventory = new ArrayList<>(); public GuiStorageScanner(StorageScannerTileEntity storageScannerTileEntity, StorageScannerContainer storageScannerContainer) { super(RFTools.instance, RFToolsMessages.INSTANCE, storageScannerTileEntity, storageScannerContainer, RFTools.GUI_MANUAL_MAIN, "stomon"); GenericEnergyStorageTileEntity.setCurrentRF(storageScannerTileEntity.getEnergyStored(EnumFacing.DOWN)); craftingGrid = new GuiCraftingGrid(); xSize = STORAGE_MONITOR_WIDTH; ySize = STORAGE_MONITOR_HEIGHT; } @Override public void initGui() { super.initGui(); int maxEnergyStored = tileEntity.getMaxEnergyStored(EnumFacing.DOWN); energyBar = new EnergyBar(mc, 
this).setFilledRectThickness(1).setVertical().setDesiredWidth(10).setDesiredHeight(56).setMaxValue(maxEnergyStored).setShowText(false); energyBar.setValue(GenericEnergyStorageTileEntity.getCurrentRF()); upButton = new Button(mc, this).setText("U").setTooltips("Move inventory up") .addButtonEvent(widget -> { moveUp(); }); topButton = new Button(mc, this).setText("T").setTooltips("Move inventory to the top") .addButtonEvent(widget -> { moveTop(); }); downButton = new Button(mc, this).setText("D").setTooltips("Move inventory down") .addButtonEvent(widget -> { moveDown(); }); bottomButton = new Button(mc, this).setText("B").setTooltips("Move inventory to the bottom") .addButtonEvent(widget -> { moveBottom(); }); Panel energyPanel = new Panel(mc, this).setLayout(new VerticalLayout().setVerticalMargin(0).setSpacing(1)) .setDesiredWidth(10); energyPanel .addChild(energyBar) .addChild(topButton) .addChild(upButton) .addChild(downButton) .addChild(bottomButton); exportToStarred = new ImageChoiceLabel(mc, this) .setLayoutHint(new PositionalLayout.PositionalHint(12, 223, 13, 13)) .addChoiceEvent((parent, newChoice) -> changeExportMode()); exportToStarred.addChoice("No", "Export to current container", guielements, 131, 19); exportToStarred.addChoice("Yes", "Export to first routable container", guielements, 115, 19); storageList = new WidgetList(mc, this).addSelectionEvent(new DefaultSelectionEvent() { @Override public void select(Widget parent, int index) { getInventoryOnServer(); } @Override public void doubleClick(Widget parent, int index) { hilightSelectedContainer(index); } }).setPropagateEventsToChildren(true); Slider storageListSlider = new Slider(mc, this).setDesiredWidth(10).setVertical().setScrollable(storageList); Panel storagePanel = new Panel(mc, this).setLayout(new HorizontalLayout().setSpacing(1).setHorizontalMargin(1)) .setLayoutHint(new PositionalLayout.PositionalHint(3, 4, 130, 86+54)) .setDesiredHeight(86+54) .addChild(energyPanel) .addChild(storageList).addChild(storageListSlider); itemList = new WidgetList(mc, this).setPropagateEventsToChildren(true) .setInvisibleSelection(true); Slider itemListSlider = new Slider(mc, this).setDesiredWidth(10).setVertical().setScrollable(itemList); Panel itemPanel = new Panel(mc, this) .setLayout(new HorizontalLayout().setSpacing(1).setHorizontalMargin(1)) .setLayoutHint(new PositionalLayout.PositionalHint(136, 4, 256-138-4, 86+54)) .addChild(itemList).addChild(itemListSlider); scanButton = new Button(mc, this) .setText("Scan") .setDesiredWidth(50) .setDesiredHeight(14) .addButtonEvent(parent -> RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new InventoriesInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), true)))) .setTooltips("Start/stop a scan of", "all storage units", "in radius"); radiusLabel = new ScrollableLabel(mc, this) .addValueEvent((parent, newValue) -> changeRadius(newValue)) .setRealMinimum(1) .setRealMaximum(20); radiusLabel.setRealValue(tileEntity.getRadius()); searchField = new TextField(mc, this).addTextEvent((parent, newText) -> { storageList.clearHilightedRows(); fromServer_foundInventories.clear(); startSearch(newText); }); Panel searchPanel = new Panel(mc, this) .setLayoutHint(new PositionalLayout.PositionalHint(8, 142, 256-11, 18)) .setLayout(new HorizontalLayout()).setDesiredHeight(18) .addChild(new Label(mc, this).setText("Search:")) .addChild(searchField); Slider radiusSlider = new Slider(mc, this) .setHorizontal() .setTooltips("Radius of scan") 
.setMinimumKnobSize(12) .setDesiredHeight(14) .setScrollable(radiusLabel); Panel scanPanel = new Panel(mc, this) .setLayoutHint(new PositionalLayout.PositionalHint(8, 162, 74, 54)) .setFilledRectThickness(-2) .setFilledBackground(StyleConfig.colorListBackground) .setLayout(new VerticalLayout().setVerticalMargin(6).setSpacing(1)) .addChild(scanButton) .addChild(radiusSlider) .addChild(radiusLabel); if (tileEntity.isDummy()) { scanButton.setEnabled(false); radiusLabel.setVisible(false); radiusSlider.setVisible(false); } Widget toplevel = new Panel(mc, this).setBackground(iconLocation).setLayout(new PositionalLayout()) .addChild(storagePanel) .addChild(itemPanel) .addChild(searchPanel) .addChild(scanPanel) .addChild(exportToStarred); toplevel.setBounds(new Rectangle(guiLeft, guiTop, xSize, ySize)); window = new Window(this, toplevel); Keyboard.enableRepeatEvents(true); fromServer_foundInventories.clear(); fromServer_inventory.clear(); if (tileEntity.isDummy()) { fromServer_inventories.clear(); } else { tileEntity.requestRfFromServer(RFTools.MODID); } BlockPos pos = tileEntity.getCraftingGridContainerPos(); craftingGrid.initGui(modBase, network, mc, this, pos, tileEntity.getCraftingGridProvider(), guiLeft, guiTop, xSize, ySize); network.sendToServer(new PacketRequestGridSync(pos)); } @Override protected void mouseClicked(int x, int y, int button) throws IOException { super.mouseClicked(x, y, button); craftingGrid.getWindow().mouseClicked(x, y, button); } @Override public void handleMouseInput() throws IOException { super.handleMouseInput(); craftingGrid.getWindow().handleMouseInput(); } @Override protected void mouseReleased(int x, int y, int state) { super.mouseReleased(x, y, state); craftingGrid.getWindow().mouseMovedOrUp(x, y, state); } @Override protected void keyTyped(char typedChar, int keyCode) throws IOException { super.keyTyped(typedChar, keyCode); craftingGrid.getWindow().keyTyped(typedChar, keyCode); } private void moveUp() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_UP, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(storageList.getSelected()-1); listDirty = 0; } private void moveTop() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_TOP, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(1); listDirty = 0; } private void moveDown() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_DOWN, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(storageList.getSelected()+1); listDirty = 0; } private void moveBottom() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_BOTTOM, new Argument("index", storageList.getSelected()-1)); storageList.setSelected(storageList.getChildCount()-1); listDirty = 0; } private void changeExportMode() { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_TOGGLEEXPORT); } private void hilightSelectedContainer(int index) { if (index == -1) { return; } if (index == 0) { // Starred return; } InventoriesInfoPacketClient.InventoryInfo c = fromServer_inventories.get(index-1); if (c != null) { RFTools.instance.clientInfo.hilightBlock(c.getPos(), System.currentTimeMillis() + 1000 * StorageScannerConfiguration.hilightTime); Logging.message(mc.thePlayer, "The inventory is now highlighted"); mc.thePlayer.closeScreen(); } } private void 
changeRadius(int r) { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_SETRADIUS, new Argument("r", r)); } private void startSearch(String text) { if (!text.isEmpty()) { RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new SearchItemsInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), text))); } } private void getInventoryOnServer() { BlockPos c = getSelectedContainerPos(); if (c != null) { RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new GetContentsInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), c))); } } private BlockPos getSelectedContainerPos() { int selected = storageList.getSelected(); if (selected != -1) { if (selected == 0) { return new BlockPos(-1, -1, -1); } selected--; if (selected < fromServer_inventories.size()) { InventoriesInfoPacketClient.InventoryInfo info = fromServer_inventories.get(selected); if (info == null) { return null; } else { return info.getPos(); } } } return null; } private void requestListsIfNeeded() { listDirty--; if (listDirty <= 0) { RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new InventoriesInfoPacketServer(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), false))); getInventoryOnServer(); listDirty = 20; } } private void updateContentsList() { itemList.removeChildren(); Pair<Panel,Integer> currentPos = MutablePair.of(null, 0); int numcolumns = 5; int spacing = 3; // Collections.sort(fromServer_inventory, (o1, o2) -> o1.stackSize == o2.stackSize ? 0 : o1.stackSize < o2.stackSize ? -1 : 1); Collections.sort(fromServer_inventory, (o1, o2) -> o1.getDisplayName().compareTo(o2.getDisplayName())); String filterText = searchField.getText(); for (ItemStack item : fromServer_inventory) { String displayName = item.getDisplayName(); if (filterText.isEmpty() || displayName.toLowerCase().contains(filterText)) { currentPos = addItemToList(item, itemList, currentPos, numcolumns, spacing); } } } private Pair<Panel,Integer> addItemToList(ItemStack item, WidgetList itemList, Pair<Panel,Integer> currentPos, int numcolumns, int spacing) { Panel panel = currentPos.getKey(); if (panel == null || currentPos.getValue() >= numcolumns) { panel = new Panel(mc, this).setLayout(new HorizontalLayout().setSpacing(spacing).setHorizontalMargin(1)) .setDesiredHeight(12).setUserObject(new Integer(-1)).setDesiredHeight(16); currentPos = MutablePair.of(panel, 0); itemList.addChild(panel); } BlockRender blockRender = new BlockRender(mc, this) .setRenderItem(item) .setUserObject(1) // Mark as a special stack in the renderer (for tooltip) .setOffsetX(-1) .setOffsetY(-1) .setHilightOnHover(true); blockRender.addSelectionEvent(new BlockRenderEvent() { @Override public void select(Widget widget) { BlockRender br = (BlockRender) widget; Object item = br.getRenderItem(); if (item != null) { boolean shift = Keyboard.isKeyDown(Keyboard.KEY_LSHIFT) || Keyboard.isKeyDown(Keyboard.KEY_RSHIFT); requestItem((ItemStack) item, shift ? 
1 : -1); } } @Override public void doubleClick(Widget widget) { } }); panel.addChild(blockRender); currentPos.setValue(currentPos.getValue() + 1); return currentPos; } private void requestItem(ItemStack stack, int amount) { network.sendToServer(new PacketRequestItem(tileEntity.getDimension(), tileEntity.getStorageScannerPos(), getSelectedContainerPos(), stack, amount)); getInventoryOnServer(); } private void changeRoutable(BlockPos c) { sendServerCommand(RFToolsMessages.INSTANCE, tileEntity.getDimension(), StorageScannerTileEntity.CMD_TOGGLEROUTABLE, new Argument("pos", c)); listDirty = 0; } private void updateStorageList() { storageList.removeChildren(); addStorageLine(null, "<Starred>", false); for (InventoriesInfoPacketClient.InventoryInfo c : fromServer_inventories) { String displayName = c.getName(); boolean routable = c.isRoutable(); addStorageLine(c, displayName, routable); } storageList.clearHilightedRows(); int i = 0; for (InventoriesInfoPacketClient.InventoryInfo c : fromServer_inventories) { if (fromServer_foundInventories.contains(c.getPos())) { storageList.addHilightedRow(i+1); } i++; } } private void addStorageLine(InventoriesInfoPacketClient.InventoryInfo c, String displayName, boolean routable) { Panel panel = new Panel(mc, this).setLayout(new HorizontalLayout()); panel.addChild(new BlockRender(mc, this).setRenderItem(c == null ? null : c.getBlock())); AbstractWidget label = new Label(mc, this).setColor(StyleConfig.colorTextInListNormal) .setText(displayName) .setDynamic(true) .setHorizontalAlignment(HorizontalAlignment.ALIGH_LEFT) .setDesiredWidth(58); if (c == null) { label.setTooltips(TextFormatting.GREEN + "All routable inventories"); } else { label.setTooltips(TextFormatting.GREEN + "Block at: " + TextFormatting.WHITE + BlockPosTools.toString(c.getPos()), TextFormatting.GREEN + "Name: " + TextFormatting.WHITE + displayName, "(doubleclick to highlight)"); } panel.addChild(label); if (c != null) { ImageChoiceLabel choiceLabel = new ImageChoiceLabel(mc, this) .addChoiceEvent((parent, newChoice) -> changeRoutable(c.getPos())).setDesiredWidth(13); choiceLabel.addChoice("No", "Not routable", guielements, 131, 19); choiceLabel.addChoice("Yes", "Routable", guielements, 115, 19); choiceLabel.setCurrentChoice(routable ? 1 : 0); panel.addChild(choiceLabel); } storageList.addChild(panel); } @Override protected void drawGuiContainerBackgroundLayer(float v, int i, int i2) { updateStorageList(); updateContentsList(); requestListsIfNeeded(); int selected = storageList.getSelected(); if (selected <= 0 || storageList.getChildCount() <= 2) { upButton.setEnabled(false); downButton.setEnabled(false); topButton.setEnabled(false); bottomButton.setEnabled(false); } else if (selected == 1) { topButton.setEnabled(false); upButton.setEnabled(false); downButton.setEnabled(true); bottomButton.setEnabled(true); } else if (selected == storageList.getChildCount()-1) { topButton.setEnabled(true); upButton.setEnabled(true); downButton.setEnabled(false); bottomButton.setEnabled(false); } else { topButton.setEnabled(true); upButton.setEnabled(true); downButton.setEnabled(true); bottomButton.setEnabled(true); } if (!tileEntity.isDummy()) { tileEntity.requestRfFromServer(RFTools.MODID); int currentRF = GenericEnergyStorageTileEntity.getCurrentRF(); energyBar.setValue(currentRF); exportToStarred.setCurrentChoice(tileEntity.isExportToCurrent() ? 
0 : 1); } else { if (System.currentTimeMillis() - lastTime > 300) { lastTime = System.currentTimeMillis(); RFToolsMessages.INSTANCE.sendToServer(new PacketGetInfoFromServer(RFTools.MODID, new ScannerInfoPacketServer(tileEntity.getDimension(), tileEntity.getPos()))); } energyBar.setValue(ScannerInfoPacketClient.rfReceived); exportToStarred.setCurrentChoice(ScannerInfoPacketClient.exportToCurrentReceived ? 0 : 1); } drawWindow(); } @Override protected void drawGuiContainerForegroundLayer(int i1, int i2) { int x = Mouse.getEventX() * width / mc.displayWidth; int y = height - Mouse.getEventY() * height / mc.displayHeight - 1; List<String> tooltips = craftingGrid.getWindow().getTooltips(); if (tooltips != null) { drawHoveringText(tooltips, window.getTooltipItems(), x - guiLeft, y - guiTop, mc.fontRendererObj); } super.drawGuiContainerForegroundLayer(i1, i2); } @Override public void drawScreen(int mouseX, int mouseY, float partialTicks) { super.drawScreen(mouseX, mouseY, partialTicks); int x = Mouse.getEventX() * width / mc.displayWidth; int y = height - Mouse.getEventY() * height / mc.displayHeight - 1; Widget widget = window.getToplevel().getWidgetAtPosition(x, y); if (widget instanceof BlockRender) { BlockRender blockRender = (BlockRender) widget; Object renderItem = blockRender.getRenderItem(); ItemStack itemStack; if (renderItem instanceof ItemStack) { itemStack = (ItemStack) renderItem; } else if (renderItem instanceof Block) { itemStack = new ItemStack((Block) renderItem); } else if (renderItem instanceof Item) { itemStack = new ItemStack((Item) renderItem); } else { itemStack = null; } if (itemStack != null) { boolean custom = blockRender.getUserObject() instanceof Integer; customRenderToolTip(itemStack, mouseX, mouseY, custom); } } } private void customRenderToolTip(ItemStack stack, int x, int y, boolean custom) { List<String> list = stack.getTooltip(this.mc.thePlayer, this.mc.gameSettings.advancedItemTooltips); for (int i = 0; i < list.size(); ++i) { if (i == 0) { list.set(i, stack.getRarity().rarityColor + list.get(i)); } else { list.set(i, TextFormatting.GRAY + list.get(i)); } } if (custom) { List<String> newlist = new ArrayList<>(); newlist.add(TextFormatting.GREEN + "Click: "+ TextFormatting.WHITE + "full stack"); newlist.add(TextFormatting.GREEN + "Shift + click: "+ TextFormatting.WHITE + "single item"); newlist.add(""); newlist.addAll(list); list = newlist; } FontRenderer font = stack.getItem().getFontRenderer(stack); this.drawHoveringText(list, x, y, (font == null ? fontRendererObj : font)); } private static long lastTime = 0; @Override protected void drawWindow() { super.drawWindow(); craftingGrid.draw(); } }
Fixed searching in the storage scanner so that 'RED' will highlight the inventories with redstone AND also actually find redstone in the list of items
src/main/java/mcjty/rftools/blocks/storagemonitor/GuiStorageScanner.java
Fixed searching in the storage scanner so that 'RED' will highlight the inventories with redstone AND also actually find redstone in the list of items
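Note on the search fix above: the change makes one case-insensitive match drive both the inventory highlighting and the item-list filter, so a query like 'RED' matches 'Redstone' in both views. The full diff is not reproduced here, so the following is only a minimal, self-contained sketch of that idea; SearchFilter, matches and countMatches are hypothetical names and are not part of the RFTools code.

import java.util.List;
import java.util.Locale;
import java.util.function.Predicate;

final class SearchFilter {
    private SearchFilter() {}

    /** Builds a predicate that matches display names case-insensitively against the query. */
    static Predicate<String> matches(String query) {
        final String needle = query.toLowerCase(Locale.ROOT);
        return name -> needle.isEmpty() || name.toLowerCase(Locale.ROOT).contains(needle);
    }

    /** Example: the same predicate can filter the item list and decide which rows to highlight. */
    static long countMatches(List<String> displayNames, String query) {
        return displayNames.stream().filter(matches(query)).count();
    }
}

Normalizing both the query and the display name through the same helper is what keeps the highlighted inventories and the filtered item list in agreement.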
Java
mit
c9dd92345382e57c55396b5bc146023c84c5f2a5
0
Valakor/CS201-Coursework,brandonsbarber/CS201-Course-Work
package cs201.gui; import java.awt.BorderLayout; import java.awt.CardLayout; import java.awt.Color; import java.awt.Dimension; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JScrollPane; import cs201.agents.PersonAgent; import cs201.agents.PersonAgent.Intention; import cs201.gui.structures.market.MarketAnimationPanel; import cs201.gui.structures.restaurant.RestaurantAnimationPanelMatt; import cs201.helper.CityDirectory; import cs201.helper.CityTime; import cs201.roles.marketRoles.MarketEmployeeRole; import cs201.roles.marketRoles.MarketManagerRole.ItemRequest; import cs201.structures.market.MarketStructure; import cs201.structures.restaurant.RestaurantMatt; public class SimCity201 extends JFrame { private final int SIZEX = 1200; private final int SIZEY = 800; CityPanel cityPanel; JPanel buildingPanels; CardLayout cardLayout; SettingsPanel settingsPanel; public SimCity201() { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); setVisible(true); setSize(SIZEX, SIZEY); JPanel guiPanel = new JPanel(); setLayout(new BorderLayout()); guiPanel.setLayout(new BorderLayout()); cityPanel = new CityPanel(); cityPanel.setPreferredSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityPanel.setMaximumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityPanel.setMinimumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cardLayout = new CardLayout(); buildingPanels = new JPanel(); buildingPanels.setLayout(cardLayout); buildingPanels.setMinimumSize(new Dimension(SIZEX * 2/5, SIZEY * 3 / 5)); buildingPanels.setMaximumSize(new Dimension(SIZEX * 2/5, SIZEY * 3 / 5)); buildingPanels.setPreferredSize(new Dimension(SIZEX * 2/5, SIZEY * 3 / 5)); buildingPanels.setBackground(Color.YELLOW); // Create initial buildings here and add them to cityPanel and buildingPanels JScrollPane cityScrollPane = new JScrollPane(cityPanel); cityScrollPane.setMinimumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityScrollPane.setMaximumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityScrollPane.setPreferredSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityScrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); cityScrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS); guiPanel.add(BorderLayout.WEST, cityScrollPane); guiPanel.add(BorderLayout.EAST, buildingPanels); settingsPanel = new SettingsPanel(); settingsPanel.setMinimumSize(new Dimension(SIZEX, SIZEY * 2/5)); settingsPanel.setMaximumSize(new Dimension(SIZEX, SIZEY * 2/5)); settingsPanel.setPreferredSize(new Dimension(SIZEX, SIZEY * 2/5)); add(BorderLayout.SOUTH, settingsPanel); add(BorderLayout.NORTH, guiPanel); settingsPanel.addPanel("Restaurants",new ConfigPanel()); settingsPanel.addPanel("Transit",new TransitConfigPanel()); settingsPanel.addPanel("Transit",new TransitConfigPanel()); settingsPanel.addPanel("Banks",new ConfigPanel()); settingsPanel.addPanel("Markets",new ConfigPanel()); settingsPanel.addPanel("Housing",new ConfigPanel()); settingsPanel.addPanel("Housing",new ConfigPanel()); settingsPanel.addPanel("Housing",new ConfigPanel()); settingsPanel.addPanel("Restaurants",new ConfigPanel()); settingsPanel.addPanel("Restaurants",new ConfigPanel()); RestaurantAnimationPanelMatt g = new RestaurantAnimationPanelMatt(0,this); RestaurantMatt r = new RestaurantMatt(100,100,50,50,0,g); r.setStructurePanel(g); r.setClosingTime(new CityTime(10, 30)); buildingPanels.add(g,""+0); cityPanel.addStructure(r); CityDirectory.getInstance().addRestaurant(r); MarketAnimationPanel mG = new 
MarketAnimationPanel(1,this, 50, 50); MarketStructure m = new MarketStructure(225,100,50,50,1,mG); m.setStructurePanel(mG); buildingPanels.add(mG,""+1); cityPanel.addStructure(m); CityDirectory.getInstance().addMarket(m); MarketAnimationPanel mG2 = new MarketAnimationPanel(1,this, 50, 50); MarketStructure m2 = new MarketStructure(19*25,9*25,50,50,1,mG2); m.setStructurePanel(mG2); buildingPanels.add(mG2,""+2); cityPanel.addStructure(m2); CityDirectory.getInstance().addMarket(m2); pack(); CityDirectory.getInstance().startTime(); /* * Delivery Truck testing m.addInventory("Pizza", 20, 20); m.getManager().msgHereIsMyOrderForDelivery(r, new ItemRequest("Pizza",1)); m.getManager().pickAndExecuteAnAction(); ((MarketEmployeeRole)m.getEmployees().get(0)).pickAndExecuteAnAction(); m.getManager().pickAndExecuteAnAction(); */ /* PersonAgent p = new PersonAgent("Cashier"); p.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantCashier, r, null); CityDirectory.getInstance().addPerson(p); p.startThread(); PersonAgent p2 = new PersonAgent("Cook"); p2.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantCook, r, null); CityDirectory.getInstance().addPerson(p2); p2.startThread(); PersonAgent p3 = new PersonAgent("Waiter 1"); p3.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantWaiter, r, null); CityDirectory.getInstance().addPerson(p3); p3.startThread(); PersonAgent p4 = new PersonAgent("Host"); p4.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantHost, r, null); CityDirectory.getInstance().addPerson(p4); p4.startThread(); PersonAgent p5 = new PersonAgent("Customer 1"); p5.setupPerson(CityDirectory.getInstance().getTime(), null, null, null, r, null); CityDirectory.getInstance().addPerson(p5); p5.startThread(); PersonAgent p6 = new PersonAgent("Waiter 2"); p6.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantWaiter, r, null); CityDirectory.getInstance().addPerson(p6); p6.startThread(); PersonAgent p7 = new PersonAgent("Customer 2"); p7.setupPerson(CityDirectory.getInstance().getTime(), null, null, null, r, null); CityDirectory.getInstance().addPerson(p7); p7.startThread(); */ } public void displayStructurePanel(StructurePanel bp) { cardLayout.show(buildingPanels, bp.getName()); } }
src/cs201/gui/SimCity201.java
package cs201.gui; import java.awt.BorderLayout; import java.awt.CardLayout; import java.awt.Color; import java.awt.Dimension; import javax.swing.JFrame; import javax.swing.JPanel; import javax.swing.JScrollPane; import cs201.agents.PersonAgent; import cs201.agents.PersonAgent.Intention; import cs201.gui.structures.market.MarketAnimationPanel; import cs201.gui.structures.restaurant.RestaurantAnimationPanelMatt; import cs201.helper.CityDirectory; import cs201.helper.CityTime; import cs201.roles.marketRoles.MarketEmployeeRole; import cs201.roles.marketRoles.MarketManagerRole.ItemRequest; import cs201.structures.market.MarketStructure; import cs201.structures.restaurant.RestaurantMatt; public class SimCity201 extends JFrame { private final int SIZEX = 1200; private final int SIZEY = 800; CityPanel cityPanel; JPanel buildingPanels; CardLayout cardLayout; SettingsPanel settingsPanel; public SimCity201() { setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); setVisible(true); setSize(SIZEX, SIZEY); JPanel guiPanel = new JPanel(); setLayout(new BorderLayout()); guiPanel.setLayout(new BorderLayout()); cityPanel = new CityPanel(); cityPanel.setPreferredSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityPanel.setMaximumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityPanel.setMinimumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cardLayout = new CardLayout(); buildingPanels = new JPanel(); buildingPanels.setLayout(cardLayout); buildingPanels.setMinimumSize(new Dimension(SIZEX * 2/5, SIZEY * 3 / 5)); buildingPanels.setMaximumSize(new Dimension(SIZEX * 2/5, SIZEY * 3 / 5)); buildingPanels.setPreferredSize(new Dimension(SIZEX * 2/5, SIZEY * 3 / 5)); buildingPanels.setBackground(Color.YELLOW); // Create initial buildings here and add them to cityPanel and buildingPanels JScrollPane cityScrollPane = new JScrollPane(cityPanel); cityScrollPane.setMinimumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityScrollPane.setMaximumSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityScrollPane.setPreferredSize(new Dimension(SIZEX * 3/5, SIZEY * 3 / 5)); cityScrollPane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); cityScrollPane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS); guiPanel.add(BorderLayout.WEST, cityScrollPane); guiPanel.add(BorderLayout.EAST, buildingPanels); settingsPanel = new SettingsPanel(); settingsPanel.setMinimumSize(new Dimension(SIZEX, SIZEY * 2/5)); settingsPanel.setMaximumSize(new Dimension(SIZEX, SIZEY * 2/5)); settingsPanel.setPreferredSize(new Dimension(SIZEX, SIZEY * 2/5)); add(BorderLayout.SOUTH, settingsPanel); add(BorderLayout.NORTH, guiPanel); settingsPanel.addPanel("Restaurants",new ConfigPanel()); settingsPanel.addPanel("Transit",new TransitConfigPanel()); settingsPanel.addPanel("Transit",new TransitConfigPanel()); settingsPanel.addPanel("Banks",new ConfigPanel()); settingsPanel.addPanel("Markets",new ConfigPanel()); settingsPanel.addPanel("Housing",new ConfigPanel()); settingsPanel.addPanel("Housing",new ConfigPanel()); settingsPanel.addPanel("Housing",new ConfigPanel()); settingsPanel.addPanel("Restaurants",new ConfigPanel()); settingsPanel.addPanel("Restaurants",new ConfigPanel()); RestaurantAnimationPanelMatt g = new RestaurantAnimationPanelMatt(0,this); RestaurantMatt r = new RestaurantMatt(100,100,50,50,0,g); r.setStructurePanel(g); r.setClosingTime(new CityTime(10, 30)); buildingPanels.add(g,""+0); cityPanel.addStructure(r); CityDirectory.getInstance().addRestaurant(r); MarketAnimationPanel mG = new 
MarketAnimationPanel(1,this, 50, 50); MarketStructure m = new MarketStructure(225,100,50,50,1,mG); m.setStructurePanel(mG); buildingPanels.add(mG,""+1); cityPanel.addStructure(m); CityDirectory.getInstance().addMarket(m); MarketAnimationPanel mG2 = new MarketAnimationPanel(1,this, 50, 50); MarketStructure m2 = new MarketStructure(19*25,9*25,50,50,1,mG2); m.setStructurePanel(mG2); buildingPanels.add(mG2,""+2); cityPanel.addStructure(m2); CityDirectory.getInstance().addMarket(m2); pack(); CityDirectory.getInstance().startTime(); /* * Delivery Truck testing m.addInventory("Pizza", 20, 20); m.getManager().msgHereIsMyOrderForDelivery(r, new ItemRequest("Pizza",1)); m.getManager().pickAndExecuteAnAction(); ((MarketEmployeeRole)m.getEmployees().get(0)).pickAndExecuteAnAction(); m.getManager().pickAndExecuteAnAction(); */ PersonAgent p = new PersonAgent("Cashier"); p.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantCashier, r, null); CityDirectory.getInstance().addPerson(p); p.startThread(); PersonAgent p2 = new PersonAgent("Cook"); p2.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantCook, r, null); CityDirectory.getInstance().addPerson(p2); p2.startThread(); PersonAgent p3 = new PersonAgent("Waiter 1"); p3.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantWaiter, r, null); CityDirectory.getInstance().addPerson(p3); p3.startThread(); PersonAgent p4 = new PersonAgent("Host"); p4.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantHost, r, null); CityDirectory.getInstance().addPerson(p4); p4.startThread(); PersonAgent p5 = new PersonAgent("Customer 1"); p5.setupPerson(CityDirectory.getInstance().getTime(), null, null, null, r, null); CityDirectory.getInstance().addPerson(p5); p5.startThread(); PersonAgent p6 = new PersonAgent("Waiter 2"); p6.setupPerson(CityDirectory.getInstance().getTime(), null, r, Intention.RestaurantWaiter, r, null); CityDirectory.getInstance().addPerson(p6); p6.startThread(); PersonAgent p7 = new PersonAgent("Customer 2"); p7.setupPerson(CityDirectory.getInstance().getTime(), null, null, null, r, null); CityDirectory.getInstance().addPerson(p7); p7.startThread(); } public void displayStructurePanel(StructurePanel bp) { cardLayout.show(buildingPanels, bp.getName()); } }
Removed the extra people in the constructor of SimCity201
src/cs201/gui/SimCity201.java
Removed the extra people in the constructor of SimCity201
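The SimCity201 change above simply wraps the test PersonAgent setup in a block comment inside the constructor. As a hedged aside, one common alternative is to keep such scaffolding behind a compile-time flag so it can be re-enabled without editing comments; the sketch below uses hypothetical names (SimCityBootstrap, SPAWN_TEST_PEOPLE, spawnTestPeople) and is not part of the cs201 project.

public class SimCityBootstrap {
    // Flip to true to bring the test agents back without touching comments.
    private static final boolean SPAWN_TEST_PEOPLE = false;

    public static void main(String[] args) {
        new SimCityBootstrap();
    }

    SimCityBootstrap() {
        // ... normal GUI and city setup would go here ...
        if (SPAWN_TEST_PEOPLE) {
            spawnTestPeople();
        }
    }

    private void spawnTestPeople() {
        // In the real project this would create PersonAgent instances
        // (cashier, cook, waiters, customers) and start their threads.
        System.out.println("spawning test people");
    }
}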
Java
mit
2a183de36abb4388e33695e102a41da3f2a463e7
0
mzmine/mzmine3,mzmine/mzmine3
/* * Copyright 2006-2020 The MZmine Development Team * * This file is part of MZmine. * * MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General * Public License for more details. * * You should have received a copy of the GNU General Public License along with MZmine; if not, * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package io.github.mzmine.modules.io.import_mzxml; import com.google.common.base.Strings; import io.github.mzmine.datamodel.MZmineProject; import io.github.mzmine.datamodel.MassSpectrumType; import io.github.mzmine.datamodel.PolarityType; import io.github.mzmine.datamodel.RawDataFile; import io.github.mzmine.datamodel.impl.SimpleScan; import io.github.mzmine.taskcontrol.AbstractTask; import io.github.mzmine.taskcontrol.TaskStatus; import io.github.mzmine.util.CompressionUtils; import io.github.mzmine.util.ExceptionUtils; import io.github.mzmine.util.scans.ScanUtils; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.util.Base64; import java.util.Date; import java.util.LinkedList; import java.util.logging.Logger; import java.util.zip.DataFormatException; import javax.xml.datatype.DatatypeFactory; import javax.xml.datatype.Duration; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * */ public class MzXMLImportTask extends AbstractTask { private Logger logger = Logger.getLogger(this.getClass().getName()); private File file; private MZmineProject project; private RawDataFile newMZmineFile; private int totalScans = 0, parsedScans; private int peaksCount = 0; private StringBuilder charBuffer; private boolean compressFlag = false; private DefaultHandler handler = new MzXMLHandler(); private String precision; // extracted values private float retentionTime = 0; private int scanNumber = 0; private int msLevel = 1; private PolarityType polarity = PolarityType.UNKNOWN; private String scanId = ""; private double precursorMz = 0d; private int precursorCharge = 0; // Retention time parser private DatatypeFactory dataTypeFactory; /* * This variables are used to set the number of fragments that one single scan can have. The * initial size of array is set to 10, but it depends of fragmentation level. */ private int parentTreeValue[] = new int[10]; private int msLevelTree = 0; /* * This stack stores the current scan and all his fragments until all the information is recover. 
* The logic is FIFO at the moment of write into the RawDataFile */ private LinkedList<SimpleScan> parentStack; /* * This variable hold the present scan or fragment, it is send to the stack when another * scan/fragment appears as a parser.startElement */ private SimpleScan buildingScan; public MzXMLImportTask(MZmineProject project, File fileToOpen, RawDataFile newMZmineFile) { // 256 kilo-chars buffer charBuffer = new StringBuilder(1 << 18); parentStack = new LinkedList<SimpleScan>(); this.project = project; this.file = fileToOpen; this.newMZmineFile = newMZmineFile; } /** * @see io.github.mzmine.taskcontrol.Task#getFinishedPercentage() */ @Override public double getFinishedPercentage() { return totalScans == 0 ? 0 : (double) parsedScans / totalScans; } /** * @see java.lang.Runnable#run() */ @Override public void run() { setStatus(TaskStatus.PROCESSING); logger.info("Started parsing file " + file); // Use the default (non-validating) parser SAXParserFactory factory = SAXParserFactory.newInstance(); try { dataTypeFactory = DatatypeFactory.newInstance(); SAXParser saxParser = factory.newSAXParser(); saxParser.parse(file, handler); project.addFile(newMZmineFile); } catch (Throwable e) { e.printStackTrace(); /* we may already have set the status to CANCELED */ if (getStatus() == TaskStatus.PROCESSING) { setStatus(TaskStatus.ERROR); setErrorMessage(ExceptionUtils.exceptionToString(e)); } return; } if (isCanceled()) { return; } if (parsedScans == 0) { setStatus(TaskStatus.ERROR); setErrorMessage("No scans found"); return; } logger.info("Finished parsing " + file + ", parsed " + parsedScans + " scans"); setStatus(TaskStatus.FINISHED); } @Override public String getTaskDescription() { return "Opening file " + file; } private class MzXMLHandler extends DefaultHandler { @Override public void startElement(String namespaceURI, String lName, // local // name String qName, // qualified name Attributes attrs) throws SAXException { if (isCanceled()) { throw new SAXException("Parsing Cancelled"); } // <msRun> if (qName.equals("msRun")) { String s = attrs.getValue("scanCount"); if (s != null) { totalScans = Integer.parseInt(s); } } // <scan> if (qName.equalsIgnoreCase("scan")) { if (buildingScan != null) { parentStack.addFirst(buildingScan); buildingScan = null; } /* * Only num, msLevel & peaksCount values are required according with mzxml standard, the * others are optional */ scanNumber = Integer.parseInt(attrs.getValue("num")); // mzXML files with empty msLevel attribute do exist, so we use // 1 as default msLevel = 1; if (!Strings.isNullOrEmpty(attrs.getValue("msLevel"))) { msLevel = Integer.parseInt(attrs.getValue("msLevel")); } String scanType = attrs.getValue("scanType"); String filterLine = attrs.getValue("filterLine"); scanId = filterLine; if (Strings.isNullOrEmpty(scanId)) { scanId = scanType; } String polarityAttr = attrs.getValue("polarity"); if ((polarityAttr != null) && (polarityAttr.length() == 1)) { polarity = PolarityType.fromSingleChar(polarityAttr); } else { polarity = PolarityType.UNKNOWN; } peaksCount = Integer.parseInt(attrs.getValue("peaksCount")); // Parse retention time String retentionTimeStr = attrs.getValue("retentionTime"); if (retentionTimeStr != null) { Date currentDate = new Date(); Duration dur = dataTypeFactory.newDuration(retentionTimeStr); retentionTime = (float) (dur.getTimeInMillis(currentDate) / 1000d / 60d); } else { setStatus(TaskStatus.ERROR); setErrorMessage("This file does not contain retentionTime for scans"); throw new SAXException("Could not read retention 
time"); } int parentScan = -1; if (msLevel > 9) { setStatus(TaskStatus.ERROR); setErrorMessage("msLevel value bigger than 10"); throw new SAXException("The value of msLevel is bigger than 10"); } /* * if (msLevel > 1) { parentScan = parentTreeValue[msLevel - 1]; for (SimpleScan p : * parentStack) { if (p.getScanNumber() == parentScan) { p.addFragmentScan(scanNumber); } } * } */ // Setting the level of fragment of scan and parent scan number msLevelTree++; parentTreeValue[msLevel] = scanNumber; } // <peaks> if (qName.equalsIgnoreCase("peaks")) { // clean the current char buffer for the new element charBuffer.setLength(0); compressFlag = false; String compressionType = attrs.getValue("compressionType"); if ((compressionType == null) || (compressionType.equals("none"))) { compressFlag = false; } else { compressFlag = true; } precision = attrs.getValue("precision"); } // <precursorMz> if (qName.equalsIgnoreCase("precursorMz")) { // clean the current char buffer for the new element charBuffer.setLength(0); String precursorChargeStr = attrs.getValue("precursorCharge"); if (precursorChargeStr != null) { precursorCharge = Integer.parseInt(precursorChargeStr); if (buildingScan != null) { buildingScan.setPrecursorCharge(precursorCharge); } } } } /** * endElement() */ @Override public void endElement(String namespaceURI, String sName, // simple name String qName // qualified name ) throws SAXException { // </scan> if (qName.equalsIgnoreCase("scan")) { msLevelTree--; /* * At this point we verify if the scan and his fragments are closed, so we include the * present scan/fragment into the stack and start to take elements from them (FIFO) for the * RawDataFile. */ if (msLevelTree == 0) { parentStack.addFirst(buildingScan); reset(); buildingScan = null; while (!parentStack.isEmpty()) { SimpleScan currentScan = parentStack.removeLast(); try { newMZmineFile.addScan(currentScan); } catch (IOException e) { e.printStackTrace(); setStatus(TaskStatus.ERROR); setErrorMessage("IO error: " + e); throw new SAXException("Parsing error: " + e); } parsedScans++; } /* * The scan with all his fragments is in the RawDataFile, now we clean the stack for the * next scan and fragments. 
*/ parentStack.clear(); } return; } // <precursorMz> if (qName.equalsIgnoreCase("precursorMz")) { final String textContent = charBuffer.toString(); precursorMz = 0d; if (!textContent.isEmpty()) { precursorMz = Double.parseDouble(textContent); } if (buildingScan != null) { buildingScan.setPrecursorMZ(precursorMz); } return; } // <peaks> if (qName.equalsIgnoreCase("peaks")) { byte[] peakBytes = Base64.getDecoder().decode(charBuffer.toString()); if (compressFlag) { try { peakBytes = CompressionUtils.decompress(peakBytes); } catch (DataFormatException e) { setStatus(TaskStatus.ERROR); setErrorMessage("Corrupt compressed peak: " + e.toString()); throw new SAXException("Parsing Cancelled"); } } // make a data input stream DataInputStream peakStream = new DataInputStream(new ByteArrayInputStream(peakBytes)); double mzValues[] = new double[peaksCount]; double intensityValues[] = new double[peaksCount]; try { for (int i = 0; i < peaksCount; i++) { // Always respect this order pairOrder="m/z-int" double mz; double intensity; if ("64".equals(precision)) { mz = peakStream.readDouble(); intensity = peakStream.readDouble(); } else { mz = peakStream.readFloat(); intensity = peakStream.readFloat(); } // Copy m/z and intensity data mzValues[i] = mz; intensityValues[i] = intensity; } } catch (IOException eof) { setStatus(TaskStatus.ERROR); setErrorMessage("Corrupt mzXML file"); throw new SAXException("Parsing Cancelled"); } // Auto-detect whether this scan is centroided MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(mzValues, intensityValues); // Set the final data points to the scan buildingScan = new SimpleScan(newMZmineFile, scanNumber, msLevel, retentionTime, precursorMz, precursorCharge, mzValues, intensityValues, spectrumType, polarity, scanId, null); return; } } private void reset() { buildingScan = null; retentionTime = 0; scanNumber = 0; msLevel = 1; polarity = PolarityType.UNKNOWN; scanId = ""; precursorMz = 0d; precursorCharge = 0; } /** * characters() * * @see org.xml.sax.ContentHandler#characters(char[], int, int) */ @Override public void characters(char buf[], int offset, int len) throws SAXException { charBuffer.append(buf, offset, len); } } }
src/main/java/io/github/mzmine/modules/io/import_mzxml/MzXMLImportTask.java
/* * Copyright 2006-2020 The MZmine Development Team * * This file is part of MZmine. * * MZmine is free software; you can redistribute it and/or modify it under the terms of the GNU * General Public License as published by the Free Software Foundation; either version 2 of the * License, or (at your option) any later version. * * MZmine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even * the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General * Public License for more details. * * You should have received a copy of the GNU General Public License along with MZmine; if not, * write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package io.github.mzmine.modules.io.import_mzxml; import com.google.common.base.Strings; import io.github.mzmine.datamodel.MZmineProject; import io.github.mzmine.datamodel.MassSpectrumType; import io.github.mzmine.datamodel.PolarityType; import io.github.mzmine.datamodel.RawDataFile; import io.github.mzmine.datamodel.impl.SimpleScan; import io.github.mzmine.taskcontrol.AbstractTask; import io.github.mzmine.taskcontrol.TaskStatus; import io.github.mzmine.util.CompressionUtils; import io.github.mzmine.util.ExceptionUtils; import io.github.mzmine.util.scans.ScanUtils; import java.io.ByteArrayInputStream; import java.io.DataInputStream; import java.io.File; import java.io.IOException; import java.util.Base64; import java.util.Date; import java.util.LinkedList; import java.util.logging.Logger; import java.util.zip.DataFormatException; import javax.xml.datatype.DatatypeFactory; import javax.xml.datatype.Duration; import javax.xml.parsers.SAXParser; import javax.xml.parsers.SAXParserFactory; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import org.xml.sax.helpers.DefaultHandler; /** * */ public class MzXMLImportTask extends AbstractTask { private Logger logger = Logger.getLogger(this.getClass().getName()); private File file; private MZmineProject project; private RawDataFile newMZmineFile; private int totalScans = 0, parsedScans; private int peaksCount = 0; private StringBuilder charBuffer; private boolean compressFlag = false; private DefaultHandler handler = new MzXMLHandler(); private String precision; // Retention time parser private DatatypeFactory dataTypeFactory; /* * This variables are used to set the number of fragments that one single scan can have. The * initial size of array is set to 10, but it depends of fragmentation level. */ private int parentTreeValue[] = new int[10]; private int msLevelTree = 0; /* * This stack stores the current scan and all his fragments until all the information is recover. * The logic is FIFO at the moment of write into the RawDataFile */ private LinkedList<SimpleScan> parentStack; /* * This variable hold the present scan or fragment, it is send to the stack when another * scan/fragment appears as a parser.startElement */ private SimpleScan buildingScan; public MzXMLImportTask(MZmineProject project, File fileToOpen, RawDataFile newMZmineFile) { // 256 kilo-chars buffer charBuffer = new StringBuilder(1 << 18); parentStack = new LinkedList<SimpleScan>(); this.project = project; this.file = fileToOpen; this.newMZmineFile = newMZmineFile; } /** * @see io.github.mzmine.taskcontrol.Task#getFinishedPercentage() */ @Override public double getFinishedPercentage() { return totalScans == 0 ? 
0 : (double) parsedScans / totalScans; } /** * @see java.lang.Runnable#run() */ @Override public void run() { setStatus(TaskStatus.PROCESSING); logger.info("Started parsing file " + file); // Use the default (non-validating) parser SAXParserFactory factory = SAXParserFactory.newInstance(); try { dataTypeFactory = DatatypeFactory.newInstance(); SAXParser saxParser = factory.newSAXParser(); saxParser.parse(file, handler); project.addFile(newMZmineFile); } catch (Throwable e) { e.printStackTrace(); /* we may already have set the status to CANCELED */ if (getStatus() == TaskStatus.PROCESSING) { setStatus(TaskStatus.ERROR); setErrorMessage(ExceptionUtils.exceptionToString(e)); } return; } if (isCanceled()) { return; } if (parsedScans == 0) { setStatus(TaskStatus.ERROR); setErrorMessage("No scans found"); return; } logger.info("Finished parsing " + file + ", parsed " + parsedScans + " scans"); setStatus(TaskStatus.FINISHED); } @Override public String getTaskDescription() { return "Opening file " + file; } private class MzXMLHandler extends DefaultHandler { @Override public void startElement(String namespaceURI, String lName, // local // name String qName, // qualified name Attributes attrs) throws SAXException { if (isCanceled()) { throw new SAXException("Parsing Cancelled"); } // <msRun> if (qName.equals("msRun")) { String s = attrs.getValue("scanCount"); if (s != null) { totalScans = Integer.parseInt(s); } } // <scan> if (qName.equalsIgnoreCase("scan")) { if (buildingScan != null) { parentStack.addFirst(buildingScan); buildingScan = null; } /* * Only num, msLevel & peaksCount values are required according with mzxml standard, the * others are optional */ int scanNumber = Integer.parseInt(attrs.getValue("num")); // mzXML files with empty msLevel attribute do exist, so we use // 1 as default int msLevel = 1; if (!Strings.isNullOrEmpty(attrs.getValue("msLevel"))) { msLevel = Integer.parseInt(attrs.getValue("msLevel")); } String scanType = attrs.getValue("scanType"); String filterLine = attrs.getValue("filterLine"); String scanId = filterLine; if (Strings.isNullOrEmpty(scanId)) { scanId = scanType; } PolarityType polarity; String polarityAttr = attrs.getValue("polarity"); if ((polarityAttr != null) && (polarityAttr.length() == 1)) { polarity = PolarityType.fromSingleChar(polarityAttr); } else { polarity = PolarityType.UNKNOWN; } peaksCount = Integer.parseInt(attrs.getValue("peaksCount")); // Parse retention time float retentionTime = 0; String retentionTimeStr = attrs.getValue("retentionTime"); if (retentionTimeStr != null) { Date currentDate = new Date(); Duration dur = dataTypeFactory.newDuration(retentionTimeStr); retentionTime = (float) (dur.getTimeInMillis(currentDate) / 1000d / 60d); } else { setStatus(TaskStatus.ERROR); setErrorMessage("This file does not contain retentionTime for scans"); throw new SAXException("Could not read retention time"); } int parentScan = -1; if (msLevel > 9) { setStatus(TaskStatus.ERROR); setErrorMessage("msLevel value bigger than 10"); throw new SAXException("The value of msLevel is bigger than 10"); } /* * if (msLevel > 1) { parentScan = parentTreeValue[msLevel - 1]; for (SimpleScan p : * parentStack) { if (p.getScanNumber() == parentScan) { p.addFragmentScan(scanNumber); } } * } */ // Setting the level of fragment of scan and parent scan number msLevelTree++; parentTreeValue[msLevel] = scanNumber; buildingScan = new SimpleScan(newMZmineFile, scanNumber, msLevel, retentionTime, 0, 0, new double[0], new double[0], null, polarity, scanId, null); } // <peaks> if 
(qName.equalsIgnoreCase("peaks")) { // clean the current char buffer for the new element charBuffer.setLength(0); compressFlag = false; String compressionType = attrs.getValue("compressionType"); if ((compressionType == null) || (compressionType.equals("none"))) { compressFlag = false; } else { compressFlag = true; } precision = attrs.getValue("precision"); } // <precursorMz> if (qName.equalsIgnoreCase("precursorMz")) { // clean the current char buffer for the new element charBuffer.setLength(0); String precursorCharge = attrs.getValue("precursorCharge"); if (precursorCharge != null) { buildingScan.setPrecursorCharge(Integer.parseInt(precursorCharge)); } } } /** * endElement() */ @Override public void endElement(String namespaceURI, String sName, // simple name String qName // qualified name ) throws SAXException { // </scan> if (qName.equalsIgnoreCase("scan")) { msLevelTree--; /* * At this point we verify if the scan and his fragments are closed, so we include the * present scan/fragment into the stack and start to take elements from them (FIFO) for the * RawDataFile. */ if (msLevelTree == 0) { parentStack.addFirst(buildingScan); buildingScan = null; while (!parentStack.isEmpty()) { SimpleScan currentScan = parentStack.removeLast(); try { newMZmineFile.addScan(currentScan); } catch (IOException e) { e.printStackTrace(); setStatus(TaskStatus.ERROR); setErrorMessage("IO error: " + e); throw new SAXException("Parsing error: " + e); } parsedScans++; } /* * The scan with all his fragments is in the RawDataFile, now we clean the stack for the * next scan and fragments. */ parentStack.clear(); } return; } // <precursorMz> if (qName.equalsIgnoreCase("precursorMz")) { final String textContent = charBuffer.toString(); double precursorMz = 0d; if (!textContent.isEmpty()) { precursorMz = Double.parseDouble(textContent); } buildingScan.setPrecursorMZ(precursorMz); return; } // <peaks> if (qName.equalsIgnoreCase("peaks")) { byte[] peakBytes = Base64.getDecoder().decode(charBuffer.toString()); if (compressFlag) { try { peakBytes = CompressionUtils.decompress(peakBytes); } catch (DataFormatException e) { setStatus(TaskStatus.ERROR); setErrorMessage("Corrupt compressed peak: " + e.toString()); throw new SAXException("Parsing Cancelled"); } } // make a data input stream DataInputStream peakStream = new DataInputStream(new ByteArrayInputStream(peakBytes)); double mzValues[] = new double[peaksCount]; double intensityValues[] = new double[peaksCount]; try { for (int i = 0; i < peaksCount; i++) { // Always respect this order pairOrder="m/z-int" double mz; double intensity; if ("64".equals(precision)) { mz = peakStream.readDouble(); intensity = peakStream.readDouble(); } else { mz = peakStream.readFloat(); intensity = peakStream.readFloat(); } // Copy m/z and intensity data mzValues[i] = mz; intensityValues[i] = intensity; } } catch (IOException eof) { setStatus(TaskStatus.ERROR); setErrorMessage("Corrupt mzXML file"); throw new SAXException("Parsing Cancelled"); } // Auto-detect whether this scan is centroided MassSpectrumType spectrumType = ScanUtils.detectSpectrumType(mzValues, intensityValues); // Set the centroided tag buildingScan.setSpectrumType(spectrumType); // Set the final data points to the scan // This line awaits you, Robin: ~SteffenHeu :) // buildingScan.setDataPoints(mzValues, intensityValues); return; } } /** * characters() * * @see org.xml.sax.ContentHandler#characters(char[], int, int) */ @Override public void characters(char buf[], int offset, int len) throws SAXException { 
charBuffer.append(buf, offset, len); } } }
Fix mzxML import without setDataPoints method
src/main/java/io/github/mzmine/modules/io/import_mzxml/MzXMLImportTask.java
Fix mzxML import without setDataPoints method
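The mzXML fix above works by deferring scan construction: the old handler built a SimpleScan with empty arrays in startElement and relied on a setDataPoints call that no longer exists, while the new handler keeps the parsed <scan> attributes in fields and constructs the SimpleScan only at </peaks>, once the m/z and intensity arrays have been decoded. The sketch below shows that pattern in isolation; Scan and PeakHandler are hypothetical stand-ins, not MZmine classes.

final class Scan {
    final int number;
    final float retentionTime;
    final double[] mzValues;
    final double[] intensityValues;

    Scan(int number, float retentionTime, double[] mzValues, double[] intensityValues) {
        this.number = number;
        this.retentionTime = retentionTime;
        this.mzValues = mzValues;
        this.intensityValues = intensityValues;
    }
}

final class PeakHandler {
    // Accumulated while parsing the <scan> element's attributes.
    private int scanNumber;
    private float retentionTime;

    void onScanStart(int number, float rt) {
        scanNumber = number;
        retentionTime = rt;
    }

    // Called at </peaks>, when the decoded arrays are finally available.
    Scan onPeaksEnd(double[] mz, double[] intensity) {
        return new Scan(scanNumber, retentionTime, mz, intensity);
    }
}

Building the object only once all of its data is available avoids half-initialized scans and removes the need for mutating setters on the data model.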
Java
mit
23852c6e43f6060b56274d208dd806c5b50ca2f4
0
mick88/filemanager,jsavage/filemanager,AiJiaZone/filemanager
/******************************************************************************* * Copyright (c) 2014 Michal Dabski * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ******************************************************************************/ package com.michaldabski.filemanager.folders; import android.annotation.TargetApi; import android.app.AlertDialog; import android.app.Dialog; import android.app.Fragment; import android.app.ProgressDialog; import android.content.ActivityNotFoundException; import android.content.DialogInterface; import android.content.DialogInterface.OnClickListener; import android.content.Intent; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.ActionMode; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.widget.AbsListView; import android.widget.AbsListView.MultiChoiceModeListener; import android.widget.AbsListView.OnScrollListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.AdapterView.OnItemLongClickListener; import android.widget.EditText; import android.widget.HeaderViewListAdapter; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.ShareActionProvider; import android.widget.TextView; import android.widget.Toast; import com.michaldabski.filemanager.AppPreferences; import com.michaldabski.filemanager.FileManagerApplication; import com.michaldabski.filemanager.R; import com.michaldabski.filemanager.clipboard.Clipboard; import com.michaldabski.filemanager.clipboard.Clipboard.FileAction; import com.michaldabski.filemanager.clipboard.FileOperationListener; import com.michaldabski.filemanager.favourites.FavouriteFolder; import com.michaldabski.filemanager.favourites.FavouritesManager; import com.michaldabski.filemanager.favourites.FavouritesManager.FolderAlreadyFavouriteException; import com.michaldabski.filemanager.folders.FileAdapter.OnFileSelectedListener; import com.michaldabski.utils.AsyncResult; import com.michaldabski.utils.FilePreviewCache; import com.michaldabski.utils.FileUtils; import com.michaldabski.utils.FontApplicator; import com.michaldabski.utils.IntentUtils; import com.michaldabski.utils.ListViewUtils; import com.michaldabski.utils.OnResultListener; 
import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; public class FolderFragment extends Fragment implements OnItemClickListener, OnScrollListener, OnItemLongClickListener, MultiChoiceModeListener, OnFileSelectedListener { private static final String LOG_TAG = "FolderFragment"; private final int DISTANCE_TO_HIDE_ACTIONBAR = 0; public static final String EXTRA_DIR = "directory", EXTRA_SELECTED_FILES = "selected_files", EXTRA_SCROLL_POSITION = "scroll_position"; File currentDir, nextDir = null; int topVisibleItem=0; List<File> files = null; @SuppressWarnings("rawtypes") AsyncTask loadFilesTask=null; AbsListView listView = null; FileAdapter fileAdapter; private ActionMode actionMode = null; private final HashSet<File> selectedFiles = new HashSet<File>(); private ShareActionProvider shareActionProvider; // set to true when selection shouldnt be cleared from switching out fragments boolean preserveSelection = false; FilePreviewCache thumbCache; public AbsListView getListView() { return listView; } private void setListAdapter(FileAdapter fileAdapter) { this.fileAdapter = fileAdapter; if (listView != null) { listView.setAdapter(fileAdapter); listView.setSelection(topVisibleItem); getView().findViewById(R.id.layoutMessage).setVisibility(View.GONE); listView.setVisibility(View.VISIBLE); } } FontApplicator getFontApplicator() { FolderActivity folderActivity = (FolderActivity) getActivity(); return folderActivity.getFontApplicator(); } void showProgress() { if (getView() != null) { getListView().setVisibility(View.GONE); getView().findViewById(R.id.layoutMessage).setVisibility(View.VISIBLE); getView().findViewById(R.id.tvMessage).setVisibility(View.GONE); } } FileManagerApplication getApplication() { if (getActivity() == null) return null; return (FileManagerApplication) getActivity().getApplication(); } AppPreferences getPreferences() { if (getApplication() == null) return null; return getApplication().getAppPreferences(); } @SuppressWarnings("unchecked") @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setRetainInstance(true); Log.d(LOG_TAG, "Fragment created"); if (savedInstanceState != null) { this.topVisibleItem = savedInstanceState.getInt(EXTRA_SCROLL_POSITION, 0); this.selectedFiles.addAll((HashSet<File>) savedInstanceState.getSerializable(EXTRA_SELECTED_FILES)); } Bundle arguments = getArguments(); if (arguments != null && arguments.containsKey(EXTRA_DIR)) currentDir = new File(arguments.getString(EXTRA_DIR)); else currentDir = getPreferences().getStartFolder(); setHasOptionsMenu(true); loadFileList(); } void showMessage(CharSequence message) { View view = getView(); if (view != null) { getListView().setVisibility(View.GONE); view.findViewById(R.id.layoutMessage).setVisibility(View.VISIBLE); view.findViewById(R.id.progress).setVisibility(View.GONE); TextView tvMessage = (TextView) view.findViewById(R.id.tvMessage); tvMessage.setText(message); } } void showMessage(int message) { showMessage(getString(message)); } void showList() { getListView().setVisibility(View.VISIBLE); getView().findViewById(R.id.layoutMessage).setVisibility(View.GONE); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_list, container, false); this.listView = (AbsListView) view.findViewById(android.R.id.list); if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.KITKAT) listView.setFastScrollAlwaysVisible(true); return view; } @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1) @Override public void onLowMemory() { super.onLowMemory(); if (thumbCache != null) { if (getView() == null || Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) thumbCache.evictAll(); else thumbCache.trimToSize(1024*1024); } } void loadFileList() { if (loadFilesTask != null) return; this.loadFilesTask = new AsyncTask<File, Void, AsyncResult<File[]>>() { @Override protected AsyncResult<File[]> doInBackground(File... params) { try { File[] files =params[0].listFiles(FileUtils.DEFAULT_FILE_FILTER); if (files == null) throw new NullPointerException(getString(R.string.cannot_read_directory_s, params[0].getName())); if (isCancelled()) throw new Exception("Task cancelled"); Arrays.sort(files, getPreferences().getFileSortingComparator()); return new AsyncResult<File[]>(files); } catch (Exception e) { return new AsyncResult<File[]>(e); } } @Override protected void onCancelled(AsyncResult<File[]> result) { loadFilesTask = null; } @Override protected void onPostExecute(AsyncResult<File[]> result) { Log.d("folder fragment", "Task finished"); loadFilesTask = null; FileAdapter adapter; try { files = Arrays.asList(result.getResult()); if (files.isEmpty()) { showMessage(R.string.folder_empty); return; } adapter = new FileAdapter(getActivity(), files, getApplication().getFileIconResolver()); final int cardPreference = getPreferences().getCardLayout(); if (cardPreference == AppPreferences.CARD_LAYOUT_ALWAYS || (cardPreference == AppPreferences.CARD_LAYOUT_MEDIA && FileUtils.isMediaDirectory(currentDir))) { if (thumbCache == null) thumbCache = new FilePreviewCache(); adapter = new FileCardAdapter(getActivity(), files, thumbCache, getApplication().getFileIconResolver()); } else adapter = new FileAdapter(getActivity(), files, getApplication().getFileIconResolver()); adapter.setSelectedFiles(selectedFiles); adapter.setOnFileSelectedListener(FolderFragment.this); adapter.setFontApplicator(getFontApplicator()); setListAdapter(adapter); } catch (Exception e) { // exception was thrown while loading files showMessage(e.getMessage()); adapter = new FileAdapter(getActivity(), getApplication().getFileIconResolver()); } getActivity().invalidateOptionsMenu(); } }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, currentDir); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.folder_browser, menu); menu.findItem(R.id.menu_selectAll).setVisible(!(files == null || files.isEmpty())); if (getApplication().getFavouritesManager().isFolderFavourite(currentDir)) { menu.findItem(R.id.menu_unfavourite).setVisible(true); menu.findItem(R.id.menu_favourite).setVisible(false); } else { menu.findItem(R.id.menu_unfavourite).setVisible(false); menu.findItem(R.id.menu_favourite).setVisible(true); } } @Override public void onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); menu.findItem(R.id.menu_paste).setVisible(Clipboard.getInstance().isEmpty() == false); menu.findItem(R.id.menu_navigate_up).setVisible(currentDir.getParentFile() != null); } void showEditTextDialog(int title, int okButtonText, final OnResultListener<CharSequence> enteredTextResult, CharSequence hint, CharSequence defaultValue) { View view = getActivity().getLayoutInflater().inflate(R.layout.dialog_edittext, (ViewGroup) getActivity().getWindow().getDecorView(), false); final EditText editText = (EditText) 
view.findViewById(android.R.id.edit); editText.setHint(hint); editText.setText(defaultValue); if (TextUtils.isEmpty(defaultValue) == false) { int end = defaultValue.toString().indexOf('.'); if (end > 0) editText.setSelection(0, end); } final Dialog dialog = new AlertDialog.Builder(getActivity()) .setTitle(title) .setView(view) .setPositiveButton(okButtonText, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { enteredTextResult.onResult(new AsyncResult<CharSequence>(editText.getText())); } }) .setNegativeButton(android.R.string.cancel, null) .create(); dialog.show(); dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.menu_selectAll: selectFiles(this.files); return true; case R.id.menu_navigate_up: String newFolder = currentDir.getParent(); if (newFolder != null) { Bundle args = new Bundle(1); args.putString(EXTRA_DIR, newFolder); FolderFragment fragment = new FolderFragment(); fragment.setArguments(args); FolderActivity activity = (FolderActivity) getActivity(); activity.showFragment(fragment); } return true; case R.id.menu_favourite: try { final String directoryName = FileUtils.getFolderDisplayName(currentDir); FavouritesManager favouritesManager = getApplication().getFavouritesManager(); favouritesManager.addFavourite(new FavouriteFolder(currentDir, directoryName)); getActivity().invalidateOptionsMenu(); } catch (FolderAlreadyFavouriteException e1) { e1.printStackTrace(); } return true; case R.id.menu_unfavourite: FavouritesManager favouritesManager = getApplication().getFavouritesManager(); favouritesManager.removeFavourite(currentDir); getActivity().invalidateOptionsMenu(); return true; case R.id.menu_create_folder: showEditTextDialog(R.string.create_folder, R.string.create, new OnResultListener<CharSequence>() { @Override public void onResult(AsyncResult<CharSequence> result) { try { String name = result.getResult().toString(); File newFolder = new File(currentDir, name); if (newFolder.mkdirs()) { refreshFolder(); Toast.makeText(getActivity(), R.string.folder_created_successfully, Toast.LENGTH_SHORT).show(); navigateTo(newFolder); } else Toast.makeText(getActivity(), R.string.folder_could_not_be_created, Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); Toast.makeText(getActivity(), e.getMessage(), Toast.LENGTH_SHORT).show(); } } }, "", ""); return true; case R.id.menu_paste: pasteFiles(); return true; case R.id.menu_refresh: refreshFolder(); return true; } return super.onOptionsItemSelected(item); } public void pasteFiles() { new AsyncTask<Clipboard, Float, Exception>() { ProgressDialog progressDialog; @Override protected void onPreExecute() { super.onPreExecute(); progressDialog = new ProgressDialog(getActivity()); progressDialog.setTitle(getActivity().getString(R.string.pasting_files_)); progressDialog.setIndeterminate(false); progressDialog.setCancelable(false); progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL); progressDialog.show(); } @Override protected void onProgressUpdate(Float... values) { float progress = values[0]; progressDialog.setMax(100); progressDialog.setProgress((int) (progress * 100)); } @Override protected Exception doInBackground(Clipboard... 
params) { try { final int total = FileUtils.countFilesIn(params[0].getFiles()); final int[] progress = {0}; params[0].paste(currentDir, new FileOperationListener() { @Override public void onFileProcessed(String filename) { progress[0]++; publishProgress((float)progress[0] / (float)total); } @Override public boolean isOperationCancelled() { return isCancelled(); } }); return null; } catch (IOException e) { e.printStackTrace(); return e; } } @Override protected void onCancelled() { progressDialog.dismiss(); refreshFolder(); } @Override protected void onPostExecute(Exception result) { progressDialog.dismiss(); refreshFolder(); if (result == null) { Clipboard.getInstance().clear(); Toast.makeText(getActivity(), R.string.files_pasted, Toast.LENGTH_SHORT).show(); } else { new AlertDialog.Builder(getActivity()) .setMessage(result.getMessage()) .setPositiveButton(android.R.string.ok, null) .show(); } } }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, Clipboard.getInstance()); } @Override public void onViewCreated(View view, final Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); getFontApplicator().applyFont(view); loadFileList(); if (selectedFiles.isEmpty() == false) { selectFiles(selectedFiles); } final String directoryName = FileUtils.getFolderDisplayName(currentDir); getActivity().setTitle(directoryName); getListView().setOnItemClickListener(FolderFragment.this); getListView().setOnScrollListener(this); getListView().setOnItemLongClickListener(this); getListView().setMultiChoiceModeListener(this); getActivity().getActionBar().setSubtitle(FileUtils.getUserFriendlySdcardPath(currentDir)); if (topVisibleItem <= DISTANCE_TO_HIDE_ACTIONBAR) setActionbarVisibility(true); // add listview header to push items below the actionbar ListViewUtils.addListViewHeader(getListView(), getActivity()); if (fileAdapter != null) setListAdapter(fileAdapter); FolderActivity activity = (FolderActivity) getActivity(); activity.setLastFolder(currentDir); } @Override public void onDestroyView() { finishActionMode(true); listView = null; super.onDestroyView(); } @Override public void onDestroy() { if (loadFilesTask != null) loadFilesTask.cancel(true); if (thumbCache != null) thumbCache.evictAll(); super.onDestroy(); } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putInt(EXTRA_SCROLL_POSITION, topVisibleItem); outState.putSerializable(EXTRA_SELECTED_FILES, selectedFiles); } void navigateTo(File folder) { nextDir = folder; FolderActivity activity = (FolderActivity) getActivity(); FolderFragment fragment = new FolderFragment(); Bundle args = new Bundle(); args.putString(EXTRA_DIR, folder.getAbsolutePath()); fragment.setArguments(args); activity.showFragment(fragment); } void openFile(File file) { if (file.isDirectory()) throw new IllegalArgumentException("File cannot be a directory!"); Intent intent = IntentUtils.createFileOpenIntent(file); try { startActivity(intent); } catch (ActivityNotFoundException e) { startActivity(Intent.createChooser(intent, getString(R.string.open_file_with_, file.getName()))); } catch (Exception e) { new AlertDialog.Builder(getActivity()) .setMessage(e.getMessage()) .setTitle(R.string.error) .setPositiveButton(android.R.string.ok, null) .show(); } } @Override public void onItemClick(AdapterView<?> adapterView, View arg1, int position, long arg3) { Object selectedObject = adapterView.getItemAtPosition(position); if (selectedObject instanceof File) { if (actionMode == null) { File selectedFile = (File) 
selectedObject; if (selectedFile.isDirectory()) navigateTo(selectedFile); else openFile(selectedFile); } else { toggleFileSelected((File) selectedObject); } } } void setActionbarVisibility(boolean visible) { if (actionMode == null || visible == true) // cannot hide CAB ((FolderActivity) getActivity()).setActionbarVisible(visible); } @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { if (firstVisibleItem < this.topVisibleItem - DISTANCE_TO_HIDE_ACTIONBAR) { setActionbarVisibility(true); this.topVisibleItem = firstVisibleItem; } else if (firstVisibleItem > this.topVisibleItem + DISTANCE_TO_HIDE_ACTIONBAR) { setActionbarVisibility(false); this.topVisibleItem = firstVisibleItem; } ListAdapter adapter = view.getAdapter(); if (adapter instanceof HeaderViewListAdapter) { HeaderViewListAdapter headerViewListAdapter = (HeaderViewListAdapter) adapter; if (headerViewListAdapter.getWrappedAdapter() instanceof FileCardAdapter) { int startPrefetch = firstVisibleItem + visibleItemCount-headerViewListAdapter.getHeadersCount(); ((FileCardAdapter) headerViewListAdapter.getWrappedAdapter()).prefetchImages(startPrefetch, visibleItemCount); } } } @Override public void onScrollStateChanged(AbsListView view, int scrollState) { } @Override public boolean onItemLongClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) { setFileSelected((File) arg0.getItemAtPosition(arg2), true); return true; } void showFileInfo(Collection<File> files) { final CharSequence title; final StringBuilder message = new StringBuilder(); if (files.size() == 1) title = ((File) files.toArray()[0]).getName(); else title = getString(R.string._d_objects, files.size()); if (files.size() > 1) message.append(FileUtils.combineFileNames(files)).append("\n\n"); message.append(getString(R.string.size_s, FileUtils.formatFileSize(files))).append('\n'); message.append(getString(R.string.mime_type_s, FileUtils.getCollectiveMimeType(files))); new AlertDialog.Builder(getActivity()) .setTitle(title) .setMessage(message) .setPositiveButton(android.R.string.ok, null) .show(); } @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { switch (item.getItemId()) { case R.id.action_delete: new AlertDialog.Builder(getActivity()) .setMessage(getString(R.string.delete_d_items_, selectedFiles.size())) .setPositiveButton(R.string.delete, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { int n = FileUtils.deleteFiles(selectedFiles); Toast.makeText(getActivity(), getString(R.string._d_files_deleted, n), Toast.LENGTH_SHORT).show(); refreshFolder(); finishActionMode(false); } }) .setNegativeButton(android.R.string.cancel, null) .show(); return true; case R.id.action_selectAll: if (isEverythingSelected()) clearFileSelection(); else selectFiles(files); return true; case R.id.action_info: if (selectedFiles.isEmpty()) return true; showFileInfo(selectedFiles); return true; case R.id.action_copy: Clipboard.getInstance().addFiles(selectedFiles, FileAction.Copy); Toast.makeText(getActivity(), R.string.objects_copied_to_clipboard, Toast.LENGTH_SHORT).show(); finishActionMode(false); return true; case R.id.action_cut: Clipboard clipboard = Clipboard.getInstance(); clipboard.addFiles(selectedFiles, FileAction.Cut); Toast.makeText(getActivity(), R.string.objects_cut_to_clipboard, Toast.LENGTH_SHORT).show(); finishActionMode(false); return true; case R.id.action_rename: final File fileToRename = (File) selectedFiles.toArray()[0]; 
showEditTextDialog(fileToRename.isDirectory()?R.string.rename_folder:R.string.rename_file, R.string.rename, new OnResultListener<CharSequence>() { @Override public void onResult(AsyncResult<CharSequence> result) { try { String newName = result.getResult().toString(); if (fileToRename.renameTo(new File(fileToRename.getParentFile(), newName))) { finishActionMode(false); refreshFolder(); Toast.makeText(getActivity(), R.string.file_renamed, Toast.LENGTH_SHORT).show(); } else Toast.makeText(getActivity(), getActivity().getString(R.string.file_could_not_be_renamed_to_s, newName), Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); Toast.makeText(getActivity(), e.getMessage(), Toast.LENGTH_SHORT).show(); } } }, fileToRename.getName(), fileToRename.getName()); return true; case R.id.menu_add_homescreen_icon: for (File file : selectedFiles) IntentUtils.createShortcut(getActivity(), file); Toast.makeText(getActivity(), R.string.shortcut_created, Toast.LENGTH_SHORT).show(); actionMode.finish(); return true; } return false; } protected void refreshFolder() { showProgress(); loadFileList(); } void updateActionMode() { if (actionMode != null) { actionMode.invalidate(); int count = selectedFiles.size(); actionMode.setTitle(getString(R.string._d_objects, count)); actionMode.setSubtitle(FileUtils.combineFileNames(selectedFiles)); if (shareActionProvider != null) { final Intent shareIntent; if (selectedFiles.isEmpty()) shareIntent = null; else if (selectedFiles.size() == 1) { File file = (File) selectedFiles.toArray()[0]; shareIntent = new Intent(Intent.ACTION_SEND); shareIntent.setType(FileUtils.getFileMimeType(file)); shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(file)); } else { ArrayList<Uri> fileUris = new ArrayList<Uri>(selectedFiles.size()); for (File file : selectedFiles) if (file.isDirectory() == false) { fileUris.add(Uri.fromFile(file)); } shareIntent = new Intent(Intent.ACTION_SEND_MULTIPLE); shareIntent.putParcelableArrayListExtra(Intent.EXTRA_STREAM, fileUris); shareIntent.setType(FileUtils.getCollectiveMimeType(selectedFiles)); } shareActionProvider.setShareIntent(shareIntent); } } } @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { setActionbarVisibility(true); getActivity().getMenuInflater().inflate(R.menu.action_file, menu); getActivity().getMenuInflater().inflate(R.menu.action_file_single, menu); MenuItem shareMenuItem = menu.findItem(R.id.action_share); shareActionProvider = (ShareActionProvider) shareMenuItem.getActionProvider(); this.preserveSelection = false; return true; } void finishSelection() { if (listView != null) listView.setChoiceMode(ListView.CHOICE_MODE_NONE); clearFileSelection(); } void finishActionMode(boolean preserveSelection) { this.preserveSelection = preserveSelection; if (actionMode != null) actionMode.finish(); } @Override public void onDestroyActionMode(ActionMode mode) { actionMode = null; shareActionProvider = null; if (preserveSelection == false) finishSelection(); Log.d(LOG_TAG, "Action mode destroyed"); } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { int count = selectedFiles.size(); if (count == 1) { menu.findItem(R.id.action_rename).setVisible(true); menu.findItem(R.id.menu_add_homescreen_icon).setTitle(R.string.add_to_homescreen); } else { menu.findItem(R.id.action_rename).setVisible(false); menu.findItem(R.id.menu_add_homescreen_icon).setTitle(R.string.add_to_homescreen_multiple); } // show Share button if no folder was selected boolean allowShare = (count > 0); if 
(allowShare) { for (File file : selectedFiles) if (file.isDirectory()) { allowShare = false; break; } } menu.findItem(R.id.action_share).setVisible(allowShare); return true; } @Override public void onItemCheckedStateChanged(ActionMode mode, int position, long id, boolean checked) { } void toggleFileSelected(File file) { setFileSelected(file, !selectedFiles.contains(file)); } void clearFileSelection() { if (listView != null) listView.clearChoices(); selectedFiles.clear(); updateActionMode(); if (fileAdapter != null) fileAdapter.notifyDataSetChanged(); Log.d(LOG_TAG, "Selection cleared"); } boolean isEverythingSelected() { return selectedFiles.size() == files.size(); } void selectFiles(Collection<File> files) { if (files == null || files.isEmpty()) return; if (actionMode == null) { listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE); actionMode = getActivity().startActionMode(this); } selectedFiles.addAll(files); updateActionMode(); if (fileAdapter != null) fileAdapter.notifyDataSetChanged(); } void setFileSelected(File file, boolean selected) { if (actionMode == null) { listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE); actionMode = getActivity().startActionMode(this); } if (selected) selectedFiles.add(file); else selectedFiles.remove(file); updateActionMode(); if (fileAdapter != null) fileAdapter.notifyDataSetChanged(); if (selectedFiles.isEmpty()) finishActionMode(false); } @Override public void onFileSelected(File file) { toggleFileSelected(file); } }
src/com/michaldabski/filemanager/folders/FolderFragment.java
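The fragment above resolves its start directory from the EXTRA_DIR argument, falling back to AppPreferences.getStartFolder(), exactly as its own navigateTo() helper does. A minimal caller-side sketch under that assumption (the openFolder helper is hypothetical and not part of this record; FolderActivity.showFragment() is the same call the fragment itself uses):

// Hypothetical helper, not part of the repository: opens a FolderFragment for
// the given directory, mirroring FolderFragment.navigateTo() in the file above.
// Assumes android.os.Bundle and java.io.File are imported in the caller.
static void openFolder(FolderActivity activity, File folder) {
    Bundle args = new Bundle();
    args.putString(FolderFragment.EXTRA_DIR, folder.getAbsolutePath());
    FolderFragment fragment = new FolderFragment();
    fragment.setArguments(args);
    activity.showFragment(fragment); // attaches the fragment, as navigateTo() does
}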
/******************************************************************************* * Copyright (c) 2014 Michal Dabski * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ******************************************************************************/ package com.michaldabski.filemanager.folders; import android.annotation.TargetApi; import android.app.AlertDialog; import android.app.Dialog; import android.app.Fragment; import android.app.ProgressDialog; import android.content.ActivityNotFoundException; import android.content.DialogInterface; import android.content.DialogInterface.OnClickListener; import android.content.Intent; import android.net.Uri; import android.os.AsyncTask; import android.os.Build; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.ActionMode; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.view.WindowManager; import android.widget.AbsListView; import android.widget.AbsListView.MultiChoiceModeListener; import android.widget.AbsListView.OnScrollListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemClickListener; import android.widget.AdapterView.OnItemLongClickListener; import android.widget.EditText; import android.widget.HeaderViewListAdapter; import android.widget.ListAdapter; import android.widget.ListView; import android.widget.ShareActionProvider; import android.widget.TextView; import android.widget.Toast; import com.michaldabski.filemanager.AppPreferences; import com.michaldabski.filemanager.FileManagerApplication; import com.michaldabski.filemanager.R; import com.michaldabski.filemanager.clipboard.Clipboard; import com.michaldabski.filemanager.clipboard.Clipboard.FileAction; import com.michaldabski.filemanager.clipboard.FileOperationListener; import com.michaldabski.filemanager.favourites.FavouriteFolder; import com.michaldabski.filemanager.favourites.FavouritesManager; import com.michaldabski.filemanager.favourites.FavouritesManager.FolderAlreadyFavouriteException; import com.michaldabski.filemanager.folders.FileAdapter.OnFileSelectedListener; import com.michaldabski.utils.AsyncResult; import com.michaldabski.utils.FilePreviewCache; import com.michaldabski.utils.FileUtils; import com.michaldabski.utils.FontApplicator; import com.michaldabski.utils.IntentUtils; import com.michaldabski.utils.ListViewUtils; import com.michaldabski.utils.OnResultListener; 
import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; public class FolderFragment extends Fragment implements OnItemClickListener, OnScrollListener, OnItemLongClickListener, MultiChoiceModeListener, OnFileSelectedListener { private static final String LOG_TAG = "FolderFragment"; private final int DISTANCE_TO_HIDE_ACTIONBAR = 0; public static final String EXTRA_DIR = "directory", EXTRA_SELECTED_FILES = "selected_files", EXTRA_SCROLL_POSITION = "scroll_position"; File currentDir, nextDir = null; int topVisibleItem=0; List<File> files = null; @SuppressWarnings("rawtypes") AsyncTask loadFilesTask=null; AbsListView listView = null; FileAdapter fileAdapter; private ActionMode actionMode = null; private final HashSet<File> selectedFiles = new HashSet<File>(); private ShareActionProvider shareActionProvider; // set to true when selection shouldnt be cleared from switching out fragments boolean preserveSelection = false; FilePreviewCache thumbCache; public AbsListView getListView() { return listView; } private void setListAdapter(FileAdapter fileAdapter) { this.fileAdapter = fileAdapter; if (listView != null) { listView.setAdapter(fileAdapter); listView.setSelection(topVisibleItem); getView().findViewById(R.id.layoutMessage).setVisibility(View.GONE); listView.setVisibility(View.VISIBLE); } } FontApplicator getFontApplicator() { FolderActivity folderActivity = (FolderActivity) getActivity(); return folderActivity.getFontApplicator(); } void showProgress() { if (getView() != null) { getListView().setVisibility(View.GONE); getView().findViewById(R.id.layoutMessage).setVisibility(View.VISIBLE); getView().findViewById(R.id.tvMessage).setVisibility(View.GONE); } } FileManagerApplication getApplication() { if (getActivity() == null) return null; return (FileManagerApplication) getActivity().getApplication(); } AppPreferences getPreferences() { if (getApplication() == null) return null; return getApplication().getAppPreferences(); } @SuppressWarnings("unchecked") @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setRetainInstance(true); Log.d(LOG_TAG, "Fragment created"); if (savedInstanceState != null) { this.topVisibleItem = savedInstanceState.getInt(EXTRA_SCROLL_POSITION, 0); this.selectedFiles.addAll((HashSet<File>) savedInstanceState.getSerializable(EXTRA_SELECTED_FILES)); } Bundle arguments = getArguments(); if (arguments != null && arguments.containsKey(EXTRA_DIR)) currentDir = new File(arguments.getString(EXTRA_DIR)); else currentDir = getPreferences().getStartFolder(); setHasOptionsMenu(true); loadFileList(); } void showMessage(CharSequence message) { View view = getView(); if (view != null) { getListView().setVisibility(View.GONE); view.findViewById(R.id.layoutMessage).setVisibility(View.VISIBLE); view.findViewById(R.id.progress).setVisibility(View.GONE); TextView tvMessage = (TextView) view.findViewById(R.id.tvMessage); tvMessage.setText(message); } } void showMessage(int message) { showMessage(getString(message)); } void showList() { getListView().setVisibility(View.VISIBLE); getView().findViewById(R.id.layoutMessage).setVisibility(View.GONE); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View view = inflater.inflate(R.layout.fragment_list, container, false); this.listView = (AbsListView) view.findViewById(android.R.id.list); if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.KITKAT) listView.setFastScrollAlwaysVisible(true); return view; } @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1) @Override public void onLowMemory() { super.onLowMemory(); if (thumbCache != null) { if (getView() == null || Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN_MR1) thumbCache.evictAll(); else thumbCache.trimToSize(1024*1024); } } void loadFileList() { if (loadFilesTask != null) return; this.loadFilesTask = new AsyncTask<File, Void, AsyncResult<File[]>>() { @Override protected AsyncResult<File[]> doInBackground(File... params) { try { File[] files =params[0].listFiles(FileUtils.DEFAULT_FILE_FILTER); if (files == null) throw new NullPointerException(getString(R.string.cannot_read_directory_s, params[0].getName())); if (isCancelled()) throw new Exception("Task cancelled"); Arrays.sort(files, getPreferences().getFileSortingComparator()); return new AsyncResult<File[]>(files); } catch (Exception e) { return new AsyncResult<File[]>(e); } } @Override protected void onCancelled(AsyncResult<File[]> result) { loadFilesTask = null; } @Override protected void onPostExecute(AsyncResult<File[]> result) { Log.d("folder fragment", "Task finished"); loadFilesTask = null; FileAdapter adapter; try { files = Arrays.asList(result.getResult()); if (files.isEmpty()) { showMessage(R.string.folder_empty); return; } adapter = new FileAdapter(getActivity(), files, getApplication().getFileIconResolver()); final int cardPreference = getPreferences().getCardLayout(); if (cardPreference == AppPreferences.CARD_LAYOUT_ALWAYS || (cardPreference == AppPreferences.CARD_LAYOUT_MEDIA && FileUtils.isMediaDirectory(currentDir))) { if (thumbCache == null) thumbCache = new FilePreviewCache(); adapter = new FileCardAdapter(getActivity(), files, thumbCache, getApplication().getFileIconResolver()); } else adapter = new FileAdapter(getActivity(), files, getApplication().getFileIconResolver()); adapter.setSelectedFiles(selectedFiles); adapter.setOnFileSelectedListener(FolderFragment.this); adapter.setFontApplicator(getFontApplicator()); setListAdapter(adapter); } catch (Exception e) { // exception was thrown while loading files showMessage(e.getMessage()); adapter = new FileAdapter(getActivity(), getApplication().getFileIconResolver()); } getActivity().invalidateOptionsMenu(); } }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, currentDir); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { super.onCreateOptionsMenu(menu, inflater); inflater.inflate(R.menu.folder_browser, menu); menu.findItem(R.id.menu_selectAll).setVisible(!(files == null || files.isEmpty())); if (getApplication().getFavouritesManager().isFolderFavourite(currentDir)) { menu.findItem(R.id.menu_unfavourite).setVisible(true); menu.findItem(R.id.menu_favourite).setVisible(false); } else { menu.findItem(R.id.menu_unfavourite).setVisible(false); menu.findItem(R.id.menu_favourite).setVisible(true); } } @Override public void onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); menu.findItem(R.id.menu_paste).setVisible(Clipboard.getInstance().isEmpty() == false); menu.findItem(R.id.menu_navigate_up).setVisible(currentDir.getParentFile() != null); } void showEditTextDialog(int title, int okButtonText, final OnResultListener<CharSequence> enteredTextResult, CharSequence hint, CharSequence defaultValue) { View view = getActivity().getLayoutInflater().inflate(R.layout.dialog_edittext, (ViewGroup) getActivity().getWindow().getDecorView(), false); final EditText editText = (EditText) 
view.findViewById(android.R.id.edit); editText.setHint(hint); editText.setText(defaultValue); if (TextUtils.isEmpty(defaultValue) == false) { int end = defaultValue.toString().indexOf('.'); if (end > 0) editText.setSelection(0, end); } final Dialog dialog = new AlertDialog.Builder(getActivity()) .setTitle(title) .setView(view) .setPositiveButton(okButtonText, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { enteredTextResult.onResult(new AsyncResult<CharSequence>(editText.getText())); } }) .setNegativeButton(android.R.string.cancel, null) .create(); dialog.show(); dialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_VISIBLE); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.menu_selectAll: selectFiles(this.files); return true; case R.id.menu_navigate_up: String newFolder = currentDir.getParent(); if (newFolder != null) { Bundle args = new Bundle(1); args.putString(EXTRA_DIR, newFolder); FolderFragment fragment = new FolderFragment(); fragment.setArguments(args); FolderActivity activity = (FolderActivity) getActivity(); activity.showFragment(fragment); } return true; case R.id.menu_favourite: try { final String directoryName = FileUtils.getFolderDisplayName(currentDir); FavouritesManager favouritesManager = getApplication().getFavouritesManager(); favouritesManager.addFavourite(new FavouriteFolder(currentDir, directoryName)); getActivity().invalidateOptionsMenu(); } catch (FolderAlreadyFavouriteException e1) { e1.printStackTrace(); } return true; case R.id.menu_unfavourite: FavouritesManager favouritesManager = getApplication().getFavouritesManager(); favouritesManager.removeFavourite(currentDir); getActivity().invalidateOptionsMenu(); return true; case R.id.menu_create_folder: showEditTextDialog(R.string.create_folder, R.string.create, new OnResultListener<CharSequence>() { @Override public void onResult(AsyncResult<CharSequence> result) { try { String name = result.getResult().toString(); File newFolder = new File(currentDir, name); if (newFolder.mkdirs()) { refreshFolder(); Toast.makeText(getActivity(), R.string.folder_created_successfully, Toast.LENGTH_SHORT).show(); navigateTo(newFolder); } else Toast.makeText(getActivity(), R.string.folder_could_not_be_created, Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); Toast.makeText(getActivity(), e.getMessage(), Toast.LENGTH_SHORT).show(); } } }, "", ""); return true; case R.id.menu_paste: pasteFiles(); return true; case R.id.menu_refresh: refreshFolder(); return true; } return super.onOptionsItemSelected(item); } public void pasteFiles() { new AsyncTask<Clipboard, Float, Exception>() { ProgressDialog progressDialog; @Override protected void onPreExecute() { super.onPreExecute(); progressDialog = new ProgressDialog(getActivity()); progressDialog.setTitle(getActivity().getString(R.string.pasting_files_)); progressDialog.setIndeterminate(false); progressDialog.setCancelable(false); progressDialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL); progressDialog.show(); } @Override protected void onProgressUpdate(Float... values) { float progress = values[0]; progressDialog.setMax(100); progressDialog.setProgress((int) (progress * 100)); } @Override protected Exception doInBackground(Clipboard... 
params) { try { final int total = FileUtils.countFilesIn(params[0].getFiles()); final int[] progress = {0}; params[0].paste(currentDir, new FileOperationListener() { @Override public void onFileProcessed(String filename) { progress[0]++; publishProgress((float)progress[0] / (float)total); } @Override public boolean isOperationCancelled() { return isCancelled(); } }); return null; } catch (IOException e) { e.printStackTrace(); return e; } } @Override protected void onCancelled() { progressDialog.dismiss(); refreshFolder(); }; @Override protected void onPostExecute(Exception result) { progressDialog.dismiss(); refreshFolder(); if (result == null) { Clipboard.getInstance().clear(); Toast.makeText(getActivity(), R.string.files_pasted, Toast.LENGTH_SHORT).show(); } else { new AlertDialog.Builder(getActivity()) .setMessage(result.getMessage()) .setPositiveButton(android.R.string.ok, null) .show(); } }; }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, Clipboard.getInstance()); } @Override public void onViewCreated(View view, final Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); getFontApplicator().applyFont(view); loadFileList(); if (selectedFiles.isEmpty() == false) { selectFiles(selectedFiles); } final String directoryName = FileUtils.getFolderDisplayName(currentDir); getActivity().setTitle(directoryName); getListView().setOnItemClickListener(FolderFragment.this); getListView().setOnScrollListener(this); getListView().setOnItemLongClickListener(this); getListView().setMultiChoiceModeListener(this); getActivity().getActionBar().setSubtitle(FileUtils.getUserFriendlySdcardPath(currentDir)); if (topVisibleItem <= DISTANCE_TO_HIDE_ACTIONBAR) setActionbarVisibility(true); // add listview header to push items below the actionbar ListViewUtils.addListViewHeader(getListView(), getActivity()); if (fileAdapter != null) setListAdapter(fileAdapter); FolderActivity activity = (FolderActivity) getActivity(); activity.setLastFolder(currentDir); } @Override public void onDestroyView() { finishActionMode(true); listView = null; super.onDestroyView(); } @Override public void onDestroy() { if (loadFilesTask != null) loadFilesTask.cancel(true); if (thumbCache != null) thumbCache.evictAll(); super.onDestroy(); } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putInt(EXTRA_SCROLL_POSITION, topVisibleItem); outState.putSerializable(EXTRA_SELECTED_FILES, selectedFiles); } void navigateTo(File folder) { nextDir = folder; FolderActivity activity = (FolderActivity) getActivity(); FolderFragment fragment = new FolderFragment(); Bundle args = new Bundle(); args.putString(EXTRA_DIR, folder.getAbsolutePath()); fragment.setArguments(args); activity.showFragment(fragment); } void openFile(File file) { if (file.isDirectory()) throw new IllegalArgumentException("File cannot be a directory!"); Intent intent = IntentUtils.createFileOpenIntent(file); try { startActivity(intent); } catch (ActivityNotFoundException e) { startActivity(Intent.createChooser(intent, getString(R.string.open_file_with_, file.getName()))); } catch (Exception e) { new AlertDialog.Builder(getActivity()) .setMessage(e.getMessage()) .setTitle(R.string.error) .setPositiveButton(android.R.string.ok, null) .show(); } } @Override public void onItemClick(AdapterView<?> adapterView, View arg1, int position, long arg3) { Object selectedObject = adapterView.getItemAtPosition(position); if (selectedObject instanceof File) { if (actionMode == null) { File selectedFile = (File) 
selectedObject; if (selectedFile.isDirectory()) navigateTo(selectedFile); else openFile(selectedFile); } else { toggleFileSelected((File) selectedObject); } } } void setActionbarVisibility(boolean visible) { if (actionMode == null || visible == true) // cannot hide CAB ((FolderActivity) getActivity()).setActionbarVisible(visible); } @Override public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) { if (firstVisibleItem < this.topVisibleItem - DISTANCE_TO_HIDE_ACTIONBAR) { setActionbarVisibility(true); this.topVisibleItem = firstVisibleItem; } else if (firstVisibleItem > this.topVisibleItem + DISTANCE_TO_HIDE_ACTIONBAR) { setActionbarVisibility(false); this.topVisibleItem = firstVisibleItem; } ListAdapter adapter = view.getAdapter(); if (adapter instanceof HeaderViewListAdapter) { HeaderViewListAdapter headerViewListAdapter = (HeaderViewListAdapter) adapter; if (headerViewListAdapter.getWrappedAdapter() instanceof FileCardAdapter) { int startPrefetch = firstVisibleItem + visibleItemCount-headerViewListAdapter.getHeadersCount(); ((FileCardAdapter) headerViewListAdapter.getWrappedAdapter()).prefetchImages(startPrefetch, visibleItemCount); } } } @Override public void onScrollStateChanged(AbsListView view, int scrollState) { } @Override public boolean onItemLongClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) { setFileSelected((File) arg0.getItemAtPosition(arg2), true); return true; } void showFileInfo(Collection<File> files) { final CharSequence title; final StringBuilder message = new StringBuilder(); if (files.size() == 1) title = ((File) files.toArray()[0]).getName(); else title = getString(R.string._d_objects, files.size()); if (files.size() > 1) message.append(FileUtils.combineFileNames(files)).append("\n\n"); message.append(getString(R.string.size_s, FileUtils.formatFileSize(files))).append('\n'); message.append(getString(R.string.mime_type_s, FileUtils.getCollectiveMimeType(files))); new AlertDialog.Builder(getActivity()) .setTitle(title) .setMessage(message) .setPositiveButton(android.R.string.ok, null) .show(); } @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { switch (item.getItemId()) { case R.id.action_delete: new AlertDialog.Builder(getActivity()) .setMessage(getString(R.string.delete_d_items_, selectedFiles.size())) .setPositiveButton(R.string.delete, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { int n = FileUtils.deleteFiles(selectedFiles); Toast.makeText(getActivity(), getString(R.string._d_files_deleted, n), Toast.LENGTH_SHORT).show(); refreshFolder(); finishActionMode(false); } }) .setNegativeButton(android.R.string.cancel, null) .show(); return true; case R.id.action_selectAll: if (isEverythingSelected()) clearFileSelection(); else selectFiles(files); return true; case R.id.action_info: if (selectedFiles.isEmpty()) return true; showFileInfo(selectedFiles); return true; case R.id.action_copy: Clipboard.getInstance().addFiles(selectedFiles, FileAction.Copy); Toast.makeText(getActivity(), R.string.objects_copied_to_clipboard, Toast.LENGTH_SHORT).show(); finishActionMode(false); return true; case R.id.action_cut: Clipboard clipboard = Clipboard.getInstance(); clipboard.addFiles(selectedFiles, FileAction.Cut); Toast.makeText(getActivity(), R.string.objects_cut_to_clipboard, Toast.LENGTH_SHORT).show(); finishActionMode(false); return true; case R.id.action_rename: final File fileToRename = (File) selectedFiles.toArray()[0]; 
showEditTextDialog(fileToRename.isDirectory()?R.string.rename_folder:R.string.rename_file, R.string.rename, new OnResultListener<CharSequence>() { @Override public void onResult(AsyncResult<CharSequence> result) { try { String newName = result.getResult().toString(); if (fileToRename.renameTo(new File(fileToRename.getParentFile(), newName))) { finishActionMode(false); refreshFolder(); Toast.makeText(getActivity(), R.string.file_renamed, Toast.LENGTH_SHORT).show(); } else Toast.makeText(getActivity(), getActivity().getString(R.string.file_could_not_be_renamed_to_s, newName), Toast.LENGTH_SHORT).show(); } catch (Exception e) { e.printStackTrace(); Toast.makeText(getActivity(), e.getMessage(), Toast.LENGTH_SHORT).show(); } } }, fileToRename.getName(), fileToRename.getName()); return true; case R.id.menu_add_homescreen_icon: for (File file : selectedFiles) IntentUtils.createShortcut(getActivity(), file); Toast.makeText(getActivity(), R.string.shortcut_created, Toast.LENGTH_SHORT).show(); actionMode.finish(); return true; } return false; } protected void refreshFolder() { showProgress(); loadFileList(); } void updateActionMode() { if (actionMode != null) { actionMode.invalidate(); int count = selectedFiles.size(); actionMode.setTitle(getString(R.string._d_objects, count)); actionMode.setSubtitle(FileUtils.combineFileNames(selectedFiles)); if (shareActionProvider != null) { final Intent shareIntent; if (selectedFiles.isEmpty()) shareIntent = null; else if (selectedFiles.size() == 1) { File file = (File) selectedFiles.toArray()[0]; shareIntent = new Intent(Intent.ACTION_SEND); shareIntent.setType(FileUtils.getFileMimeType(file)); shareIntent.putExtra(Intent.EXTRA_STREAM, Uri.fromFile(file)); } else { ArrayList<Uri> fileUris = new ArrayList<Uri>(selectedFiles.size()); for (File file : selectedFiles) if (file.isDirectory() == false) { fileUris.add(Uri.fromFile(file)); } shareIntent = new Intent(Intent.ACTION_SEND_MULTIPLE); shareIntent.putParcelableArrayListExtra(Intent.EXTRA_STREAM, fileUris); shareIntent.setType(FileUtils.getCollectiveMimeType(selectedFiles)); } shareActionProvider.setShareIntent(shareIntent); } } } @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { setActionbarVisibility(true); getActivity().getMenuInflater().inflate(R.menu.action_file, menu); getActivity().getMenuInflater().inflate(R.menu.action_file_single, menu); MenuItem shareMenuItem = menu.findItem(R.id.action_share); shareActionProvider = (ShareActionProvider) shareMenuItem.getActionProvider(); this.preserveSelection = false; return true; } void finishSelection() { if (listView != null) listView.setChoiceMode(ListView.CHOICE_MODE_NONE); clearFileSelection(); } void finishActionMode(boolean preserveSelection) { this.preserveSelection = preserveSelection; if (actionMode != null) actionMode.finish(); } @Override public void onDestroyActionMode(ActionMode mode) { actionMode = null; shareActionProvider = null; if (preserveSelection == false) finishSelection(); Log.d(LOG_TAG, "Action mode destroyed"); } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { int count = selectedFiles.size(); if (count == 1) { menu.findItem(R.id.action_rename).setVisible(true); menu.findItem(R.id.menu_add_homescreen_icon).setTitle(R.string.add_to_homescreen); } else { menu.findItem(R.id.action_rename).setVisible(false); menu.findItem(R.id.menu_add_homescreen_icon).setTitle(R.string.add_to_homescreen_multiple); } // show Share button if no folder was selected boolean allowShare = (count > 0); if 
(allowShare) { for (File file : selectedFiles) if (file.isDirectory()) { allowShare = false; break; } } menu.findItem(R.id.action_share).setVisible(allowShare); return true; } @Override public void onItemCheckedStateChanged(ActionMode mode, int position, long id, boolean checked) { } void toggleFileSelected(File file) { setFileSelected(file, !selectedFiles.contains(file)); } void clearFileSelection() { if (listView != null) listView.clearChoices(); selectedFiles.clear(); updateActionMode(); if (fileAdapter != null) fileAdapter.notifyDataSetChanged(); Log.d(LOG_TAG, "Selection cleared"); } boolean isEverythingSelected() { return selectedFiles.size() == files.size(); } void selectFiles(Collection<File> files) { if (files == null || files.isEmpty()) return; if (actionMode == null) { listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE); actionMode = getActivity().startActionMode(this); } selectedFiles.addAll(files); updateActionMode(); if (fileAdapter != null) fileAdapter.notifyDataSetChanged(); } void setFileSelected(File file, boolean selected) { if (actionMode == null) { listView.setChoiceMode(ListView.CHOICE_MODE_MULTIPLE); actionMode = getActivity().startActionMode(this); } if (selected) selectedFiles.add(file); else selectedFiles.remove(file); updateActionMode(); if (fileAdapter != null) fileAdapter.notifyDataSetChanged(); if (selectedFiles.isEmpty()) finishActionMode(false); } @Override public void onFileSelected(File file) { toggleFileSelected(file); } }
Remove unneeded semicolons
src/com/michaldabski/filemanager/folders/FolderFragment.java
Remove unneeded semicolons
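The change in this record is purely syntactic: in the old version of pasteFiles(), the anonymous AsyncTask terminated the bodies of onCancelled() and onPostExecute() with a stray ';' after the closing brace. Java treats such a semicolon in a class body as an empty declaration, so both versions compile; the commit simply drops the redundant token. A simplified illustration of the pattern (not the actual diff hunk):

// Before: redundant ';' after an anonymous-class method body (legal, but unneeded)
AsyncTask<Void, Void, Void> before = new AsyncTask<Void, Void, Void>() {
    @Override
    protected Void doInBackground(Void... params) {
        return null;
    };                      // <- the kind of semicolon this commit removes
};

// After: the same code without the trailing semicolon
AsyncTask<Void, Void, Void> after = new AsyncTask<Void, Void, Void>() {
    @Override
    protected Void doInBackground(Void... params) {
        return null;
    }
};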
Java
cc0-1.0
158a6071be2c6f73f057087ea3dca438f852da3f
0
PedaB/mapsplit,PedaB/mapsplit
/* * Mapsplit - A simple but fast tile splitter for large OSM data * * Written in 2011 by Peda (osm-mapsplit@won2.de) * * To the extent possible under law, the author(s) have dedicated all copyright and related and neighboring rights to * this software to the public domain worldwide. This software is distributed without any warranty. * * You should have received a copy of the CC0 Public Domain Dedication along with this software. If not, see * <http://creativecommons.org/publicdomain/zero/1.0/>. */ import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.text.DateFormat; import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import java.util.TreeSet; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.imintel.mbtiles4j.MBTilesWriteException; import org.imintel.mbtiles4j.MBTilesWriter; import org.imintel.mbtiles4j.model.MetadataEntry; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.openstreetmap.osmosis.core.container.v0_6.BoundContainer; import org.openstreetmap.osmosis.core.container.v0_6.EntityContainer; import org.openstreetmap.osmosis.core.container.v0_6.NodeContainer; import org.openstreetmap.osmosis.core.container.v0_6.RelationContainer; import org.openstreetmap.osmosis.core.container.v0_6.WayContainer; import org.openstreetmap.osmosis.core.domain.v0_6.Bound; import org.openstreetmap.osmosis.core.domain.v0_6.Node; import org.openstreetmap.osmosis.core.domain.v0_6.Relation; import org.openstreetmap.osmosis.core.domain.v0_6.RelationMember; import org.openstreetmap.osmosis.core.domain.v0_6.Way; import org.openstreetmap.osmosis.core.domain.v0_6.WayNode; import org.openstreetmap.osmosis.core.task.v0_6.RunnableSource; import org.openstreetmap.osmosis.core.task.v0_6.Sink; import org.openstreetmap.osmosis.osmbinary.file.BlockOutputStream; // import crosby.binary.file.BlockOutputStream; import crosby.binary.osmosis.OsmosisReader; import crosby.binary.osmosis.OsmosisSerializer; public class MapSplit { private static final String PBF_EXT = ".pbf"; /* * the zoom-level at which we render our tiles Attention: Code is not generic enough to change this value without * further code changes! 
;) */ private static final int ZOOM = 13; private static final int YMAX = 1 << ZOOM; // TMS scheme /* * the default sizes for the hash maps: should be a factor 2-4 of nodes in the pbf you want to read */ private static final int NODE_MAP_SIZE = 60000000; private static final int WAY_MAP_SIZE = 10000000; private static final int RELATION_MAP_SIZE = 2500000; // all data after this appointment date is considered new or modified private Date appointmentDate; private Date latestDate = new Date(0); // the size of the border (in percent for a tile's height and width) for single tiles private double border = 0.1; // the input file we're going to split private File input; // maximum number of files open at the same time private int maxFiles; // internal store to check if reading the file worked private boolean complete = false; // verbose outpu private boolean verbose = false; // the hashmap for all nodes in the osm map private OsmMap nmap; // the hashmap for all ways in the osm map private OsmMap wmap; // the hashmap for all relations in the osm map private OsmMap rmap; // a map of ways that need to be added in a second run private HashMap<Long, Collection<Long>> extraWayMap = null; // a bitset telling the algorithm which tiles need to be rerendered private BitSet modifiedTiles = new BitSet(); // the serializer (OSM writers) for any modified tile private Map<Integer, OsmosisSerializer> outFiles; private Map<Integer, ByteArrayOutputStream> outBlobs; public MapSplit(Date appointmentDate, int[] mapSizes, int maxFiles, double border, File inputFile, boolean completeRelations) { this.border = border; this.input = inputFile; this.appointmentDate = appointmentDate; this.maxFiles = maxFiles; nmap = new HeapMap(mapSizes[0]); wmap = new HeapMap(mapSizes[1]); rmap = new HeapMap(mapSizes[2]); if (completeRelations) { extraWayMap = new HashMap<Long, Collection<Long>>(); } } public static double tile2lon(int x) { return (x / Math.pow(2.0, ZOOM)) * 360.0 - 180.0; } public static double tile2lat(int y) { double n = Math.PI - 2.0 * Math.PI * y / Math.pow(2, ZOOM); return (180.0 / Math.PI * Math.atan(0.5 * (Math.pow(Math.E, n) - Math.pow(Math.E, -n)))); } public static int lon2tileX(double lon) { return (int) Math.floor((lon + 180.0) / 360.0 * Math.pow(2.0, ZOOM)); } public static int lat2tileY(double lat) { return (int) Math .floor((1.0 - Math.log(Math.tan(lat * Math.PI / 180.0) + 1.0 / Math.cos(lat * Math.PI / 180.0)) / Math.PI) / 2.0 * Math.pow(2.0, ZOOM)); } /* Calculate the Bound for the given tile */ public Bound getBound(int tileX, int tileY) { double l = tile2lon(tileX); double r = tile2lon(tileX + 1); double t = tile2lat(tileY); double b = tile2lat(tileY + 1); double dx = r - l; double dy = b - t; l -= border * dx; r += border * dx; t -= border * dy; b += border * dy; return new Bound(r, l, t, b, "mapsplit"); } private void checkAndFill(Collection<Long> tiles) { int minX = Integer.MAX_VALUE, minY = Integer.MAX_VALUE; int maxX = Integer.MIN_VALUE, maxY = Integer.MIN_VALUE; // determine the min/max tile nrs for (long tile : tiles) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); minX = Math.min(minX, tx); minY = Math.min(minY, ty); maxX = Math.max(maxX, tx); maxY = Math.max(maxY, ty); } // enlarge min/max to have a border and to cope with possible neighbourhood tiles minX -= 2; minY -= 2; maxX += 2; maxY += 2; int sizeX = maxX - minX + 1; int sizeY = maxY - minY + 1; // fill the helperSet which marks any set tile BitSet helperSet = new BitSet(); for (long tile : tiles) { int tx = nmap.tileX(tile) - 
minX; int ty = nmap.tileY(tile) - minY; int neighbour = nmap.neighbour(tile); helperSet.set(tx + ty * sizeX); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { helperSet.set(tx + 1 + ty * sizeX); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { helperSet.set(tx + (ty + 1) * sizeX); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { helperSet.set(tx + 1 + (ty + 1) * sizeX); } } // start with tile 1,1 and fill region... Stack<Integer> stack = new Stack<Integer>(); stack.push(1 + 1 * sizeX); // fill all tiles that are reachable by a 4-neighbourhood while (!stack.isEmpty()) { int val = stack.pop(); boolean isSet = helperSet.get(val); helperSet.set(val); if (val >= sizeX * sizeY) { continue; } int ty = val / sizeX; int tx = val % sizeX; if ((tx == 0) || (ty == 0) || (ty >= sizeY)) { continue; } if (!isSet) { stack.push(tx + 1 + ty * sizeX); stack.push(tx - 1 + ty * sizeX); stack.push(tx + (ty + 1) * sizeX); stack.push(tx + (ty - 1) * sizeX); } } // now check if there are not-set bits left (i.e. holes in tiles) int idx = -1; while (true) { idx = helperSet.nextClearBit(idx + 1); if (idx >= sizeX * sizeY) { break; } int tx = idx % sizeX; int ty = idx / sizeX; if ((tx == 0) || (ty == 0)) { continue; } tx += minX; ty += minY; // TODO: make this a bit nicer by delegating the id-generation to the map code tiles.add(((long) tx) << 51 | ((long) ty) << 38); modifiedTiles.set(tx << 13 | ty); } } /* calculate the lon-offset for the given border size */ private double deltaX(double lon) { int tx = lon2tileX(lon); double x1 = tile2lon(tx); double x2 = tile2lon(tx + 1); return border * (x2 - x1); } /* calculate the lat-offset for the given border size */ private double deltaY(double lat) { int ty = lat2tileY(lat); double y1 = tile2lat(ty); double y2 = tile2lat(ty + 1); return border * (y2 - y1); } private void addNodeToMap(Node n, double lat, double lon) { int tileX = lon2tileX(lon); int tileY = lat2tileY(lat); int neighbour = OsmMap.NEIGHBOURS_NONE; // check and add border if needed double dx = deltaX(lon); if (lon2tileX(lon + dx) > tileX) { neighbour = OsmMap.NEIGHBOURS_EAST; } else if (lon2tileX(lon - dx) < tileX) { tileX--; neighbour = OsmMap.NEIGHBOURS_EAST; } double dy = deltaY(lat); if (lat2tileY(lat + dy) > tileY) { neighbour += OsmMap.NEIGHBOURS_SOUTH; } else if (lat2tileY(lat - dy) < tileY) { tileY--; neighbour += OsmMap.NEIGHBOURS_SOUTH; } // mark current tile (and neighbours) to be rerendered if (n.getTimestamp().after(appointmentDate)) { modifiedTiles.set(tileX << 13 | tileY); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tileX + 1) << 13 | tileY); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tileX << 13 | (tileY + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tileX + 1) << 13 | (tileY + 1)); } } // mark the latest changes made to this map if (n.getTimestamp().after(latestDate)) { latestDate = n.getTimestamp(); } nmap.put(n.getId(), tileX, tileY, neighbour); } private void addWayToMap(Way way) { boolean modified = way.getTimestamp().after(appointmentDate); Set<Long> tileList = new TreeSet<Long>(); // mark the latest changes made to this map if (way.getTimestamp().after(latestDate)) { latestDate = way.getTimestamp(); } List<Long> tiles = new ArrayList<>(); for (WayNode wayNode : way.getWayNodes()) { // get tileNrs for given node long tile = nmap.get(wayNode.getNodeId()); // don't ignore missing nodes if (tile == 0) { if (verbose) { System.out.println("way " + way.getId() + " missing node " + wayNode.getNodeId()); } 
return; } tiles.add(tile); } for (long tile : tiles) { // mark tiles (and possible neighbours) as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); modifiedTiles.set(tx << 13 | ty); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tx + 1) << 13 | ty); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tx << 13 | (ty + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tx + 1) << 13 | (ty + 1)); } } tileList.add(tile); } // TODO check/verify if 8 tiles is ok or if there might be corner-cases with only 4 tiles // with more than 8 (or 4?!) tiles in the list we might have a "hole" if (tileList.size() >= 8) { checkAndFill(tileList); } // bootstrap a tilepos for the way long id = way.getWayNodes().get(0).getNodeId(); long val = nmap.get(id); int tx = nmap.tileX(val); int ty = nmap.tileY(val); // put way into map with a "random" base tile wmap.put(way.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the way knows which tiles it belongs to wmap.update(way.getId(), tileList); for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.update(wayNode.getNodeId(), tileList); } } /** * Iterate over the way nodes and add tileList to the list of tiles they are supposed to be in * * @param way the Way we are processing * @param tileList the List of tiles */ private void addExtraWayToMap(@NotNull Way way, @NotNull Collection<Long> tileList) { for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.update(wayNode.getNodeId(), tileList); } } private void addRelationToMap(@NotNull Relation r) { boolean modified = r.getTimestamp().after(appointmentDate); Collection<Long> tileList = new TreeSet<Long>(); if (r.getTimestamp().after(latestDate)) { latestDate = r.getTimestamp(); } for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: long tile = nmap.get(m.getMemberId()); // The referenced node is not in our data set if (tile == 0) { if (verbose) { System.out.println("Non-complete Relation " + r.getId() + " (missing a node)"); } continue; } // mark tiles as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); modifiedTiles.set(tx << 13 | ty); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tx + 1) << 13 | ty); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tx << 13 | (ty + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tx + 1) << 13 | (ty + 1)); } } tileList.add(tile); break; case Way: List<Integer> list = wmap.getAllTiles(m.getMemberId()); // The referenced way is not in our data set if (list == null) { if (verbose) { System.out.println("Non-complete Relation " + r.getId() + " (missing a way)"); } return; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } // TODO: make this a bit more generic / nicer code :/ for (int i : list) { tileList.add(((long) i) << 38); } break; case Relation: list = rmap.getAllTiles(m.getMemberId()); // The referenced way is not in our data set if (list == null) { if (verbose) { System.out.println("Non-complete Relation " + r.getId() + " (missing a relation)"); } return; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } for (int i : list) { tileList.add(((long) i) << 38); } break; } } // Just 
in case, this can happen due to silly input data :'( if (tileList.isEmpty()) { System.out.println("Ignoring empty relation"); return; } if (tileList.size() >= 8) { checkAndFill(tileList); } long val = tileList.iterator().next(); int tx = rmap.tileX(val); int ty = rmap.tileY(val); // put relation into map with a "random" base tile rmap.put(r.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the relation knows in which tiles it is needed rmap.update(r.getId(), tileList); if (extraWayMap != null) { // only add members to all the tiles if we are in // completeRelations mode for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: nmap.update(m.getMemberId(), tileList); break; case Way: wmap.update(m.getMemberId(), tileList); extraWayMap.put(m.getMemberId(), tileList); break; case Relation: rmap.update(m.getMemberId(), tileList); break; case Bound: break; } } } } static int nCount = 0; static int wCount = 0; static int rCount = 0; public void setup(final boolean verbose) throws IOException { this.verbose = verbose; RunnableSource reader = new OsmosisReader(new FileInputStream(input)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { if (ec instanceof NodeContainer) { Node n = ((NodeContainer) ec).getEntity(); addNodeToMap(n, n.getLatitude(), n.getLongitude()); if (verbose) { nCount++; if ((nCount % (nmap.getSize() / 20)) == 0) { System.out.println(nCount + " nodes processed"); } } } else if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); addWayToMap(w); if (verbose) { wCount++; if ((wCount % (wmap.getSize() / 20)) == 0) { System.out.println(wCount + " ways processed"); } } } else if (ec instanceof RelationContainer) { Relation r = ((RelationContainer) ec).getEntity(); addRelationToMap(r); if (verbose) { rCount++; if ((rCount % (rmap.getSize() / 20)) == 0) { System.out.println(wCount + " relations processed"); } } } else if (ec instanceof BoundContainer) { // nothing todo, we ignore bound tags } else { System.err.println("Unknown Element while reading"); System.err.println(ec.toString()); System.err.println(ec.getEntity().toString()); } } @Override public void initialize(Map<String, Object> metaData) { // TODO Auto-generated method stub } @Override public void close() { // TODO Auto-generated method stub } }); Thread readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } if (!complete) { throw new IOException("Could not read file fully"); } if (verbose) { System.out.println("We have read:\n" + nCount + " nodes\n" + wCount + " ways\n" + rCount + " relations"); } // Second run if we are in complete-relation-mode if (extraWayMap != null) { complete = false; reader = new OsmosisReader(new FileInputStream(input)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); Collection<Long> tileList = extraWayMap.get(w.getId()); if (tileList != null) { addExtraWayToMap(w, tileList); } } } @Override public void initialize(Map<String, Object> metaData) { // not used } @Override public void close() { // not used } }); readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } 
if (!complete) { throw new IOException("Could not read file fully in second run"); } } } private boolean isInside(double x, double y, double[] polygon) { boolean in = false; int lines = polygon.length / 2; for (int i = 0, j = lines - 1; i < lines; j = i++) { if (((polygon[2 * i + 1] > y) != (polygon[2 * j + 1] > y)) && (x < (polygon[2 * j] - polygon[2 * i]) * (y - polygon[2 * i + 1]) / (polygon[2 * j + 1] - polygon[2 * i + 1]) + polygon[2 * i])) { in = !in; } } return in; } private boolean isInside(int tx, int ty, double[] polygon) { for (int u = 0; u < 2; u++) { for (int v = 0; v < 2; v++) { double x = tile2lon(tx + u); double y = tile2lat(ty + v); if (isInside(x, y, polygon)) { return true; } } } return false; } private boolean isInside(int tx, int ty, List<double[]> inside, List<double[]> outside) { boolean in = false; for (double[] polygon : inside) { in |= isInside(tx, ty, polygon); if (in) { break; } } if (!in) { return false; } for (double[] polygon : outside) { if (isInside(tx, ty, polygon)) { return false; } } return true; } public void clipPoly(String polygonFile) throws IOException { List<double[]> inside = new ArrayList<double[]>(); List<double[]> outside = new ArrayList<double[]>(); BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(polygonFile))); /* String name = */ br.readLine(); // unused.. String poly = br.readLine(); while (!"END".equals(poly)) { int pos = 0; int size = 128; double[] data = new double[2 * size]; String coords = br.readLine(); while (!"END".equals(coords)) { coords = coords.trim(); int idx = coords.indexOf(" "); double lon = Double.parseDouble(coords.substring(0, idx)); double lat = Double.parseDouble(coords.substring(idx + 1)); // check if there's enough space to store if (pos >= size) { double[] tmp = new double[4 * size]; System.arraycopy(data, 0, tmp, 0, 2 * size); size *= 2; data = tmp; } // store data data[2 * pos] = lon; data[2 * pos + 1] = lat; pos++; coords = br.readLine(); } if (pos != size) { double[] tmp = new double[2 * pos]; System.arraycopy(data, 0, tmp, 0, 2 * pos); data = tmp; } if (poly.startsWith("!")) { outside.add(data); } else { inside.add(data); } // read next polygon, if there's any poly = br.readLine(); } // now walk modifiedTiles and clear bits that are not inside polygon int idx = 0; while (true) { idx = modifiedTiles.nextSetBit(idx + 1); if (idx == -1) { break; } int tx = idx >> 13; int ty = idx & 8191; boolean in = isInside(tx, ty, inside, outside); if (!in) { modifiedTiles.clear(idx); } } } /** * Read the input file, process the OSM elements and write them out * * @param basename the basename for individual tile files or the name of a MBTiles format sqlite database * @param metadata write metadata (version, timestamp, etc) * @param verbose verbose output if true * @param mbTiles write to a MBTiles format sqlite database instead of writing individual tiles * @throws IOException if reading or creating the files has an issue */ public void store(@NotNull String basename, boolean metadata, boolean verbose, boolean mbTiles) throws IOException { int idx = 0; MBTilesWriter w = null; if (mbTiles) { try { w = new MBTilesWriter(new File(basename)); } catch (MBTilesWriteException e1) { throw new IOException(e1); } } // We might call this code several times if we have more tiles // to store than open files allowed while (true) { complete = false; outFiles = new HashMap<Integer, OsmosisSerializer>(); if (mbTiles) { outBlobs = new HashMap<Integer, ByteArrayOutputStream>(); } // Setup out-files... 
int count = 0; while (true) { idx = modifiedTiles.nextSetBit(idx + 1); if (idx == -1) { break; } if (outFiles.get(idx) == null) { int tileX = idx >> 13; int tileY = idx & 8191; OutputStream target = null; if (mbTiles) { target = new ByteArrayOutputStream(); } else { String file; if (basename.contains("%x") && basename.contains("%y")) { file = basename.replace("%x", Integer.toString(tileX)).replace("%y", Integer.toString(tileY)); if (!file.endsWith(PBF_EXT)) { file = file + PBF_EXT; } } else { file = basename + tileX + "_" + tileY + PBF_EXT; } target = new FileOutputStream(file); } OsmosisSerializer serializer = new OsmosisSerializer(new BlockOutputStream(target)); serializer.setUseDense(true); serializer.configOmit(!metadata); // write out the bound for that tile Bound bound = getBound(tileX, tileY); BoundContainer bc = new BoundContainer(bound); serializer.process(bc); outFiles.put(idx, serializer); if (mbTiles) { outBlobs.put(idx, (ByteArrayOutputStream) target); } } if ((maxFiles != -1) && (++count >= maxFiles)) { break; } } // Now start writing output... RunnableSource reader = new OsmosisReader(new FileInputStream(input)); class BoundSink implements Sink { Bound overallBounds = null; /** * Get the overall bounds of the data * * @return a Bound object or null */ Bound getBounds() { return overallBounds; } @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { long id = ec.getEntity().getId(); List<Integer> tiles; if (ec instanceof NodeContainer) { tiles = nmap.getAllTiles(id); } else if (ec instanceof WayContainer) { tiles = wmap.getAllTiles(id); } else if (ec instanceof RelationContainer) { tiles = rmap.getAllTiles(id); } else if (ec instanceof BoundContainer) { Bound bounds = ((BoundContainer) ec).getEntity(); if (overallBounds == null) { overallBounds = bounds; } else { overallBounds.union(bounds); } return; } else { System.err.println("Unknown Element while reading"); System.err.println(ec.toString()); System.err.println(ec.getEntity().toString()); return; } if (tiles == null) { // No tile where we could store the given entity into // This probably is a degenerated relation ;) return; } for (int i : tiles) { if (modifiedTiles.get(i)) { OsmosisSerializer ser = outFiles.get(i); if (ser != null) { ser.process(ec); } } } } @Override public void initialize(Map<String, Object> metaData) { // do nothing } @Override public void close() { // do nothing } } ; BoundSink sink = new BoundSink(); reader.setSink(sink); Thread readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } if (!complete) { throw new IOException("Could not fully read file in storing run"); } // Finish and close files... 
for (Entry<Integer, OsmosisSerializer> entry : outFiles.entrySet()) { OsmosisSerializer ser = entry.getValue(); ser.complete(); ser.flush(); ser.close(); if (mbTiles) { int tileX = entry.getKey() >> 13; int tileY = entry.getKey() & 8191; int y = YMAX - tileY - 1; // TMS scheme ByteArrayOutputStream blob = outBlobs.get(entry.getKey()); try { w.addTile(blob.toByteArray(), 13, tileX, y); } catch (MBTilesWriteException e) { throw new IOException(e); } } } if (idx == -1) { // Add metadata parts if (mbTiles) { MetadataEntry ent = new MetadataEntry(); File file = new File(basename); ent.setTilesetName(file.getName()).setTilesetType(MetadataEntry.TileSetType.BASE_LAYER).setTilesetVersion("0.2.0") .setAttribution("OpenStreetMap Contributors ODbL 1.0").addCustomKeyValue("format", "application/vnd.openstreetmap.data+pbf") .addCustomKeyValue("minzoom", Integer.toString(ZOOM)).addCustomKeyValue("maxzoom", Integer.toString(ZOOM)); Bound bounds = sink.getBounds(); if (bounds != null) { ent.setTilesetBounds(bounds.getLeft(), bounds.getBottom(), bounds.getRight(), bounds.getTop()); } else { ent.setTilesetBounds(-180, -85, 180, 85); } try { w.addMetadataEntry(ent); } catch (MBTilesWriteException e) { throw new IOException(e); } w.close(); } break; } if (verbose) { System.out.println("Wrote " + maxFiles + " tiles, continuing with next block of tiles"); } } } /** * Set up options from the command line and run the tiler * * @param inputFile the input PBF file * @param outputBase * @param polygonFile * @param mapSizes * @param maxFiles * @param border * @param appointmentDate * @param metadata * @param verbose * @param timing * @param completeRelations * @param mbTiles * @return * @throws Exception */ private static Date run(@NotNull String inputFile, @NotNull String outputBase, @Nullable String polygonFile, int[] mapSizes, int maxFiles, double border, Date appointmentDate, boolean metadata, boolean verbose, boolean timing, boolean completeRelations, boolean mbTiles) throws Exception { long startup = System.currentTimeMillis(); MapSplit split = new MapSplit(appointmentDate, mapSizes, maxFiles, border, new File(inputFile), completeRelations); long time = System.currentTimeMillis(); split.setup(verbose); time = System.currentTimeMillis() - time; double nratio = split.nmap.getMissHitRatio(); double wratio = split.wmap.getMissHitRatio(); double rratio = split.rmap.getMissHitRatio(); if (polygonFile != null) { if (verbose) { System.out.println("Clip tiles with polygon given by \"" + polygonFile + "\""); } split.clipPoly(polygonFile); } int modified = split.modifiedTiles.cardinality(); if (timing) { System.out.println("Initial reading and datastructure setup took " + time + "ms"); } if (verbose) { System.out.println("We have " + modified + " modified tiles to store."); } time = System.currentTimeMillis(); split.store(outputBase, metadata, verbose, mbTiles); time = System.currentTimeMillis() - time; if (timing) { System.out.println("Saving " + modified + " tiles took " + time + "ms"); long overall = System.currentTimeMillis() - startup; System.out.print("\nOverall runtime: " + overall + "ms"); System.out.println(" == " + (overall / 1000 / 60) + "min"); } if (verbose) { System.out.println("\nHashmap's load:"); System.out.println("Nodes : " + split.nmap.getLoad()); System.out.println("Ways : " + split.wmap.getLoad()); System.out.println("Relations: " + split.rmap.getLoad()); System.out.println("\nHashmap's MissHitRatio:"); System.out.printf("Nodes : %10.6f\n", nratio); System.out.printf("Ways : %10.6f\n", wratio); 
System.out.printf("Relations: %10.6f\n", rratio); } return split.latestDate; } public static void main(String[] args) throws Exception { Date appointmentDate; String inputFile = null; String outputBase = null; String polygonFile = null; boolean verbose = false; boolean timing = false; boolean metadata = false; boolean completeRelations = false; boolean mbTiles = false; String dateFile = null; int[] mapSizes = new int[] { NODE_MAP_SIZE, WAY_MAP_SIZE, RELATION_MAP_SIZE }; int maxFiles = -1; double border = 0.0; // arguments Option helpOption = Option.builder("h").longOpt("help").desc("this help").build(); Option verboseOption = Option.builder("v").longOpt("verbose").desc("verbose information during processing").build(); Option timingOption = Option.builder("t").longOpt("timing").desc("output timing information").build(); Option metadataOption = Option.builder("m").longOpt("metadata").desc("store metadata in tile-files (version, timestamp)").build(); Option completeMPOption = Option.builder("c").longOpt("complete").desc("store complete data for multi polygons").build(); Option mbTilesOption = Option.builder("x").longOpt("mbtiles").desc("store in a MBTiles format sqlite database").build(); Option maxFilesOption = Option.builder("f").longOpt("maxfiles").hasArg().desc("maximum number of open files at a time").build(); Option borderOption = Option.builder("b").longOpt("border").hasArg() .desc("enlarge tiles by val ([0-1[) of the tile's size to get a border around the tile.").build(); Option polygonOption = Option.builder("p").longOpt("polygon").hasArg().desc("only save tiles that intersect or lie within the given polygon file.") .build(); Option dateOption = Option.builder("d").longOpt("date").hasArg().desc( "file containing the date since when tiles are being considered to have changed after the split the latest change in infile is going to be stored in file") .build(); Option sizeOption = Option.builder("s").longOpt("size").hasArg().desc( "n,w,r the size for the node-, way- and relation maps to use (should be at least twice the number of IDs). If not supplied, defaults will be taken.") .build(); Option inputOption = Option.builder("i").longOpt("input").hasArgs().desc("a file in OSM pbf format").required().build(); Option outputOption = Option.builder("o").longOpt("output").hasArg().desc( "if creating a MBTiels files this is the name of the file, otherwise this is the base name of all tiles that will be written. 
The filename may contain '%x' and '%y' which will be replaced with the tilenumbers at zoom 13") .required().build(); Options options = new Options(); options.addOption(helpOption); options.addOption(verboseOption); options.addOption(timingOption); options.addOption(metadataOption); options.addOption(completeMPOption); options.addOption(mbTilesOption); options.addOption(maxFilesOption); options.addOption(borderOption); options.addOption(polygonOption); options.addOption(dateOption); options.addOption(sizeOption); options.addOption(inputOption); options.addOption(outputOption); CommandLineParser parser = new DefaultParser(); try { // parse the command line arguments CommandLine line = parser.parse(options, args); if (line.hasOption("h")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("mapsplit", options); return; } if (line.hasOption("v")) { verbose = true; } if (line.hasOption("t")) { timing = true; } if (line.hasOption("m")) { metadata = true; } if (line.hasOption("c")) { completeRelations = true; } if (line.hasOption("x")) { mbTiles = true; } if (line.hasOption("f")) { String tmp = line.getOptionValue("maxfiles"); maxFiles = Integer.valueOf(tmp); } if (line.hasOption("d")) { dateFile = line.getOptionValue("date"); } if (line.hasOption("p")) { polygonFile = line.getOptionValue("ploygon"); } if (line.hasOption("s")) { String tmp = line.getOptionValue("size"); String[] vals = tmp.split(","); for (int j = 0; j < 3; j++) { mapSizes[j] = Integer.valueOf(vals[j]); } } if (line.hasOption("b")) { String tmp = line.getOptionValue("border"); try { border = Double.valueOf(tmp); if (border < 0) { border = 0; } if (border > 1) { border = 1; } } catch (NumberFormatException e) { System.err.println("Could not parse border parameter, falling back to defaults"); } } if (line.hasOption("i")) { inputFile = line.getOptionValue("input"); } if (line.hasOption("o")) { outputBase = line.getOptionValue("output"); } } catch (ParseException exp) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("mapsplit", options); return; } // Date-setup as fall-back option DateFormat df = DateFormat.getDateTimeInstance(); appointmentDate = new Date(0); if (dateFile == null && verbose) { System.out.println("No datefile given. Writing all available tiles."); } else if (dateFile != null) { File file = new File(dateFile); if (file.exists()) { DataInputStream dis = new DataInputStream(new FileInputStream(file)); String line = dis.readUTF(); if (line != null) { try { appointmentDate = df.parse(line); } catch (java.text.ParseException pe) { if (verbose) { System.out.println("Could not parse datefile."); } } } dis.close(); } else if (verbose) { System.out.println("Datefile does not exist, writing all tiles"); } } if (verbose) { System.out.println("Reading: " + inputFile); System.out.println("Writing: " + outputBase); } // Actually run the splitter... Date latest = run(inputFile, outputBase, polygonFile, mapSizes, maxFiles, border, appointmentDate, metadata, verbose, timing, completeRelations, mbTiles); if (verbose) { System.out.println("Last changes to the map had been done on " + df.format(latest)); } if (dateFile != null) { try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(dateFile));) { dos.writeUTF(df.format(latest)); } } } }
src/main/java/MapSplit.java
/* * Mapsplit - A simple but fast tile splitter for large OSM data * * Written in 2011 by Peda (osm-mapsplit@won2.de) * * To the extent possible under law, the author(s) have dedicated all copyright and related and neighboring rights to * this software to the public domain worldwide. This software is distributed without any warranty. * * You should have received a copy of the CC0 Public Domain Dedication along with this software. If not, see * <http://creativecommons.org/publicdomain/zero/1.0/>. */ import java.io.BufferedReader; import java.io.ByteArrayOutputStream; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStream; import java.text.DateFormat; import java.util.ArrayList; import java.util.BitSet; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import java.util.TreeSet; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.DefaultParser; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.imintel.mbtiles4j.MBTilesWriteException; import org.imintel.mbtiles4j.MBTilesWriter; import org.imintel.mbtiles4j.model.MetadataEntry; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.openstreetmap.osmosis.core.container.v0_6.BoundContainer; import org.openstreetmap.osmosis.core.container.v0_6.EntityContainer; import org.openstreetmap.osmosis.core.container.v0_6.NodeContainer; import org.openstreetmap.osmosis.core.container.v0_6.RelationContainer; import org.openstreetmap.osmosis.core.container.v0_6.WayContainer; import org.openstreetmap.osmosis.core.domain.v0_6.Bound; import org.openstreetmap.osmosis.core.domain.v0_6.Node; import org.openstreetmap.osmosis.core.domain.v0_6.Relation; import org.openstreetmap.osmosis.core.domain.v0_6.RelationMember; import org.openstreetmap.osmosis.core.domain.v0_6.Way; import org.openstreetmap.osmosis.core.domain.v0_6.WayNode; import org.openstreetmap.osmosis.core.task.v0_6.RunnableSource; import org.openstreetmap.osmosis.core.task.v0_6.Sink; import org.openstreetmap.osmosis.osmbinary.file.BlockOutputStream; // import crosby.binary.file.BlockOutputStream; import crosby.binary.osmosis.OsmosisReader; import crosby.binary.osmosis.OsmosisSerializer; public class MapSplit { private static final String PBF_EXT = ".pbf"; /* * the zoom-level at which we render our tiles Attention: Code is not generic enough to change this value without * further code changes! 
;) */ private static final int ZOOM = 13; private static final int YMAX = 1 << ZOOM; // TMS scheme /* * the default sizes for the hash maps: should be a factor 2-4 of nodes in the pbf you want to read */ private static final int NODE_MAP_SIZE = 60000000; private static final int WAY_MAP_SIZE = 10000000; private static final int RELATION_MAP_SIZE = 2500000; // all data after this appointment date is considered new or modified private Date appointmentDate; private Date latestDate = new Date(0); // the size of the border (in percent for a tile's height and width) for single tiles private double border = 0.1; // the input file we're going to split private File input; // maximum number of files open at the same time private int maxFiles; // internal store to check if reading the file worked private boolean complete = false; // verbose outpu private boolean verbose = false; // the hashmap for all nodes in the osm map private OsmMap nmap; // the hashmap for all ways in the osm map private OsmMap wmap; // the hashmap for all relations in the osm map private OsmMap rmap; // a map of ways that need to be added in a second run private HashMap<Long, Collection<Long>> extraWayMap = null; // a bitset telling the algorithm which tiles need to be rerendered private BitSet modifiedTiles = new BitSet(); // the serializer (OSM writers) for any modified tile private Map<Integer, OsmosisSerializer> outFiles; private Map<Integer, ByteArrayOutputStream> outBlobs; public MapSplit(Date appointmentDate, int[] mapSizes, int maxFiles, double border, File inputFile, boolean completeRelations) { this.border = border; this.input = inputFile; this.appointmentDate = appointmentDate; this.maxFiles = maxFiles; nmap = new HeapMap(mapSizes[0]); wmap = new HeapMap(mapSizes[1]); rmap = new HeapMap(mapSizes[2]); if (completeRelations) { extraWayMap = new HashMap<Long, Collection<Long>>(); } } public static double tile2lon(int x) { return (x / Math.pow(2.0, ZOOM)) * 360.0 - 180.0; } public static double tile2lat(int y) { double n = Math.PI - 2.0 * Math.PI * y / Math.pow(2, ZOOM); return (180.0 / Math.PI * Math.atan(0.5 * (Math.pow(Math.E, n) - Math.pow(Math.E, -n)))); } public static int lon2tileX(double lon) { return (int) Math.floor((lon + 180.0) / 360.0 * Math.pow(2.0, ZOOM)); } public static int lat2tileY(double lat) { return (int) Math .floor((1.0 - Math.log(Math.tan(lat * Math.PI / 180.0) + 1.0 / Math.cos(lat * Math.PI / 180.0)) / Math.PI) / 2.0 * Math.pow(2.0, ZOOM)); } /* Calculate the Bound for the given tile */ public Bound getBound(int tileX, int tileY) { double l = tile2lon(tileX); double r = tile2lon(tileX + 1); double t = tile2lat(tileY); double b = tile2lat(tileY + 1); double dx = r - l; double dy = b - t; l -= border * dx; r += border * dx; t -= border * dy; b += border * dy; return new Bound(r, l, t, b, "mapsplit"); } private void checkAndFill(Collection<Long> tiles) { int minX = Integer.MAX_VALUE, minY = Integer.MAX_VALUE; int maxX = Integer.MIN_VALUE, maxY = Integer.MIN_VALUE; // determine the min/max tile nrs for (long tile : tiles) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); minX = Math.min(minX, tx); minY = Math.min(minY, ty); maxX = Math.max(maxX, tx); maxY = Math.max(maxY, ty); } // enlarge min/max to have a border and to cope with possible neighbourhood tiles minX -= 2; minY -= 2; maxX += 2; maxY += 2; int sizeX = maxX - minX + 1; int sizeY = maxY - minY + 1; // fill the helperSet which marks any set tile BitSet helperSet = new BitSet(); for (long tile : tiles) { int tx = nmap.tileX(tile) - 
minX; int ty = nmap.tileY(tile) - minY; int neighbour = nmap.neighbour(tile); helperSet.set(tx + ty * sizeX); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { helperSet.set(tx + 1 + ty * sizeX); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { helperSet.set(tx + (ty + 1) * sizeX); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { helperSet.set(tx + 1 + (ty + 1) * sizeX); } } // start with tile 1,1 and fill region... Stack<Integer> stack = new Stack<Integer>(); stack.push(1 + 1 * sizeX); // fill all tiles that are reachable by a 4-neighbourhood while (!stack.isEmpty()) { int val = stack.pop(); boolean isSet = helperSet.get(val); helperSet.set(val); if (val >= sizeX * sizeY) { continue; } int ty = val / sizeX; int tx = val % sizeX; if ((tx == 0) || (ty == 0) || (ty >= sizeY)) { continue; } if (!isSet) { stack.push(tx + 1 + ty * sizeX); stack.push(tx - 1 + ty * sizeX); stack.push(tx + (ty + 1) * sizeX); stack.push(tx + (ty - 1) * sizeX); } } // now check if there are not-set bits left (i.e. holes in tiles) int idx = -1; while (true) { idx = helperSet.nextClearBit(idx + 1); if (idx >= sizeX * sizeY) { break; } int tx = idx % sizeX; int ty = idx / sizeX; if ((tx == 0) || (ty == 0)) { continue; } tx += minX; ty += minY; // TODO: make this a bit nicer by delegating the id-generation to the map code tiles.add(((long) tx) << 51 | ((long) ty) << 38); modifiedTiles.set(tx << 13 | ty); } } /* calculate the lon-offset for the given border size */ private double deltaX(double lon) { int tx = lon2tileX(lon); double x1 = tile2lon(tx); double x2 = tile2lon(tx + 1); return border * (x2 - x1); } /* calculate the lat-offset for the given border size */ private double deltaY(double lat) { int ty = lat2tileY(lat); double y1 = tile2lat(ty); double y2 = tile2lat(ty + 1); return border * (y2 - y1); } private void addNodeToMap(Node n, double lat, double lon) { int tileX = lon2tileX(lon); int tileY = lat2tileY(lat); int neighbour = OsmMap.NEIGHBOURS_NONE; // check and add border if needed double dx = deltaX(lon); if (lon2tileX(lon + dx) > tileX) { neighbour = OsmMap.NEIGHBOURS_EAST; } else if (lon2tileX(lon - dx) < tileX) { tileX--; neighbour = OsmMap.NEIGHBOURS_EAST; } double dy = deltaY(lat); if (lat2tileY(lat + dy) > tileY) { neighbour += OsmMap.NEIGHBOURS_SOUTH; } else if (lat2tileY(lat - dy) < tileY) { tileY--; neighbour += OsmMap.NEIGHBOURS_SOUTH; } // mark current tile (and neighbours) to be rerendered if (n.getTimestamp().after(appointmentDate)) { modifiedTiles.set(tileX << 13 | tileY); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tileX + 1) << 13 | tileY); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tileX << 13 | (tileY + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tileX + 1) << 13 | (tileY + 1)); } } // mark the latest changes made to this map if (n.getTimestamp().after(latestDate)) { latestDate = n.getTimestamp(); } nmap.put(n.getId(), tileX, tileY, neighbour); } private void addWayToMap(Way way) { boolean modified = way.getTimestamp().after(appointmentDate); Set<Long> tileList = new TreeSet<Long>(); // mark the latest changes made to this map if (way.getTimestamp().after(latestDate)) { latestDate = way.getTimestamp(); } for (WayNode wayNode : way.getWayNodes()) { // get tileNrs for given node long tile = nmap.get(wayNode.getNodeId()); // don't ignore missing nodes if (tile == 0) { return; } // mark tiles (and possible neighbours) as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int 
neighbour = nmap.neighbour(tile); modifiedTiles.set(tx << 13 | ty); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tx + 1) << 13 | ty); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tx << 13 | (ty + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tx + 1) << 13 | (ty + 1)); } } tileList.add(tile); } // TODO check/verify if 8 tiles is ok or if there might be corner-cases with only 4 tiles // with more than 8 (or 4?!) tiles in the list we might have a "hole" if (tileList.size() >= 8) { checkAndFill(tileList); } // bootstrap a tilepos for the way long id = way.getWayNodes().get(0).getNodeId(); long val = nmap.get(id); int tx = nmap.tileX(val); int ty = nmap.tileY(val); // put way into map with a "random" base tile wmap.put(way.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // update map so that the way knows which tiles it belongs to wmap.update(way.getId(), tileList); for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.update(wayNode.getNodeId(), tileList); } } private void addExtraWayToMap(Way way, Collection<Long> tileList) { for (WayNode wayNode : way.getWayNodes()) { // update map so that the node knows about any additional // tile it has to be stored in nmap.update(wayNode.getNodeId(), tileList); } } private void addRelationToMap(Relation r) { boolean modified = r.getTimestamp().after(appointmentDate); Collection<Long> tileList = new TreeSet<Long>(); if (r.getTimestamp().after(latestDate)) { latestDate = r.getTimestamp(); } for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: long tile = nmap.get(m.getMemberId()); // The referenced node is not in our data set if (tile == 0) { if (verbose) { System.out.println("Non-complete Relation " + r.getId() + " (missing a node)"); } continue; } // mark tiles as modified if (modified) { int tx = nmap.tileX(tile); int ty = nmap.tileY(tile); int neighbour = nmap.neighbour(tile); modifiedTiles.set(tx << 13 | ty); if ((neighbour & OsmMap.NEIGHBOURS_EAST) != 0) { modifiedTiles.set((tx + 1) << 13 | ty); } if ((neighbour & OsmMap.NEIGHBOURS_SOUTH) != 0) { modifiedTiles.set(tx << 13 | (ty + 1)); } if (neighbour == OsmMap.NEIGHBOURS_SOUTH_EAST) { modifiedTiles.set((tx + 1) << 13 | (ty + 1)); } } tileList.add(tile); break; case Way: List<Integer> list = wmap.getAllTiles(m.getMemberId()); // The referenced way is not in our data set if (list == null) { if (verbose) { System.out.println("Non-complete Relation " + r.getId() + " (missing a way)"); } return; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } // TODO: make this a bit more generic / nicer code :/ for (int i : list) { tileList.add(((long) i) << 38); } break; case Relation: list = rmap.getAllTiles(m.getMemberId()); // The referenced way is not in our data set if (list == null) { if (verbose) { System.out.println("Non-complete Relation " + r.getId() + " (missing a relation)"); } return; } if (modified) { for (Integer i : list) { modifiedTiles.set(i); } } for (int i : list) { tileList.add(((long) i) << 38); } break; } } // Just in case, this can happen due to silly input data :'( if (tileList.isEmpty()) { System.out.println("Ignoring empty relation"); return; } if (tileList.size() >= 8) { checkAndFill(tileList); } long val = tileList.iterator().next(); int tx = rmap.tileX(val); int ty = rmap.tileY(val); // put relation into map with a "random" base tile rmap.put(r.getId(), tx, ty, OsmMap.NEIGHBOURS_NONE); // 
update map so that the relation knows in which tiles it is needed rmap.update(r.getId(), tileList); for (RelationMember m : r.getMembers()) { switch (m.getMemberType()) { case Node: nmap.update(m.getMemberId(), tileList); break; case Way: wmap.update(m.getMemberId(), tileList); if (extraWayMap != null) { extraWayMap.put(m.getMemberId(), tileList); } break; case Relation: default: // not handled } } } static int nCount = 0; static int wCount = 0; static int rCount = 0; public void setup(final boolean verbose) throws IOException { this.verbose = verbose; RunnableSource reader = new OsmosisReader(new FileInputStream(input)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { if (ec instanceof NodeContainer) { Node n = ((NodeContainer) ec).getEntity(); addNodeToMap(n, n.getLatitude(), n.getLongitude()); if (verbose) { nCount++; if ((nCount % (nmap.getSize() / 20)) == 0) { System.out.println(nCount + " nodes processed"); } } } else if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); addWayToMap(w); if (verbose) { wCount++; if ((wCount % (wmap.getSize() / 20)) == 0) { System.out.println(wCount + " ways processed"); } } } else if (ec instanceof RelationContainer) { Relation r = ((RelationContainer) ec).getEntity(); addRelationToMap(r); if (verbose) { rCount++; if ((rCount % (rmap.getSize() / 20)) == 0) { System.out.println(wCount + " relations processed"); } } } else if (ec instanceof BoundContainer) { // nothing todo, we ignore bound tags } else { System.err.println("Unknown Element while reading"); System.err.println(ec.toString()); System.err.println(ec.getEntity().toString()); } } @Override public void initialize(Map<String, Object> metaData) { // TODO Auto-generated method stub } @Override public void close() { // TODO Auto-generated method stub } }); Thread readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } if (!complete) { throw new IOException("Could not read file fully"); } if (verbose) { System.out.println("We have read:\n" + nCount + " nodes\n" + wCount + " ways\n" + rCount + " relations"); } // Second run if we are in complete-relation-mode if (extraWayMap != null) { complete = false; reader = new OsmosisReader(new FileInputStream(input)); reader.setSink(new Sink() { @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { if (ec instanceof WayContainer) { Way w = ((WayContainer) ec).getEntity(); Collection<Long> tileList = extraWayMap.get(w.getId()); if (tileList != null) { addExtraWayToMap(w, tileList); } } } @Override public void initialize(Map<String, Object> metaData) { // not used } @Override public void close() { // not used } }); readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } if (!complete) { throw new IOException("Could not read file fully in second run"); } } } private boolean isInside(double x, double y, double[] polygon) { boolean in = false; int lines = polygon.length / 2; for (int i = 0, j = lines - 1; i < lines; j = i++) { if (((polygon[2 * i + 1] > y) != (polygon[2 * j + 1] > y)) && (x < (polygon[2 * j] - polygon[2 * i]) * (y - polygon[2 * i + 1]) / (polygon[2 * j + 1] - polygon[2 * i + 1]) + polygon[2 * i])) { in = !in; } } return in; } private boolean isInside(int 
tx, int ty, double[] polygon) { for (int u = 0; u < 2; u++) { for (int v = 0; v < 2; v++) { double x = tile2lon(tx + u); double y = tile2lat(ty + v); if (isInside(x, y, polygon)) { return true; } } } return false; } private boolean isInside(int tx, int ty, List<double[]> inside, List<double[]> outside) { boolean in = false; for (double[] polygon : inside) { in |= isInside(tx, ty, polygon); if (in) { break; } } if (!in) { return false; } for (double[] polygon : outside) { if (isInside(tx, ty, polygon)) { return false; } } return true; } public void clipPoly(String polygonFile) throws IOException { List<double[]> inside = new ArrayList<double[]>(); List<double[]> outside = new ArrayList<double[]>(); BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(polygonFile))); /* String name = */ br.readLine(); // unused.. String poly = br.readLine(); while (!"END".equals(poly)) { int pos = 0; int size = 128; double[] data = new double[2 * size]; String coords = br.readLine(); while (!"END".equals(coords)) { coords = coords.trim(); int idx = coords.indexOf(" "); double lon = Double.parseDouble(coords.substring(0, idx)); double lat = Double.parseDouble(coords.substring(idx + 1)); // check if there's enough space to store if (pos >= size) { double[] tmp = new double[4 * size]; System.arraycopy(data, 0, tmp, 0, 2 * size); size *= 2; data = tmp; } // store data data[2 * pos] = lon; data[2 * pos + 1] = lat; pos++; coords = br.readLine(); } if (pos != size) { double[] tmp = new double[2 * pos]; System.arraycopy(data, 0, tmp, 0, 2 * pos); data = tmp; } if (poly.startsWith("!")) { outside.add(data); } else { inside.add(data); } // read next polygon, if there's any poly = br.readLine(); } // now walk modifiedTiles and clear bits that are not inside polygon int idx = 0; while (true) { idx = modifiedTiles.nextSetBit(idx + 1); if (idx == -1) { break; } int tx = idx >> 13; int ty = idx & 8191; boolean in = isInside(tx, ty, inside, outside); if (!in) { modifiedTiles.clear(idx); } } } /** * Read the input file, process the OSM elements and write them out * * @param basename the basename for individual tile files or the name of a MBTiles format sqlite database * @param metadata write metadata (version, timestamp, etc) * @param verbose verbose output if true * @param mbTiles write to a MBTiles format sqlite database instead of writing individual tiles * @throws IOException if reading or creating the files has an issue */ public void store(@NotNull String basename, boolean metadata, boolean verbose, boolean mbTiles) throws IOException { int idx = 0; MBTilesWriter w = null; if (mbTiles) { try { w = new MBTilesWriter(new File(basename)); } catch (MBTilesWriteException e1) { throw new IOException(e1); } } // We might call this code several times if we have more tiles // to store than open files allowed while (true) { complete = false; outFiles = new HashMap<Integer, OsmosisSerializer>(); if (mbTiles) { outBlobs = new HashMap<Integer, ByteArrayOutputStream>(); } // Setup out-files... 
int count = 0; while (true) { idx = modifiedTiles.nextSetBit(idx + 1); if (idx == -1) { break; } if (outFiles.get(idx) == null) { int tileX = idx >> 13; int tileY = idx & 8191; OutputStream target = null; if (mbTiles) { target = new ByteArrayOutputStream(); } else { String file; if (basename.contains("%x") && basename.contains("%y")) { file = basename.replace("%x", Integer.toString(tileX)).replace("%y", Integer.toString(tileY)); if (!file.endsWith(PBF_EXT)) { file = file + PBF_EXT; } } else { file = basename + tileX + "_" + tileY + PBF_EXT; } target = new FileOutputStream(file); } OsmosisSerializer serializer = new OsmosisSerializer(new BlockOutputStream(target)); serializer.setUseDense(true); serializer.configOmit(!metadata); // write out the bound for that tile Bound bound = getBound(tileX, tileY); BoundContainer bc = new BoundContainer(bound); serializer.process(bc); outFiles.put(idx, serializer); if (mbTiles) { outBlobs.put(idx, (ByteArrayOutputStream) target); } } if ((maxFiles != -1) && (++count >= maxFiles)) { break; } } // Now start writing output... RunnableSource reader = new OsmosisReader(new FileInputStream(input)); class BoundSink implements Sink { Bound overallBounds = null; /** * Get the overall bounds of the data * * @return a Bound object or null */ Bound getBounds() { return overallBounds; } @Override public void complete() { complete = true; } @Override public void process(EntityContainer ec) { long id = ec.getEntity().getId(); List<Integer> tiles; if (ec instanceof NodeContainer) { tiles = nmap.getAllTiles(id); } else if (ec instanceof WayContainer) { tiles = wmap.getAllTiles(id); } else if (ec instanceof RelationContainer) { tiles = rmap.getAllTiles(id); } else if (ec instanceof BoundContainer) { Bound bounds = ((BoundContainer) ec).getEntity(); if (overallBounds == null) { overallBounds = bounds; } else { overallBounds.union(bounds); } return; } else { System.err.println("Unknown Element while reading"); System.err.println(ec.toString()); System.err.println(ec.getEntity().toString()); return; } if (tiles == null) { // No tile where we could store the given entity into // This probably is a degenerated relation ;) return; } for (int i : tiles) { if (modifiedTiles.get(i)) { OsmosisSerializer ser = outFiles.get(i); if (ser != null) { ser.process(ec); } } } } @Override public void initialize(Map<String, Object> metaData) { // do nothing } @Override public void close() { // do nothing } } ; BoundSink sink = new BoundSink(); reader.setSink(sink); Thread readerThread = new Thread(reader); readerThread.start(); while (readerThread.isAlive()) { try { readerThread.join(); } catch (InterruptedException e) { e.printStackTrace(); } } if (!complete) { throw new IOException("Could not fully read file in storing run"); } // Finish and close files... 
for (Entry<Integer, OsmosisSerializer> entry : outFiles.entrySet()) { OsmosisSerializer ser = entry.getValue(); ser.complete(); ser.flush(); ser.close(); if (mbTiles) { int tileX = entry.getKey() >> 13; int tileY = entry.getKey() & 8191; int y = YMAX - tileY - 1; // TMS scheme ByteArrayOutputStream blob = outBlobs.get(entry.getKey()); try { w.addTile(blob.toByteArray(), 13, tileX, y); } catch (MBTilesWriteException e) { throw new IOException(e); } } } if (idx == -1) { // Add metadata parts if (mbTiles) { MetadataEntry ent = new MetadataEntry(); File file = new File(basename); ent.setTilesetName(file.getName()).setTilesetType(MetadataEntry.TileSetType.BASE_LAYER).setTilesetVersion("0.2.0") .setAttribution("OpenStreetMap Contributors ODbL 1.0").addCustomKeyValue("format", "application/vnd.openstreetmap.data+pbf") .addCustomKeyValue("minzoom", Integer.toString(ZOOM)).addCustomKeyValue("maxzoom", Integer.toString(ZOOM)); Bound bounds = sink.getBounds(); if (bounds != null) { ent.setTilesetBounds(bounds.getLeft(), bounds.getBottom(), bounds.getRight(), bounds.getTop()); } else { ent.setTilesetBounds(-180, -85, 180, 85); } try { w.addMetadataEntry(ent); } catch (MBTilesWriteException e) { throw new IOException(e); } w.close(); } break; } if (verbose) { System.out.println("Wrote " + maxFiles + " tiles, continuing with next block of tiles"); } } } /** * Set up options from the command line and run the tiler * * @param inputFile the input PBF file * @param outputBase * @param polygonFile * @param mapSizes * @param maxFiles * @param border * @param appointmentDate * @param metadata * @param verbose * @param timing * @param completeRelations * @param mbTiles * @return * @throws Exception */ private static Date run(@NotNull String inputFile, @NotNull String outputBase, @Nullable String polygonFile, int[] mapSizes, int maxFiles, double border, Date appointmentDate, boolean metadata, boolean verbose, boolean timing, boolean completeRelations, boolean mbTiles) throws Exception { long startup = System.currentTimeMillis(); MapSplit split = new MapSplit(appointmentDate, mapSizes, maxFiles, border, new File(inputFile), completeRelations); long time = System.currentTimeMillis(); split.setup(verbose); time = System.currentTimeMillis() - time; double nratio = split.nmap.getMissHitRatio(); double wratio = split.wmap.getMissHitRatio(); double rratio = split.rmap.getMissHitRatio(); if (polygonFile != null) { if (verbose) { System.out.println("Clip tiles with polygon given by \"" + polygonFile + "\""); } split.clipPoly(polygonFile); } int modified = split.modifiedTiles.cardinality(); if (timing) { System.out.println("Initial reading and datastructure setup took " + time + "ms"); } if (verbose) { System.out.println("We have " + modified + " modified tiles to store."); } time = System.currentTimeMillis(); split.store(outputBase, metadata, verbose, mbTiles); time = System.currentTimeMillis() - time; if (timing) { System.out.println("Saving " + modified + " tiles took " + time + "ms"); long overall = System.currentTimeMillis() - startup; System.out.print("\nOverall runtime: " + overall + "ms"); System.out.println(" == " + (overall / 1000 / 60) + "min"); } if (verbose) { System.out.println("\nHashmap's load:"); System.out.println("Nodes : " + split.nmap.getLoad()); System.out.println("Ways : " + split.wmap.getLoad()); System.out.println("Relations: " + split.rmap.getLoad()); System.out.println("\nHashmap's MissHitRatio:"); System.out.printf("Nodes : %10.6f\n", nratio); System.out.printf("Ways : %10.6f\n", wratio); 
System.out.printf("Relations: %10.6f\n", rratio); } return split.latestDate; } public static void main(String[] args) throws Exception { Date appointmentDate; String inputFile = null; String outputBase = null; String polygonFile = null; boolean verbose = false; boolean timing = false; boolean metadata = false; boolean completeRelations = false; boolean mbTiles = false; String dateFile = null; int[] mapSizes = new int[] { NODE_MAP_SIZE, WAY_MAP_SIZE, RELATION_MAP_SIZE }; int maxFiles = -1; double border = 0.0; // arguments Option helpOption = Option.builder("h").longOpt("help").desc("this help").build(); Option verboseOption = Option.builder("v").longOpt("verbose").desc("verbose information during processing").build(); Option timingOption = Option.builder("t").longOpt("timing").desc("output timing information").build(); Option metadataOption = Option.builder("m").longOpt("metadata").desc("store metadata in tile-files (version, timestamp)").build(); Option completeMPOption = Option.builder("c").longOpt("complete").desc("store complete data for multi polygons").build(); Option mbTilesOption = Option.builder("x").longOpt("mbtiles").desc("store in a MBTiles format sqlite database").build(); Option maxFilesOption = Option.builder("f").longOpt("maxfiles").hasArg().desc("maximum number of open files at a time").build(); Option borderOption = Option.builder("b").longOpt("border").hasArg() .desc("enlarge tiles by val ([0-1[) of the tile's size to get a border around the tile.").build(); Option polygonOption = Option.builder("p").longOpt("polygon").hasArg().desc("only save tiles that intersect or lie within the given polygon file.") .build(); Option dateOption = Option.builder("d").longOpt("date").hasArg().desc( "file containing the date since when tiles are being considered to have changed after the split the latest change in infile is going to be stored in file") .build(); Option sizeOption = Option.builder("s").longOpt("size").hasArg().desc( "n,w,r the size for the node-, way- and relation maps to use (should be at least twice the number of IDs). If not supplied, defaults will be taken.") .build(); Option inputOption = Option.builder("i").longOpt("input").hasArgs().desc("a file in OSM pbf format").required().build(); Option outputOption = Option.builder("o").longOpt("output").hasArg().desc( "if creating a MBTiels files this is the name of the file, otherwise this is the base name of all tiles that will be written. 
The filename may contain '%x' and '%y' which will be replaced with the tilenumbers at zoom 13") .required().build(); Options options = new Options(); options.addOption(helpOption); options.addOption(verboseOption); options.addOption(timingOption); options.addOption(metadataOption); options.addOption(completeMPOption); options.addOption(mbTilesOption); options.addOption(maxFilesOption); options.addOption(borderOption); options.addOption(polygonOption); options.addOption(dateOption); options.addOption(sizeOption); options.addOption(inputOption); options.addOption(outputOption); CommandLineParser parser = new DefaultParser(); try { // parse the command line arguments CommandLine line = parser.parse(options, args); if (line.hasOption("h")) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("mapsplit", options); return; } if (line.hasOption("v")) { verbose = true; } if (line.hasOption("t")) { timing = true; } if (line.hasOption("m")) { metadata = true; } if (line.hasOption("c")) { completeRelations = true; } if (line.hasOption("x")) { mbTiles = true; } if (line.hasOption("f")) { String tmp = line.getOptionValue("maxfiles"); maxFiles = Integer.valueOf(tmp); } if (line.hasOption("d")) { dateFile = line.getOptionValue("date"); } if (line.hasOption("p")) { polygonFile = line.getOptionValue("ploygon"); } if (line.hasOption("s")) { String tmp = line.getOptionValue("size"); String[] vals = tmp.split(","); for (int j = 0; j < 3; j++) { mapSizes[j] = Integer.valueOf(vals[j]); } } if (line.hasOption("b")) { String tmp = line.getOptionValue("border"); try { border = Double.valueOf(tmp); if (border < 0) { border = 0; } if (border > 1) { border = 1; } } catch (NumberFormatException e) { System.err.println("Could not parse border parameter, falling back to defaults"); } } if (line.hasOption("i")) { inputFile = line.getOptionValue("input"); } if (line.hasOption("o")) { outputBase = line.getOptionValue("output"); } } catch (ParseException exp) { HelpFormatter formatter = new HelpFormatter(); formatter.printHelp("mapsplit", options); return; } // Date-setup as fall-back option DateFormat df = DateFormat.getDateTimeInstance(); appointmentDate = new Date(0); if (dateFile == null && verbose) { System.out.println("No datefile given. Writing all available tiles."); } else if (dateFile != null) { File file = new File(dateFile); if (file.exists()) { DataInputStream dis = new DataInputStream(new FileInputStream(file)); String line = dis.readUTF(); if (line != null) { try { appointmentDate = df.parse(line); } catch (java.text.ParseException pe) { if (verbose) { System.out.println("Could not parse datefile."); } } } dis.close(); } else if (verbose) { System.out.println("Datefile does not exist, writing all tiles"); } } if (verbose) { System.out.println("Reading: " + inputFile); System.out.println("Writing: " + outputBase); } // Actually run the splitter... Date latest = run(inputFile, outputBase, polygonFile, mapSizes, maxFiles, border, appointmentDate, metadata, verbose, timing, completeRelations, mbTiles); if (verbose) { System.out.println("Last changes to the map had been done on " + df.format(latest)); } if (dateFile != null) { try (DataOutputStream dos = new DataOutputStream(new FileOutputStream(dateFile));) { dos.writeUTF(df.format(latest)); } } } }
Proper fix for https://github.com/PedaB/mapsplit/issues/2

Relation members should only be added to tiles if we are in completeRelations mode. The old behaviour led to ways being added without their nodes.
src/main/java/MapSplit.java
Proper fix for https://github.com/PedaB/mapsplit/issues/2
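Illustration (not part of the commit record above): a minimal sketch of the guard that the commit message describes, reusing the field and method names that appear in the new MapSplit.java contents. It is an excerpt-style reading aid under those assumptions, not the authoritative patch.

// Only in completeRelations mode (extraWayMap was created in the constructor)
// are relation members pushed into all of the relation's tiles; ways are also
// remembered so a second pass can pull in their nodes.
if (extraWayMap != null) {
    for (RelationMember m : r.getMembers()) {
        switch (m.getMemberType()) {
        case Node:
            nmap.update(m.getMemberId(), tileList);
            break;
        case Way:
            wmap.update(m.getMemberId(), tileList);
            extraWayMap.put(m.getMemberId(), tileList);
            break;
        case Relation:
            rmap.update(m.getMemberId(), tileList);
            break;
        case Bound:
            break;
        }
    }
}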
Java
mpl-2.0
cb334eff5809dcb3deb837b8a5fcbccd0b641f4c
0
the8472/mldht,the8472/mldht
/*
 * This file is part of mlDHT.
 *
 * mlDHT is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 2 of the License, or
 * (at your option) any later version.
 *
 * mlDHT is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with mlDHT. If not, see <http://www.gnu.org/licenses/>.
 */
package lbms.plugins.mldht.kad.tasks;

import java.util.HashMap;
import java.util.Map;

import lbms.plugins.mldht.kad.DHT;
import lbms.plugins.mldht.kad.KBucket;
import lbms.plugins.mldht.kad.KBucketEntry;
import lbms.plugins.mldht.kad.Node;
import lbms.plugins.mldht.kad.RPCCall;
import lbms.plugins.mldht.kad.RPCServer;
import lbms.plugins.mldht.kad.messages.MessageBase;
import lbms.plugins.mldht.kad.messages.PingRequest;

/**
 * @author Damokles
 *
 */
public class PingRefreshTask extends Task {

    private boolean cleanOnTimeout;
    private Map<MessageBase, KBucketEntry> lookupMap;

    /**
     * @param rpc
     * @param node
     * @param bucket the bucket to refresh
     * @param cleanOnTimeout if true Nodes that fail to respond are removed. should be false for normal use.
     */
    public PingRefreshTask(RPCServer rpc, Node node, KBucket bucket, boolean cleanOnTimeout) {
        super(rpc.getDerivedID(), rpc, node);
        this.cleanOnTimeout = cleanOnTimeout;
        if (cleanOnTimeout) {
            lookupMap = new HashMap<MessageBase, KBucketEntry>();
        }

        if (bucket != null) {
            for (KBucketEntry e : bucket.getEntries()) {
                if (e.isQuestionable() || cleanOnTimeout) {
                    todo.add(e);
                }
            }
        }
    }

    /* (non-Javadoc)
     * @see lbms.plugins.mldht.kad.Task#callFinished(lbms.plugins.mldht.kad.RPCCallBase, lbms.plugins.mldht.kad.messages.MessageBase)
     */
    @Override
    void callFinished(RPCCall c, MessageBase rsp) {
        if (cleanOnTimeout) {
            synchronized (lookupMap) {
                lookupMap.remove(c.getRequest());
            }
        }
    }

    /* (non-Javadoc)
     * @see lbms.plugins.mldht.kad.Task#callTimeout(lbms.plugins.mldht.kad.RPCCallBase)
     */
    @Override
    void callTimeout(RPCCall c) {
        if (cleanOnTimeout) {
            MessageBase mb = c.getRequest();
            synchronized (lookupMap) {
                if (lookupMap.containsKey(mb)) {
                    KBucketEntry e = lookupMap.remove(mb);
                    KBucket bucket = node.findBucketForId(e.getID()).getBucket();
                    if (bucket != null) {
                        DHT.logDebug("Removing invalid entry from cache.");
                        bucket.removeEntryIfBad(e, true);
                    }
                }
            }
        }
    }

    /* (non-Javadoc)
     * @see lbms.plugins.mldht.kad.Task#update()
     */
    @Override
    void update() {
        // go over the todo list and send ping
        // until we have nothing left
        synchronized (todo) {
            while (!todo.isEmpty() && canDoRequest()) {
                KBucketEntry e = todo.first();
                todo.remove(e);

                if (e.isGood()) {
                    // Node responded in the meantime
                    continue;
                }

                PingRequest pr = new PingRequest();
                pr.setDestination(e.getAddress());
                if (cleanOnTimeout) {
                    synchronized (lookupMap) {
                        lookupMap.put(pr, e);
                    }
                }
                rpcCall(pr, e.getID(), null);
            }
        }
    }

    @Override
    protected boolean isDone() {
        return todo.isEmpty() && getNumOutstandingRequests() == 0 && !isFinished();
    }
}
src/lbms/plugins/mldht/kad/tasks/PingRefreshTask.java
/* * This file is part of mlDHT. * * mlDHT is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * mlDHT is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with mlDHT. If not, see <http://www.gnu.org/licenses/>. */ package lbms.plugins.mldht.kad.tasks; import java.util.HashMap; import java.util.Map; import lbms.plugins.mldht.kad.DHT; import lbms.plugins.mldht.kad.KBucket; import lbms.plugins.mldht.kad.KBucketEntry; import lbms.plugins.mldht.kad.Node; import lbms.plugins.mldht.kad.RPCCall; import lbms.plugins.mldht.kad.RPCServer; import lbms.plugins.mldht.kad.messages.MessageBase; import lbms.plugins.mldht.kad.messages.PingRequest; /** * @author Damokles * */ public class PingRefreshTask extends Task { private boolean cleanOnTimeout; private Map<MessageBase, KBucketEntry> lookupMap; /** * @param rpc * @param node * @param bucket the bucket to refresh * @param cleanOnTimeout if true Nodes that fail to respond are removed. should be false for normal use. */ public PingRefreshTask (RPCServer rpc, Node node, KBucket bucket, boolean cleanOnTimeout) { super(node.getRootID(),rpc, node); this.cleanOnTimeout = cleanOnTimeout; if (cleanOnTimeout) { lookupMap = new HashMap<MessageBase, KBucketEntry>(); } if (bucket != null) { for (KBucketEntry e : bucket.getEntries()) { if (e.isQuestionable() || cleanOnTimeout) { todo.add(e); } } } } /* (non-Javadoc) * @see lbms.plugins.mldht.kad.Task#callFinished(lbms.plugins.mldht.kad.RPCCallBase, lbms.plugins.mldht.kad.messages.MessageBase) */ @Override void callFinished (RPCCall c, MessageBase rsp) { if (cleanOnTimeout) { synchronized (lookupMap) { lookupMap.remove(c.getRequest()); } } } /* (non-Javadoc) * @see lbms.plugins.mldht.kad.Task#callTimeout(lbms.plugins.mldht.kad.RPCCallBase) */ @Override void callTimeout (RPCCall c) { if (cleanOnTimeout) { MessageBase mb = c.getRequest(); synchronized (lookupMap) { if (lookupMap.containsKey(mb)) { KBucketEntry e = lookupMap.remove(mb); KBucket bucket = node.findBucketForId(e.getID()).getBucket(); if (bucket != null) { DHT.logDebug("Removing invalid entry from cache."); bucket.removeEntryIfBad(e, true); } } } } } /* (non-Javadoc) * @see lbms.plugins.mldht.kad.Task#update() */ @Override void update () { // go over the todo list and send ping // until we have nothing left synchronized (todo) { while (!todo.isEmpty() && canDoRequest()) { KBucketEntry e = todo.first(); todo.remove(e); if (e.isGood()) { //Node responded in the meantime continue; } PingRequest pr = new PingRequest(); pr.setDestination(e.getAddress()); if (cleanOnTimeout) { synchronized (lookupMap) { lookupMap.put(pr, e); } } rpcCall(pr,e.getID(),null); } } } @Override protected boolean isDone() { return todo.isEmpty() && getNumOutstandingRequests() == 0 && !isFinished(); } }
Just some aesthetics for the log files: give ping tasks a different target key, even though it is not really relevant for the pings themselves.
src/lbms/plugins/mldht/kad/tasks/PingRefreshTask.java
Just some aesthetics for the log files: give ping tasks a different target key, even though it is not really relevant for the pings themselves.
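For orientation, the whole change in the PingRefreshTask record above is a single constructor line; the old and new contents differ only in where the task's target key comes from:

// old contents: every ping refresh task was keyed on the node's root ID
super(node.getRootID(), rpc, node);

// new contents: key the task on the RPC server's derived ID instead, purely so the
// tasks are easier to tell apart in the log files; the key is otherwise irrelevant to the pings
super(rpc.getDerivedID(), rpc, node);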
Java
agpl-3.0
78ad9d176229b391dfc1c519b86cf3e1cf6d8b50
0
kumarrus/voltdb,deerwalk/voltdb,migue/voltdb,creative-quant/voltdb,ingted/voltdb,flybird119/voltdb,deerwalk/voltdb,VoltDB/voltdb,kumarrus/voltdb,kumarrus/voltdb,kobronson/cs-voltdb,migue/voltdb,deerwalk/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,zuowang/voltdb,migue/voltdb,wolffcm/voltdb,kobronson/cs-voltdb,paulmartel/voltdb,zuowang/voltdb,deerwalk/voltdb,creative-quant/voltdb,kobronson/cs-voltdb,paulmartel/voltdb,zuowang/voltdb,VoltDB/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,flybird119/voltdb,ingted/voltdb,ingted/voltdb,paulmartel/voltdb,kobronson/cs-voltdb,migue/voltdb,VoltDB/voltdb,VoltDB/voltdb,wolffcm/voltdb,deerwalk/voltdb,kumarrus/voltdb,deerwalk/voltdb,kumarrus/voltdb,wolffcm/voltdb,wolffcm/voltdb,kumarrus/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,paulmartel/voltdb,flybird119/voltdb,creative-quant/voltdb,creative-quant/voltdb,creative-quant/voltdb,kumarrus/voltdb,kobronson/cs-voltdb,wolffcm/voltdb,simonzhangsm/voltdb,creative-quant/voltdb,migue/voltdb,kobronson/cs-voltdb,VoltDB/voltdb,flybird119/voltdb,flybird119/voltdb,paulmartel/voltdb,ingted/voltdb,simonzhangsm/voltdb,flybird119/voltdb,paulmartel/voltdb,zuowang/voltdb,flybird119/voltdb,migue/voltdb,wolffcm/voltdb,migue/voltdb,deerwalk/voltdb,VoltDB/voltdb,zuowang/voltdb,ingted/voltdb,VoltDB/voltdb,deerwalk/voltdb,flybird119/voltdb,simonzhangsm/voltdb,migue/voltdb,wolffcm/voltdb,paulmartel/voltdb,ingted/voltdb,zuowang/voltdb,kobronson/cs-voltdb,zuowang/voltdb,kobronson/cs-voltdb,zuowang/voltdb,simonzhangsm/voltdb,kumarrus/voltdb,creative-quant/voltdb,wolffcm/voltdb,ingted/voltdb,ingted/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2013 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb; import java.io.File; import java.io.PrintStream; import java.io.PrintWriter; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.TimeZone; import org.voltcore.logging.VoltLogger; import org.voltcore.messaging.HostMessenger; import org.voltcore.utils.PortGenerator; import org.voltdb.types.TimestampType; import org.voltdb.utils.MiscUtils; import org.voltdb.utils.PlatformProperties; /** * VoltDB provides main() for the VoltDB server */ public class VoltDB { /** Global constants */ public static final int DEFAULT_PORT = 21212; public static final int DEFAULT_ADMIN_PORT = 21211; public static final int DEFAULT_INTERNAL_PORT = 3021; public static final int DEFAULT_ZK_PORT = 2181; public static final String DEFAULT_EXTERNAL_INTERFACE = ""; public static final String DEFAULT_INTERNAL_INTERFACE = ""; public static final int DEFAULT_DR_PORT = 5555; public static final int BACKWARD_TIME_FORGIVENESS_WINDOW_MS = 3000; public static final int INITIATOR_SITE_ID = 0; public static final int SITES_TO_HOST_DIVISOR = 100; public static final int MAX_SITES_PER_HOST = 128; // Utility to calculate whether Iv2 is enabled or not for test cases. // There are several ways to enable Iv2, of course. Ideally, use a cluster // command line flag (enableiv2). Second best, use the VOLT_ENABLEIV2 // environment variable. // // IMPORTANT: To determine if Iv2 is enabled at runtime, // call RealVoltDB.isIV2Enabled(); public static boolean checkTestEnvForIv2() { String iv2 = System.getenv().get("VOLT_ENABLEIV2"); if (iv2 == null) { iv2 = System.getProperty("VOLT_ENABLEIV2"); } if (iv2 != null && iv2.equalsIgnoreCase("false")) { return false; } else { return true; } } // The name of the SQLStmt implied by a statement procedure's sql statement. 
public static final String ANON_STMT_NAME = "sql"; public enum START_ACTION { CREATE, RECOVER, REJOIN, LIVE_REJOIN, JOIN } public static boolean createForRejoin(VoltDB.START_ACTION startAction) { return startAction == VoltDB.START_ACTION.REJOIN || startAction == VoltDB.START_ACTION.LIVE_REJOIN; } public static final Charset UTF8ENCODING = Charset.forName("UTF-8"); //The GMT time zone you know and love public static final TimeZone GMT_TIMEZONE = TimeZone.getTimeZone("GMT+0"); //The time zone Volt is actually using, currently always GMT public static final TimeZone VOLT_TIMEZONE = GMT_TIMEZONE; //Whatever the default timezone was for this locale before we replaced it public static final TimeZone REAL_DEFAULT_TIMEZONE; // ODBC Datetime Format // if you need microseconds, you'll have to change this code or // export a bigint representing microseconds since an epoch public static final String ODBC_DATE_FORMAT_STRING = "yyyy-MM-dd HH:mm:ss.SSS"; // if VoltDB is running in your process, prepare to use UTC (GMT) timezone public synchronized static void setDefaultTimezone() { TimeZone.setDefault(GMT_TIMEZONE); } static { REAL_DEFAULT_TIMEZONE = TimeZone.getDefault(); setDefaultTimezone(); } /** Encapsulates VoltDB configuration parameters */ public static class Configuration { public List<Integer> m_ipcPorts = Collections.synchronizedList(new LinkedList<Integer>()); protected static final VoltLogger hostLog = new VoltLogger("HOST"); /** use normal JNI backend or optional IPC or HSQLDB backends */ public BackendTarget m_backend = BackendTarget.NATIVE_EE_JNI; /** leader hostname */ public String m_leader = null; /** name of the m_catalog JAR file */ public String m_pathToCatalog = null; /** name of the deployment file */ public String m_pathToDeployment = null; /** name of the license file, for commercial editions */ public String m_pathToLicense = "license.xml"; /** false if voltdb.so shouldn't be loaded (for example if JVM is * started by voltrun). */ public boolean m_noLoadLibVOLTDB = false; public String m_zkInterface = "127.0.0.1:" + VoltDB.DEFAULT_ZK_PORT; /** port number for the first client interface for each server */ public int m_port = DEFAULT_PORT; /** override for the admin port number in the deployment file */ public int m_adminPort = -1; /** port number to use to build intra-cluster mesh */ public int m_internalPort = DEFAULT_INTERNAL_PORT; /** interface to listen to clients on (default: any) */ public String m_externalInterface = DEFAULT_EXTERNAL_INTERFACE; /** interface to use for backchannel comm (default: any) */ public String m_internalInterface = DEFAULT_INTERNAL_INTERFACE; /** port number to use for DR channel (override in the deployment file) */ public int m_drAgentPortStart = -1; /** HTTP port can't be set here, but eventually value will be reflected here */ public int m_httpPort = Integer.MAX_VALUE; /** running the enterprise version? */ public final boolean m_isEnterprise = org.voltdb.utils.MiscUtils.isPro(); public int m_deadHostTimeoutMS = 10000; /** start up action */ public START_ACTION m_startAction = null; /** start mode: normal, paused*/ public OperationMode m_startMode = OperationMode.RUNNING; /** replication role. */ public ReplicationRole m_replicationRole = ReplicationRole.NONE; /** * At rejoin time an interface will be selected. It will be the * internal interface specified on the command line. If none is specified * then the interface that the system selects for connecting to * the pre-existing node is used. 
It is then stored here * so it can be used for receiving connections by RecoverySiteDestinationProcessor */ public String m_selectedRejoinInterface = null; /** * Whether or not adhoc queries should generate debugging output */ public boolean m_quietAdhoc = false; public final File m_commitLogDir = new File("/tmp"); /** * How much (ms) to skew the timestamp generation for * the TransactionIdManager. Should be ZERO except for tests. */ public long m_timestampTestingSalt = 0; /** true if we're running the rejoin tests. Not used in production. */ public boolean m_isRejoinTest = false; /** set to true to run with iv2 initiation. Good Luck! */ public boolean m_enableIV2 = true; public final Queue<String> m_networkCoreBindings = new ArrayDeque<String>(); public final Queue<String> m_computationCoreBindings = new ArrayDeque<String>(); public final Queue<String> m_executionCoreBindings = new ArrayDeque<String>(); public String m_commandLogBinding = null; public Configuration() { m_enableIV2 = VoltDB.checkTestEnvForIv2(); // Set start action create. The cmd line validates that an action is specified, however, // defaulting it to create for local cluster test scripts m_startAction = VoltDB.START_ACTION.CREATE; } /** Behavior-less arg used to differentiate command lines from "ps" */ public String m_tag; public int getZKPort() { return MiscUtils.getPortFromHostnameColonPort(m_zkInterface, VoltDB.DEFAULT_ZK_PORT); } public Configuration(PortGenerator ports) { // Default iv2 configuration to the environment settings. // Let explicit command line override the environment. m_enableIV2 = VoltDB.checkTestEnvForIv2(); m_port = ports.nextClient(); m_adminPort = ports.nextAdmin(); m_internalPort = ports.next(); m_zkInterface = "127.0.0.1:" + ports.next(); // Set start action create. The cmd line validates that an action is specified, however, // defaulting it to create for local cluster test scripts m_startAction = VoltDB.START_ACTION.CREATE; } public Configuration(String args[]) { String arg; // let the command line override the environment setting for enable iv2. m_enableIV2 = VoltDB.checkTestEnvForIv2(); for (int i=0; i < args.length; ++i) { arg = args[i]; // Some LocalCluster ProcessBuilder instances can result in an empty string // in the array args. Ignore them. 
if (arg.equals("")) { continue; } // Handle request for help/usage if (arg.equalsIgnoreCase("-h") || arg.equalsIgnoreCase("--help")) { usage(System.out); System.exit(-1); } if (arg.equals("noloadlib")) { m_noLoadLibVOLTDB = true; } else if (arg.equals("ipc")) { m_backend = BackendTarget.NATIVE_EE_IPC; } else if (arg.equals("jni")) { m_backend = BackendTarget.NATIVE_EE_JNI; } else if (arg.equals("hsqldb")) { m_backend = BackendTarget.HSQLDB_BACKEND; } else if (arg.equals("valgrind")) { m_backend = BackendTarget.NATIVE_EE_VALGRIND_IPC; } else if (arg.equals("quietadhoc")) { m_quietAdhoc = true; } // handle from the command line as two strings <catalog> <filename> else if (arg.equals("port")) { m_port = Integer.parseInt(args[++i]); } else if (arg.startsWith("port ")) { m_port = Integer.parseInt(arg.substring("port ".length())); } else if (arg.equals("adminport")) { m_adminPort = Integer.parseInt(args[++i]); } else if (arg.startsWith("adminport ")) { m_adminPort = Integer.parseInt(arg.substring("adminport ".length())); } else if (arg.equals("internalport")) { m_internalPort = Integer.parseInt(args[++i]); } else if (arg.startsWith("internalport ")) { m_internalPort = Integer.parseInt(arg.substring("internalport ".length())); } else if (arg.equals("replicationport")) { m_drAgentPortStart = Integer.parseInt(args[++i]); } else if (arg.startsWith("replicationport ")) { m_drAgentPortStart = Integer.parseInt(arg.substring("replicationport ".length())); } else if (arg.startsWith("zkport")) { m_zkInterface = "127.0.0.1:" + args[++i]; } else if (arg.equals("externalinterface")) { m_externalInterface = args[++i].trim(); } else if (arg.startsWith("externalinterface ")) { m_externalInterface = arg.substring("externalinterface ".length()).trim(); } else if (arg.equals("internalinterface")) { m_internalInterface = args[++i].trim(); } else if (arg.startsWith("internalinterface ")) { m_internalInterface = arg.substring("internalinterface ".length()).trim(); } else if (arg.startsWith("networkbindings")) { for (String core : args[++i].split(",")) { m_networkCoreBindings.offer(core); } System.out.println("Network bindings are " + m_networkCoreBindings); } else if (arg.startsWith("computationbindings")) { for (String core : args[++i].split(",")) { m_computationCoreBindings.offer(core); } System.out.println("Computation bindings are " + m_computationCoreBindings); } else if (arg.startsWith("executionbindings")) { for (String core : args[++i].split(",")) { m_executionCoreBindings.offer(core); } System.out.println("Execution bindings are " + m_executionCoreBindings); } else if (arg.startsWith("commandlogbinding")) { String binding = args[++i]; if (binding.split(",").length > 1) { throw new RuntimeException("Command log only supports a single set of bindings"); } m_commandLogBinding = binding; System.out.println("Commanglog binding is " + m_commandLogBinding); } else if (arg.equals("host") || arg.equals("leader")) { m_leader = args[++i].trim(); } else if (arg.startsWith("host")) { m_leader = arg.substring("host ".length()).trim(); } else if (arg.startsWith("leader")) { m_leader = arg.substring("leader ".length()).trim(); } // synonym for "rejoin host" for backward compatibility else if (arg.equals("rejoinhost")) { m_startAction = START_ACTION.REJOIN; m_leader = args[++i].trim(); } else if (arg.startsWith("rejoinhost ")) { m_startAction = START_ACTION.REJOIN; m_leader = arg.substring("rejoinhost ".length()).trim(); } else if (arg.equals("create")) { m_startAction = START_ACTION.CREATE; } else if (arg.equals("recover")) 
{ m_startAction = START_ACTION.RECOVER; } else if (arg.equals("rejoin")) { m_startAction = START_ACTION.REJOIN; } else if (arg.startsWith("live rejoin")) { m_startAction = START_ACTION.LIVE_REJOIN; } else if (arg.equals("live") && args.length > i + 1 && args[++i].trim().equals("rejoin")) { m_startAction = START_ACTION.LIVE_REJOIN; } else if (arg.startsWith("join")) { m_startAction = START_ACTION.JOIN; } else if (arg.equals("replica")) { // We're starting a replica, so we must create a new database. m_startAction = START_ACTION.CREATE; m_replicationRole = ReplicationRole.REPLICA; } else if (arg.equals("dragentportstart")) { m_drAgentPortStart = Integer.parseInt(args[++i]); } // handle timestampsalt else if (arg.equals("timestampsalt")) { m_timestampTestingSalt = Long.parseLong(args[++i]); } else if (arg.startsWith("timestampsalt ")) { m_timestampTestingSalt = Long.parseLong(arg.substring("timestampsalt ".length())); } // handle behaviorless tag field else if (arg.equals("tag")) { m_tag = args[++i]; } else if (arg.startsWith("tag ")) { m_tag = arg.substring("tag ".length()); } else if (arg.equals("catalog")) { m_pathToCatalog = args[++i]; } // and from ant as a single string "m_catalog filename" else if (arg.startsWith("catalog ")) { m_pathToCatalog = arg.substring("catalog ".length()); } else if (arg.equals("deployment")) { m_pathToDeployment = args[++i]; } else if (arg.equals("license")) { m_pathToLicense = args[++i]; } else if (arg.equalsIgnoreCase("ipcports")) { String portList = args[++i]; String ports[] = portList.split(","); for (String port : ports) { m_ipcPorts.add(Integer.valueOf(port)); } } else if (arg.equals("enableiv2")) { m_enableIV2 = true; } else { hostLog.fatal("Unrecognized option to VoltDB: " + arg); usage(); System.exit(-1); } } // If no action is specified, issue an error. if (null == m_startAction) { if (org.voltdb.utils.MiscUtils.isPro()) { hostLog.fatal("You must specify a startup action, either create, recover, replica, rejoin, or compile."); } else { hostLog.fatal("You must specify a startup action, either create, recover, rejoin, or compile."); } usage(); System.exit(-1); } // ENG-3035 Warn if 'recover' action has a catalog since we won't // be using it. Only cover the 'recover' action since 'start' sometimes // acts as 'recover' and other times as 'create'. if (m_startAction == START_ACTION.RECOVER && m_pathToCatalog != null) { hostLog.warn("Catalog is ignored for 'recover' action."); } /* * ENG-2815 If deployment is null (the user wants the default) and * the start action is not rejoin and leader is null, supply the * only valid leader value ("localhost"). */ if (m_leader == null && m_pathToDeployment == null && (m_startAction != START_ACTION.REJOIN && m_startAction != START_ACTION.LIVE_REJOIN)) { m_leader = "localhost"; } } /** * Validates configuration settings and logs errors to the host log. * You typically want to have the system exit when this fails, but * this functionality is left outside of the method so that it is testable. * @return Returns true if all required configuration settings are present. 
*/ public boolean validate() { boolean isValid = true; if (m_startAction == null) { isValid = false; hostLog.fatal("The startup action is missing (either create, recover, replica or rejoin)."); } if (m_startAction == START_ACTION.CREATE && m_pathToCatalog == null) { isValid = false; hostLog.fatal("The catalog location is missing."); } if (m_leader == null) { isValid = false; hostLog.fatal("The hostname is missing."); } if (m_backend.isIPC) { if (m_ipcPorts.isEmpty()) { isValid = false; hostLog.fatal("Specified an IPC backend but did not supply a , " + " separated list of ports via ipcports param"); } } // require deployment file location if (m_startAction != START_ACTION.REJOIN && m_startAction != START_ACTION.LIVE_REJOIN && m_startAction != START_ACTION.JOIN) { // require deployment file location (null is allowed to receive default deployment) if (m_pathToDeployment != null && m_pathToDeployment.isEmpty()) { isValid = false; hostLog.fatal("The deployment file location is empty."); } if (m_replicationRole == ReplicationRole.REPLICA) { if (m_startAction == START_ACTION.RECOVER) { isValid = false; hostLog.fatal("Replica cluster only supports create database"); } else { m_startAction = START_ACTION.CREATE; } } } else { if (!m_isEnterprise && m_startAction == START_ACTION.LIVE_REJOIN) { // pauseless rejoin is only available in pro isValid = false; hostLog.fatal("Live rejoin is only available in the Enterprise Edition"); } } return isValid; } /** * Prints a usage message on stderr. */ public void usage() { usage(System.err); } /** * Prints a usage message on the designated output stream. */ public void usage(PrintStream os) { // N.B: this text is user visible. It intentionally does NOT reveal options not interesting to, say, the // casual VoltDB operator. Please do not reveal options not documented in the VoltDB documentation set. (See // GettingStarted.pdf). String message = ""; if (org.voltdb.utils.MiscUtils.isPro()) { message = "Usage: voltdb create catalog <catalog.jar> [host <hostname>] [deployment <deployment.xml>] license <license.xml>\n" + " voltdb replica catalog <catalog.jar> [host <hostname>] [deployment <deployment.xml>] license <license.xml> \n" + " voltdb recover [host <hostname>] [deployment <deployment.xml>] license <license.xml>\n" + " voltdb [live] rejoin host <hostname>\n"; } else { message = "Usage: voltdb create catalog <catalog.jar> [host <hostname>] [deployment <deployment.xml>]\n" + " voltdb recover [host <hostname>] [deployment <deployment.xml>]\n" + " voltdb rejoin host <hostname>\n"; } message += " voltdb compile [<option> ...] [<ddl-file> ...] (run voltdb compile -h for more details)\n"; os.print(message); // Log it to log4j as well, which will capture the output to a file for (hopefully never) cases where VEM has issues (it generates command lines). hostLog.info(message); // Don't bother logging these for log4j, only dump them to the designated stream. os.println("If no deployment is specified, a default 1 node cluster deployment will be configured."); } /** * Helper to set the path for compiled jar files. * Could also live in VoltProjectBuilder but any code that creates * a catalog will probably start VoltDB with a Configuration * object. Perhaps this is more convenient? * @return the path chosen for the catalog. 
*/ public String setPathToCatalogForTest(String jarname) { m_pathToCatalog = getPathToCatalogForTest(jarname); return m_pathToCatalog; } public static String getPathToCatalogForTest(String jarname) { String answer = jarname; // first try to get the "right" place to put the thing if (System.getenv("TEST_DIR") != null) { answer = System.getenv("TEST_DIR") + File.separator + jarname; // returns a full path, like a boss return new File(answer).getAbsolutePath(); } // try to find an obj directory String userdir = System.getProperty("user.dir"); String buildMode = System.getProperty("build"); if (buildMode == null) buildMode = "release"; assert(buildMode.length() > 0); if (userdir != null) { File userObjDir = new File(userdir + File.separator + "obj" + File.separator + buildMode); if (userObjDir.exists() && userObjDir.isDirectory() && userObjDir.canWrite()) { File testobjectsDir = new File(userObjDir.getPath() + File.separator + "testobjects"); if (!testobjectsDir.exists()) { boolean created = testobjectsDir.mkdir(); assert(created); } assert(testobjectsDir.isDirectory()); assert(testobjectsDir.canWrite()); return testobjectsDir.getAbsolutePath() + File.separator + jarname; } } // otherwise use a local dir File testObj = new File("testobjects"); if (!testObj.exists()) { testObj.mkdir(); } assert(testObj.isDirectory()); assert(testObj.canWrite()); return testObj.getAbsolutePath() + File.separator + jarname; } } /* helper functions to access current configuration values */ public static boolean getLoadLibVOLTDB() { return !(m_config.m_noLoadLibVOLTDB); } public static BackendTarget getEEBackendType() { return m_config.m_backend; } /* * Create a file that starts with the supplied message that contains * human readable stack traces for all java threads in the current process. */ public static void dropStackTrace(String message) { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd-HH:mm:ss.SSSZ"); String dateString = sdf.format(new Date()); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); HostMessenger hm = VoltDB.instance().getHostMessenger(); int hostId = 0; if (hm != null) { hostId = hm.getHostId(); } String root = catalogContext != null ? catalogContext.cluster.getVoltroot() + File.separator : ""; try { PrintWriter writer = new PrintWriter(root + "host" + hostId + "-" + dateString + ".txt"); writer.println(message); printStackTraces(writer); } catch (Exception e) { try { VoltLogger log = new VoltLogger("HOST"); log.error("Error while dropping stack trace for \"" + message + "\"", e); } catch (RuntimeException rt_ex) { e.printStackTrace(); } } } /* * Print stack traces for all java threads in the current process to the supplied writer */ public static void printStackTraces(PrintWriter writer) { printStackTraces(writer, null); } /* * Print stack traces for all threads in the process to the supplied writer. 
* If a List is supplied then the stack frames for the current thread will be placed in it */ public static void printStackTraces(PrintWriter writer, List<String> currentStacktrace) { if (currentStacktrace == null) { currentStacktrace = new ArrayList<String>(); } Map<Thread, StackTraceElement[]> traces = Thread.getAllStackTraces(); StackTraceElement[] myTrace = traces.get(Thread.currentThread()); for (StackTraceElement ste : myTrace) { currentStacktrace.add(ste.toString()); } writer.println(); writer.println("****** Current Thread ****** "); for (String currentStackElem : currentStacktrace) { writer.println(currentStackElem); } writer.println("****** All Threads ******"); Iterator<Thread> it = traces.keySet().iterator(); while (it.hasNext()) { Thread key = it.next(); writer.println(); StackTraceElement[] st = traces.get(key); writer.println("****** " + key + " ******"); for (StackTraceElement ste : st) writer.println(ste); } } /** * Exit the process with an error message, optionally with a stack trace. */ public static void crashLocalVoltDB(String errMsg, boolean stackTrace, Throwable thrown) { wasCrashCalled = true; crashMessage = errMsg; if (ignoreCrash) { throw new AssertionError("Faux crash of VoltDB successful."); } // Even if the logger is null, don't stop. We want to log the stack trace and // any other pertinent information to a .dmp file for crash diagnosis List<String> currentStacktrace = new ArrayList<String>(); currentStacktrace.add("Stack trace from crashLocalVoltDB() method:"); // Create a special dump file to hold the stack trace try { TimestampType ts = new TimestampType(new java.util.Date()); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); String root = catalogContext != null ? catalogContext.cluster.getVoltroot() + File.separator : ""; PrintWriter writer = new PrintWriter(root + "voltdb_crash" + ts.toString().replace(' ', '-') + ".txt"); writer.println("Time: " + ts); writer.println("Message: " + errMsg); writer.println(); writer.println("Platform Properties:"); PlatformProperties pp = PlatformProperties.getPlatformProperties(); String[] lines = pp.toLogLines().split("\n"); for (String line : lines) { writer.println(line.trim()); } if (thrown != null) { writer.println(); writer.println("****** Exception Thread ****** "); thrown.printStackTrace(writer); } printStackTraces(writer, currentStacktrace); writer.close(); } catch (Throwable err) { // shouldn't fail, but.. err.printStackTrace(); } VoltLogger log = null; try { log = new VoltLogger("HOST"); } catch (RuntimeException rt_ex) { /* ignore */ } if (log != null) { log.fatal(errMsg); if (thrown != null) { if (stackTrace) { log.fatal("Fatal exception", thrown); } else { log.fatal(thrown.toString()); } } else { if (stackTrace) { for (String currentStackElem : currentStacktrace) { log.fatal(currentStackElem); } } } } else { System.err.println(errMsg); if (thrown != null) { if (stackTrace) { thrown.printStackTrace(); } else { System.err.println(thrown.toString()); } } else { if (stackTrace) { for (String currentStackElem : currentStacktrace) { System.err.println(currentStackElem); } } } } System.err.println("VoltDB has encountered an unrecoverable error and is exiting."); System.err.println("The log may contain additional information."); System.exit(-1); } /* * For tests that causes failures, * allow them stop the crash and inspect. 
*/ public static boolean ignoreCrash = false; public static boolean wasCrashCalled = false; public static String crashMessage; /** * Exit the process with an error message, optionally with a stack trace. * Also notify all connected peers that the node is going down. */ public static void crashGlobalVoltDB(String errMsg, boolean stackTrace, Throwable t) { wasCrashCalled = true; crashMessage = errMsg; if (ignoreCrash) { throw new AssertionError("Faux crash of VoltDB successful."); } try { instance().getHostMessenger().sendPoisonPill(errMsg); } catch (Exception e) { e.printStackTrace(); } try { Thread.sleep(500); } catch (InterruptedException e) {} crashLocalVoltDB(errMsg, stackTrace, t); } /** * Entry point for the VoltDB server process. * @param args Requires catalog and deployment file locations. */ public static void main(String[] args) { //Thread.setDefaultUncaughtExceptionHandler(new VoltUncaughtExceptionHandler()); Configuration config = new Configuration(args); try { if (!config.validate()) { config.usage(); System.exit(-1); } else { initialize(config); instance().run(); } } catch (OutOfMemoryError e) { String errmsg = "VoltDB Main thread: ran out of Java memory. This node will shut down."; VoltDB.crashLocalVoltDB(errmsg, false, e); } } /** * Initialize the VoltDB server. * @param config The VoltDB.Configuration to use to initialize the server. */ public static void initialize(VoltDB.Configuration config) { m_config = config; instance().initialize(config); } /** * Retrieve a reference to the object implementing VoltDBInterface. When * running a real server (and not a test harness), this instance will only * be useful after calling VoltDB.initialize(). * * @return A reference to the underlying VoltDBInterface object. */ public static VoltDBInterface instance() { return singleton; } /** * Useful only for unit testing. * * Replace the default VoltDB server instance with an instance of * VoltDBInterface that is used for testing. * */ public static void replaceVoltDBInstanceForTest(VoltDBInterface testInstance) { singleton = testInstance; } @Override public Object clone() throws CloneNotSupportedException { throw new CloneNotSupportedException(); } private static VoltDB.Configuration m_config = new VoltDB.Configuration(); private static VoltDBInterface singleton = new RealVoltDB(); }
src/frontend/org/voltdb/VoltDB.java
/* This file is part of VoltDB. * Copyright (C) 2008-2013 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb; import java.io.File; import java.io.PrintStream; import java.io.PrintWriter; import java.nio.charset.Charset; import java.text.SimpleDateFormat; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Queue; import java.util.TimeZone; import org.voltcore.logging.VoltLogger; import org.voltcore.messaging.HostMessenger; import org.voltcore.utils.PortGenerator; import org.voltdb.types.TimestampType; import org.voltdb.utils.MiscUtils; import org.voltdb.utils.PlatformProperties; /** * VoltDB provides main() for the VoltDB server */ public class VoltDB { /** Global constants */ public static final int DEFAULT_PORT = 21212; public static final int DEFAULT_ADMIN_PORT = 21211; public static final int DEFAULT_INTERNAL_PORT = 3021; public static final int DEFAULT_ZK_PORT = 2181; public static final String DEFAULT_EXTERNAL_INTERFACE = ""; public static final String DEFAULT_INTERNAL_INTERFACE = ""; public static final int DEFAULT_DR_PORT = 5555; public static final int BACKWARD_TIME_FORGIVENESS_WINDOW_MS = 3000; public static final int INITIATOR_SITE_ID = 0; public static final int SITES_TO_HOST_DIVISOR = 100; public static final int MAX_SITES_PER_HOST = 128; // Utility to calculate whether Iv2 is enabled or not for test cases. // There are several ways to enable Iv2, of course. Ideally, use a cluster // command line flag (enableiv2). Second best, use the VOLT_ENABLEIV2 // environment variable. // // IMPORTANT: To determine if Iv2 is enabled at runtime, // call RealVoltDB.isIV2Enabled(); public static boolean checkTestEnvForIv2() { String iv2 = System.getenv().get("VOLT_ENABLEIV2"); if (iv2 == null) { iv2 = System.getProperty("VOLT_ENABLEIV2"); } if (iv2 != null && iv2.equalsIgnoreCase("false")) { return false; } else { return true; } } // The name of the SQLStmt implied by a statement procedure's sql statement. 
public static final String ANON_STMT_NAME = "sql"; public enum START_ACTION { CREATE, RECOVER, REJOIN, LIVE_REJOIN } public static boolean createForRejoin(VoltDB.START_ACTION startAction) { return startAction == VoltDB.START_ACTION.REJOIN || startAction == VoltDB.START_ACTION.LIVE_REJOIN; } public static final Charset UTF8ENCODING = Charset.forName("UTF-8"); //The GMT time zone you know and love public static final TimeZone GMT_TIMEZONE = TimeZone.getTimeZone("GMT+0"); //The time zone Volt is actually using, currently always GMT public static final TimeZone VOLT_TIMEZONE = GMT_TIMEZONE; //Whatever the default timezone was for this locale before we replaced it public static final TimeZone REAL_DEFAULT_TIMEZONE; // ODBC Datetime Format // if you need microseconds, you'll have to change this code or // export a bigint representing microseconds since an epoch public static final String ODBC_DATE_FORMAT_STRING = "yyyy-MM-dd HH:mm:ss.SSS"; // if VoltDB is running in your process, prepare to use UTC (GMT) timezone public synchronized static void setDefaultTimezone() { TimeZone.setDefault(GMT_TIMEZONE); } static { REAL_DEFAULT_TIMEZONE = TimeZone.getDefault(); setDefaultTimezone(); } /** Encapsulates VoltDB configuration parameters */ public static class Configuration { public List<Integer> m_ipcPorts = Collections.synchronizedList(new LinkedList<Integer>()); protected static final VoltLogger hostLog = new VoltLogger("HOST"); /** use normal JNI backend or optional IPC or HSQLDB backends */ public BackendTarget m_backend = BackendTarget.NATIVE_EE_JNI; /** leader hostname */ public String m_leader = null; /** name of the m_catalog JAR file */ public String m_pathToCatalog = null; /** name of the deployment file */ public String m_pathToDeployment = null; /** name of the license file, for commercial editions */ public String m_pathToLicense = "license.xml"; /** false if voltdb.so shouldn't be loaded (for example if JVM is * started by voltrun). */ public boolean m_noLoadLibVOLTDB = false; public String m_zkInterface = "127.0.0.1:" + VoltDB.DEFAULT_ZK_PORT; /** port number for the first client interface for each server */ public int m_port = DEFAULT_PORT; /** override for the admin port number in the deployment file */ public int m_adminPort = -1; /** port number to use to build intra-cluster mesh */ public int m_internalPort = DEFAULT_INTERNAL_PORT; /** interface to listen to clients on (default: any) */ public String m_externalInterface = DEFAULT_EXTERNAL_INTERFACE; /** interface to use for backchannel comm (default: any) */ public String m_internalInterface = DEFAULT_INTERNAL_INTERFACE; /** port number to use for DR channel (override in the deployment file) */ public int m_drAgentPortStart = -1; /** HTTP port can't be set here, but eventually value will be reflected here */ public int m_httpPort = Integer.MAX_VALUE; /** running the enterprise version? */ public final boolean m_isEnterprise = org.voltdb.utils.MiscUtils.isPro(); public int m_deadHostTimeoutMS = 10000; /** start up action */ public START_ACTION m_startAction = null; /** start mode: normal, paused*/ public OperationMode m_startMode = OperationMode.RUNNING; /** replication role. */ public ReplicationRole m_replicationRole = ReplicationRole.NONE; /** * At rejoin time an interface will be selected. It will be the * internal interface specified on the command line. If none is specified * then the interface that the system selects for connecting to * the pre-existing node is used. 
It is then stored here * so it can be used for receiving connections by RecoverySiteDestinationProcessor */ public String m_selectedRejoinInterface = null; /** * Whether or not adhoc queries should generate debugging output */ public boolean m_quietAdhoc = false; public final File m_commitLogDir = new File("/tmp"); /** * How much (ms) to skew the timestamp generation for * the TransactionIdManager. Should be ZERO except for tests. */ public long m_timestampTestingSalt = 0; /** true if we're running the rejoin tests. Not used in production. */ public boolean m_isRejoinTest = false; /** set to true to run with iv2 initiation. Good Luck! */ public boolean m_enableIV2 = true; public final Queue<String> m_networkCoreBindings = new ArrayDeque<String>(); public final Queue<String> m_computationCoreBindings = new ArrayDeque<String>(); public final Queue<String> m_executionCoreBindings = new ArrayDeque<String>(); public String m_commandLogBinding = null; public Configuration() { m_enableIV2 = VoltDB.checkTestEnvForIv2(); // Set start action create. The cmd line validates that an action is specified, however, // defaulting it to create for local cluster test scripts m_startAction = VoltDB.START_ACTION.CREATE; } /** Behavior-less arg used to differentiate command lines from "ps" */ public String m_tag; public int getZKPort() { return MiscUtils.getPortFromHostnameColonPort(m_zkInterface, VoltDB.DEFAULT_ZK_PORT); } public Configuration(PortGenerator ports) { // Default iv2 configuration to the environment settings. // Let explicit command line override the environment. m_enableIV2 = VoltDB.checkTestEnvForIv2(); m_port = ports.nextClient(); m_adminPort = ports.nextAdmin(); m_internalPort = ports.next(); m_zkInterface = "127.0.0.1:" + ports.next(); // Set start action create. The cmd line validates that an action is specified, however, // defaulting it to create for local cluster test scripts m_startAction = VoltDB.START_ACTION.CREATE; } public Configuration(String args[]) { String arg; // let the command line override the environment setting for enable iv2. m_enableIV2 = VoltDB.checkTestEnvForIv2(); for (int i=0; i < args.length; ++i) { arg = args[i]; // Some LocalCluster ProcessBuilder instances can result in an empty string // in the array args. Ignore them. 
if (arg.equals("")) { continue; } // Handle request for help/usage if (arg.equalsIgnoreCase("-h") || arg.equalsIgnoreCase("--help")) { usage(System.out); System.exit(-1); } if (arg.equals("noloadlib")) { m_noLoadLibVOLTDB = true; } else if (arg.equals("ipc")) { m_backend = BackendTarget.NATIVE_EE_IPC; } else if (arg.equals("jni")) { m_backend = BackendTarget.NATIVE_EE_JNI; } else if (arg.equals("hsqldb")) { m_backend = BackendTarget.HSQLDB_BACKEND; } else if (arg.equals("valgrind")) { m_backend = BackendTarget.NATIVE_EE_VALGRIND_IPC; } else if (arg.equals("quietadhoc")) { m_quietAdhoc = true; } // handle from the command line as two strings <catalog> <filename> else if (arg.equals("port")) { m_port = Integer.parseInt(args[++i]); } else if (arg.startsWith("port ")) { m_port = Integer.parseInt(arg.substring("port ".length())); } else if (arg.equals("adminport")) { m_adminPort = Integer.parseInt(args[++i]); } else if (arg.startsWith("adminport ")) { m_adminPort = Integer.parseInt(arg.substring("adminport ".length())); } else if (arg.equals("internalport")) { m_internalPort = Integer.parseInt(args[++i]); } else if (arg.startsWith("internalport ")) { m_internalPort = Integer.parseInt(arg.substring("internalport ".length())); } else if (arg.equals("replicationport")) { m_drAgentPortStart = Integer.parseInt(args[++i]); } else if (arg.startsWith("replicationport ")) { m_drAgentPortStart = Integer.parseInt(arg.substring("replicationport ".length())); } else if (arg.startsWith("zkport")) { m_zkInterface = "127.0.0.1:" + args[++i]; } else if (arg.equals("externalinterface")) { m_externalInterface = args[++i].trim(); } else if (arg.startsWith("externalinterface ")) { m_externalInterface = arg.substring("externalinterface ".length()).trim(); } else if (arg.equals("internalinterface")) { m_internalInterface = args[++i].trim(); } else if (arg.startsWith("internalinterface ")) { m_internalInterface = arg.substring("internalinterface ".length()).trim(); } else if (arg.startsWith("networkbindings")) { for (String core : args[++i].split(",")) { m_networkCoreBindings.offer(core); } System.out.println("Network bindings are " + m_networkCoreBindings); } else if (arg.startsWith("computationbindings")) { for (String core : args[++i].split(",")) { m_computationCoreBindings.offer(core); } System.out.println("Computation bindings are " + m_computationCoreBindings); } else if (arg.startsWith("executionbindings")) { for (String core : args[++i].split(",")) { m_executionCoreBindings.offer(core); } System.out.println("Execution bindings are " + m_executionCoreBindings); } else if (arg.startsWith("commandlogbinding")) { String binding = args[++i]; if (binding.split(",").length > 1) { throw new RuntimeException("Command log only supports a single set of bindings"); } m_commandLogBinding = binding; System.out.println("Commanglog binding is " + m_commandLogBinding); } else if (arg.equals("host") || arg.equals("leader")) { m_leader = args[++i].trim(); } else if (arg.startsWith("host")) { m_leader = arg.substring("host ".length()).trim(); } else if (arg.startsWith("leader")) { m_leader = arg.substring("leader ".length()).trim(); } // synonym for "rejoin host" for backward compatibility else if (arg.equals("rejoinhost")) { m_startAction = START_ACTION.REJOIN; m_leader = args[++i].trim(); } else if (arg.startsWith("rejoinhost ")) { m_startAction = START_ACTION.REJOIN; m_leader = arg.substring("rejoinhost ".length()).trim(); } else if (arg.equals("create")) { m_startAction = START_ACTION.CREATE; } else if (arg.equals("recover")) 
{ m_startAction = START_ACTION.RECOVER; } else if (arg.equals("rejoin")) { m_startAction = START_ACTION.REJOIN; } else if (arg.startsWith("live rejoin")) { m_startAction = START_ACTION.LIVE_REJOIN; } else if (arg.equals("live") && args.length > i + 1 && args[++i].trim().equals("rejoin")) { m_startAction = START_ACTION.LIVE_REJOIN; } else if (arg.equals("replica")) { // We're starting a replica, so we must create a new database. m_startAction = START_ACTION.CREATE; m_replicationRole = ReplicationRole.REPLICA; } else if (arg.equals("dragentportstart")) { m_drAgentPortStart = Integer.parseInt(args[++i]); } // handle timestampsalt else if (arg.equals("timestampsalt")) { m_timestampTestingSalt = Long.parseLong(args[++i]); } else if (arg.startsWith("timestampsalt ")) { m_timestampTestingSalt = Long.parseLong(arg.substring("timestampsalt ".length())); } // handle behaviorless tag field else if (arg.equals("tag")) { m_tag = args[++i]; } else if (arg.startsWith("tag ")) { m_tag = arg.substring("tag ".length()); } else if (arg.equals("catalog")) { m_pathToCatalog = args[++i]; } // and from ant as a single string "m_catalog filename" else if (arg.startsWith("catalog ")) { m_pathToCatalog = arg.substring("catalog ".length()); } else if (arg.equals("deployment")) { m_pathToDeployment = args[++i]; } else if (arg.equals("license")) { m_pathToLicense = args[++i]; } else if (arg.equalsIgnoreCase("ipcports")) { String portList = args[++i]; String ports[] = portList.split(","); for (String port : ports) { m_ipcPorts.add(Integer.valueOf(port)); } } else if (arg.equals("enableiv2")) { m_enableIV2 = true; } else { hostLog.fatal("Unrecognized option to VoltDB: " + arg); usage(); System.exit(-1); } } // If no action is specified, issue an error. if (null == m_startAction) { if (org.voltdb.utils.MiscUtils.isPro()) { hostLog.fatal("You must specify a startup action, either create, recover, replica, rejoin, or compile."); } else { hostLog.fatal("You must specify a startup action, either create, recover, rejoin, or compile."); } usage(); System.exit(-1); } // ENG-3035 Warn if 'recover' action has a catalog since we won't // be using it. Only cover the 'recover' action since 'start' sometimes // acts as 'recover' and other times as 'create'. if (m_startAction == START_ACTION.RECOVER && m_pathToCatalog != null) { hostLog.warn("Catalog is ignored for 'recover' action."); } /* * ENG-2815 If deployment is null (the user wants the default) and * the start action is not rejoin and leader is null, supply the * only valid leader value ("localhost"). */ if (m_leader == null && m_pathToDeployment == null && (m_startAction != START_ACTION.REJOIN && m_startAction != START_ACTION.LIVE_REJOIN)) { m_leader = "localhost"; } } /** * Validates configuration settings and logs errors to the host log. * You typically want to have the system exit when this fails, but * this functionality is left outside of the method so that it is testable. * @return Returns true if all required configuration settings are present. 
*/ public boolean validate() { boolean isValid = true; if (m_startAction == null) { isValid = false; hostLog.fatal("The startup action is missing (either create, recover, replica or rejoin)."); } if (m_startAction == START_ACTION.CREATE && m_pathToCatalog == null) { isValid = false; hostLog.fatal("The catalog location is missing."); } if (m_leader == null) { isValid = false; hostLog.fatal("The hostname is missing."); } if (m_backend.isIPC) { if (m_ipcPorts.isEmpty()) { isValid = false; hostLog.fatal("Specified an IPC backend but did not supply a , " + " separated list of ports via ipcports param"); } } // require deployment file location if (m_startAction != START_ACTION.REJOIN && m_startAction != START_ACTION.LIVE_REJOIN) { // require deployment file location (null is allowed to receive default deployment) if (m_pathToDeployment != null && m_pathToDeployment.isEmpty()) { isValid = false; hostLog.fatal("The deployment file location is empty."); } if (m_replicationRole == ReplicationRole.REPLICA) { if (m_startAction == START_ACTION.RECOVER) { isValid = false; hostLog.fatal("Replica cluster only supports create database"); } else { m_startAction = START_ACTION.CREATE; } } } else { if (!m_isEnterprise && m_startAction == START_ACTION.LIVE_REJOIN) { // pauseless rejoin is only available in pro isValid = false; hostLog.fatal("Live rejoin is only available in the Enterprise Edition"); } } return isValid; } /** * Prints a usage message on stderr. */ public void usage() { usage(System.err); } /** * Prints a usage message on the designated output stream. */ public void usage(PrintStream os) { // N.B: this text is user visible. It intentionally does NOT reveal options not interesting to, say, the // casual VoltDB operator. Please do not reveal options not documented in the VoltDB documentation set. (See // GettingStarted.pdf). String message = ""; if (org.voltdb.utils.MiscUtils.isPro()) { message = "Usage: voltdb create catalog <catalog.jar> [host <hostname>] [deployment <deployment.xml>] license <license.xml>\n" + " voltdb replica catalog <catalog.jar> [host <hostname>] [deployment <deployment.xml>] license <license.xml> \n" + " voltdb recover [host <hostname>] [deployment <deployment.xml>] license <license.xml>\n" + " voltdb [live] rejoin host <hostname>\n"; } else { message = "Usage: voltdb create catalog <catalog.jar> [host <hostname>] [deployment <deployment.xml>]\n" + " voltdb recover [host <hostname>] [deployment <deployment.xml>]\n" + " voltdb rejoin host <hostname>\n"; } message += " voltdb compile [<option> ...] [<ddl-file> ...] (run voltdb compile -h for more details)\n"; os.print(message); // Log it to log4j as well, which will capture the output to a file for (hopefully never) cases where VEM has issues (it generates command lines). hostLog.info(message); // Don't bother logging these for log4j, only dump them to the designated stream. os.println("If no deployment is specified, a default 1 node cluster deployment will be configured."); } /** * Helper to set the path for compiled jar files. * Could also live in VoltProjectBuilder but any code that creates * a catalog will probably start VoltDB with a Configuration * object. Perhaps this is more convenient? * @return the path chosen for the catalog. 
*/ public String setPathToCatalogForTest(String jarname) { m_pathToCatalog = getPathToCatalogForTest(jarname); return m_pathToCatalog; } public static String getPathToCatalogForTest(String jarname) { String answer = jarname; // first try to get the "right" place to put the thing if (System.getenv("TEST_DIR") != null) { answer = System.getenv("TEST_DIR") + File.separator + jarname; // returns a full path, like a boss return new File(answer).getAbsolutePath(); } // try to find an obj directory String userdir = System.getProperty("user.dir"); String buildMode = System.getProperty("build"); if (buildMode == null) buildMode = "release"; assert(buildMode.length() > 0); if (userdir != null) { File userObjDir = new File(userdir + File.separator + "obj" + File.separator + buildMode); if (userObjDir.exists() && userObjDir.isDirectory() && userObjDir.canWrite()) { File testobjectsDir = new File(userObjDir.getPath() + File.separator + "testobjects"); if (!testobjectsDir.exists()) { boolean created = testobjectsDir.mkdir(); assert(created); } assert(testobjectsDir.isDirectory()); assert(testobjectsDir.canWrite()); return testobjectsDir.getAbsolutePath() + File.separator + jarname; } } // otherwise use a local dir File testObj = new File("testobjects"); if (!testObj.exists()) { testObj.mkdir(); } assert(testObj.isDirectory()); assert(testObj.canWrite()); return testObj.getAbsolutePath() + File.separator + jarname; } } /* helper functions to access current configuration values */ public static boolean getLoadLibVOLTDB() { return !(m_config.m_noLoadLibVOLTDB); } public static BackendTarget getEEBackendType() { return m_config.m_backend; } /* * Create a file that starts with the supplied message that contains * human readable stack traces for all java threads in the current process. */ public static void dropStackTrace(String message) { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd-HH:mm:ss.SSSZ"); String dateString = sdf.format(new Date()); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); HostMessenger hm = VoltDB.instance().getHostMessenger(); int hostId = 0; if (hm != null) { hostId = hm.getHostId(); } String root = catalogContext != null ? catalogContext.cluster.getVoltroot() + File.separator : ""; try { PrintWriter writer = new PrintWriter(root + "host" + hostId + "-" + dateString + ".txt"); writer.println(message); printStackTraces(writer); } catch (Exception e) { try { VoltLogger log = new VoltLogger("HOST"); log.error("Error while dropping stack trace for \"" + message + "\"", e); } catch (RuntimeException rt_ex) { e.printStackTrace(); } } } /* * Print stack traces for all java threads in the current process to the supplied writer */ public static void printStackTraces(PrintWriter writer) { printStackTraces(writer, null); } /* * Print stack traces for all threads in the process to the supplied writer. 
* If a List is supplied then the stack frames for the current thread will be placed in it */ public static void printStackTraces(PrintWriter writer, List<String> currentStacktrace) { if (currentStacktrace == null) { currentStacktrace = new ArrayList<String>(); } Map<Thread, StackTraceElement[]> traces = Thread.getAllStackTraces(); StackTraceElement[] myTrace = traces.get(Thread.currentThread()); for (StackTraceElement ste : myTrace) { currentStacktrace.add(ste.toString()); } writer.println(); writer.println("****** Current Thread ****** "); for (String currentStackElem : currentStacktrace) { writer.println(currentStackElem); } writer.println("****** All Threads ******"); Iterator<Thread> it = traces.keySet().iterator(); while (it.hasNext()) { Thread key = it.next(); writer.println(); StackTraceElement[] st = traces.get(key); writer.println("****** " + key + " ******"); for (StackTraceElement ste : st) writer.println(ste); } } /** * Exit the process with an error message, optionally with a stack trace. */ public static void crashLocalVoltDB(String errMsg, boolean stackTrace, Throwable thrown) { wasCrashCalled = true; crashMessage = errMsg; if (ignoreCrash) { throw new AssertionError("Faux crash of VoltDB successful."); } // Even if the logger is null, don't stop. We want to log the stack trace and // any other pertinent information to a .dmp file for crash diagnosis List<String> currentStacktrace = new ArrayList<String>(); currentStacktrace.add("Stack trace from crashLocalVoltDB() method:"); // Create a special dump file to hold the stack trace try { TimestampType ts = new TimestampType(new java.util.Date()); CatalogContext catalogContext = VoltDB.instance().getCatalogContext(); String root = catalogContext != null ? catalogContext.cluster.getVoltroot() + File.separator : ""; PrintWriter writer = new PrintWriter(root + "voltdb_crash" + ts.toString().replace(' ', '-') + ".txt"); writer.println("Time: " + ts); writer.println("Message: " + errMsg); writer.println(); writer.println("Platform Properties:"); PlatformProperties pp = PlatformProperties.getPlatformProperties(); String[] lines = pp.toLogLines().split("\n"); for (String line : lines) { writer.println(line.trim()); } if (thrown != null) { writer.println(); writer.println("****** Exception Thread ****** "); thrown.printStackTrace(writer); } printStackTraces(writer, currentStacktrace); writer.close(); } catch (Throwable err) { // shouldn't fail, but.. err.printStackTrace(); } VoltLogger log = null; try { log = new VoltLogger("HOST"); } catch (RuntimeException rt_ex) { /* ignore */ } if (log != null) { log.fatal(errMsg); if (thrown != null) { if (stackTrace) { log.fatal("Fatal exception", thrown); } else { log.fatal(thrown.toString()); } } else { if (stackTrace) { for (String currentStackElem : currentStacktrace) { log.fatal(currentStackElem); } } } } else { System.err.println(errMsg); if (thrown != null) { if (stackTrace) { thrown.printStackTrace(); } else { System.err.println(thrown.toString()); } } else { if (stackTrace) { for (String currentStackElem : currentStacktrace) { System.err.println(currentStackElem); } } } } System.err.println("VoltDB has encountered an unrecoverable error and is exiting."); System.err.println("The log may contain additional information."); System.exit(-1); } /* * For tests that causes failures, * allow them stop the crash and inspect. 
*/ public static boolean ignoreCrash = false; public static boolean wasCrashCalled = false; public static String crashMessage; /** * Exit the process with an error message, optionally with a stack trace. * Also notify all connected peers that the node is going down. */ public static void crashGlobalVoltDB(String errMsg, boolean stackTrace, Throwable t) { wasCrashCalled = true; crashMessage = errMsg; if (ignoreCrash) { throw new AssertionError("Faux crash of VoltDB successful."); } try { instance().getHostMessenger().sendPoisonPill(errMsg); } catch (Exception e) { e.printStackTrace(); } try { Thread.sleep(500); } catch (InterruptedException e) {} crashLocalVoltDB(errMsg, stackTrace, t); } /** * Entry point for the VoltDB server process. * @param args Requires catalog and deployment file locations. */ public static void main(String[] args) { //Thread.setDefaultUncaughtExceptionHandler(new VoltUncaughtExceptionHandler()); Configuration config = new Configuration(args); try { if (!config.validate()) { config.usage(); System.exit(-1); } else { initialize(config); instance().run(); } } catch (OutOfMemoryError e) { String errmsg = "VoltDB Main thread: ran out of Java memory. This node will shut down."; VoltDB.crashLocalVoltDB(errmsg, false, e); } } /** * Initialize the VoltDB server. * @param config The VoltDB.Configuration to use to initialize the server. */ public static void initialize(VoltDB.Configuration config) { m_config = config; instance().initialize(config); } /** * Retrieve a reference to the object implementing VoltDBInterface. When * running a real server (and not a test harness), this instance will only * be useful after calling VoltDB.initialize(). * * @return A reference to the underlying VoltDBInterface object. */ public static VoltDBInterface instance() { return singleton; } /** * Useful only for unit testing. * * Replace the default VoltDB server instance with an instance of * VoltDBInterface that is used for testing. * */ public static void replaceVoltDBInstanceForTest(VoltDBInterface testInstance) { singleton = testInstance; } @Override public Object clone() throws CloneNotSupportedException { throw new CloneNotSupportedException(); } private static VoltDB.Configuration m_config = new VoltDB.Configuration(); private static VoltDBInterface singleton = new RealVoltDB(); }
Add new start action "join". Currently very simple, will be polished to support different joins later.
src/frontend/org/voltdb/VoltDB.java
Add new start action "join". Currently very simple, will be polished to support different joins later.
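The VoltDB record above centres on crash handling: printStackTraces() walks Thread.getAllStackTraces(), prints the current thread's frames first, then every live thread. As a standalone illustration of that JDK pattern (not VoltDB's own code; the class name ThreadDumpSketch is invented for this sketch and nothing beyond the JDK is assumed):

```java
import java.io.PrintWriter;
import java.util.Map;

public class ThreadDumpSketch {

    /** Writes the current thread's frames first, then every live thread's frames. */
    public static void printStackTraces(PrintWriter writer) {
        Map<Thread, StackTraceElement[]> traces = Thread.getAllStackTraces();

        writer.println("****** Current Thread ******");
        for (StackTraceElement ste : traces.get(Thread.currentThread())) {
            writer.println(ste);
        }

        writer.println("****** All Threads ******");
        for (Map.Entry<Thread, StackTraceElement[]> entry : traces.entrySet()) {
            writer.println();
            writer.println("****** " + entry.getKey() + " ******");
            for (StackTraceElement ste : entry.getValue()) {
                writer.println(ste);
            }
        }
        writer.flush();
    }

    public static void main(String[] args) {
        // Dump to stdout; the record above writes to a timestamped file instead.
        printStackTraces(new PrintWriter(System.out));
    }
}
```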
Java
apache-2.0
d93bd5cb0f119560c6087caed10e4ad9c999886e
0
apache/juddi,apache/juddi,apache/juddi,apache/juddi,apache/juddi
/* * The Apache Software License, Version 1.1 * * * Copyright (c) 2001-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "jUDDI" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.juddi.uddi4j; import java.util.Vector; import org.uddi4j.UDDIException; import org.uddi4j.datatype.tmodel.TModel; import org.uddi4j.response.TModelDetail; import org.uddi4j.transport.TransportException; import org.uddi4j.util.FindQualifier; import org.uddi4j.util.FindQualifiers; import org.uddi4j.util.CategoryBag; import org.uddi4j.util.KeyedReference; public class TestTModel extends UDDITestBase { public void testCases() { _testNoNameQualifier(); } public void _testEmptyFindQualifier() { Vector tmods = new Vector(); TModel tModel = new TModel(); tModel.setName("AnimalProtocol"); TModelDetail tmodDetail = null; tmods.add(tModel); try { tmodDetail = proxy.save_tModel(token.getAuthInfoString(),tmods); assertTrue(_queryEmptyQualifiers()) ; } catch (TransportException ex) { fail(ex.toString()); } catch (UDDIException ex) { fail(ex.toString()); } finally { cleanupTModels(tmodDetail); } } public boolean _queryEmptyQualifiers() { FindQualifiers findQualifiers = new FindQualifiers(); FindQualifier findQualifier = new FindQualifier(); findQualifiers.add(findQualifier); try { proxy.find_tModel("AnimalProtocol", null, null, findQualifiers, 5); } catch (UDDIException ex) { return false; } catch (TransportException ex) { return false; } return true; } public boolean _testNoNameQualifier() { Vector tmods = new Vector(); TModel tModel = new TModel(); tModel.setName("PuffyProtocol"); TModelDetail tmodDetail = null; tmods.add(tModel); try { tmodDetail = proxy.save_tModel(token.getAuthInfoString(), tmods); assertTrue(_queryNoNameQualifiers()); } catch (TransportException ex) { fail(ex.toString()); } catch (UDDIException ex) { fail(ex.toString()); } finally { cleanupTModels(tmodDetail); } return true; } public boolean _queryNoNameQualifiers() { CategoryBag catBag = new CategoryBag(); // one of the string is supposed to be empty, but i'm not sure which // keyName .. keyValue .. tModelKey .. not sure this guy has his stuff right .. KeyedReference kref = new KeyedReference(TModel.TYPES_TMODEL_KEY,"category", ""); // KeyedReference kref = new KeyedReference("", "category", TModel.TYPES_TMODEL_KEY); catBag.add(kref); try { proxy.find_tModel("", catBag, null, null, 5); } catch (UDDIException ex) { return false; } catch (TransportException ex) { return false; } return true; } }
test/src/uddi4j/org/apache/juddi/uddi4j/TestTModel.java
/* * The Apache Software License, Version 1.1 * * * Copyright (c) 2001-2003 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "jUDDI" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact apache@apache.org. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation. For more * information on the Apache Software Foundation, please see * <http://www.apache.org/>. 
*/ package org.apache.juddi.uddi4j; import java.util.Vector; import org.uddi4j.UDDIException; import org.uddi4j.datatype.tmodel.TModel; import org.uddi4j.response.TModelDetail; import org.uddi4j.transport.TransportException; import org.uddi4j.util.FindQualifier; import org.uddi4j.util.FindQualifiers; public class TestTModel extends UDDITestBase { public void testEmptyFindQualifier() { Vector tmods = new Vector(); TModel tModel = new TModel(); tModel.setName("AnimalProtocol"); TModelDetail tmodDetail = null; tmods.add(tModel); try { tmodDetail = proxy.save_tModel(token.getAuthInfoString(),tmods); assertTrue(queryEmptyQualifiers()) ; } catch (TransportException ex) { fail(ex.toString()); } catch (UDDIException ex) { fail(ex.toString()); } finally { cleanupTModels(tmodDetail); } } public boolean queryEmptyQualifiers() { FindQualifiers findQualifiers = new FindQualifiers(); FindQualifier findQualifier = new FindQualifier(); findQualifiers.add(findQualifier); try { proxy.find_tModel("AnimalProtocol", null, null, findQualifiers, 5); } catch (UDDIException ex) { return false; } catch (TransportException ex) { return false; } return true; } }
*** empty log message *** git-svn-id: 9afb1f5cc321249b413187f68ffea1707b8f9361@262644 13f79535-47bb-0310-9956-ffa450edef68
test/src/uddi4j/org/apache/juddi/uddi4j/TestTModel.java
*** empty log message ***
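In the jUDDI record above, the revised TestTModel prefixes its test methods with an underscore so JUnit 3's reflection-based test* discovery skips them, and runs only the chosen ones from a single testCases() entry point. A minimal sketch of that convention, assuming JUnit 3 (junit.framework.TestCase) on the classpath; the class and method names are hypothetical:

```java
import junit.framework.TestCase;

public class ExampleTest extends TestCase {

    // JUnit 3 discovers public no-arg methods named test*; this one is the single entry point.
    public void testCases() {
        _checkSomething();          // explicitly invoked
        // _checkSomethingElse();   // effectively disabled: neither discovered nor called
    }

    // The leading underscore keeps JUnit 3's discovery from running this method directly.
    public void _checkSomething() {
        assertTrue("example".startsWith("ex"));
    }

    public void _checkSomethingElse() {
        assertEquals(4, 2 + 2);
    }
}
```

The trade-off of this convention is that disabled methods silently vanish from reports; JUnit 4's @Ignore makes the exclusion visible in the test count.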
Java
apache-2.0
65afef2c1eebf2d450dbadc8c30e93fca967cb72
0
angelndevil2/LoadT,angelndevil2/LoadT,angelndevil2/LoadT
package com.github.angelndevil2.loadt; import com.github.angelndevil2.loadt.common.HTTPMethod; import com.github.angelndevil2.loadt.common.JMeterCalculator; import com.github.angelndevil2.loadt.common.LoadTException; import com.github.angelndevil2.loadt.common.SystemInfoCollector; import com.github.angelndevil2.loadt.jetty.JettyServer; import com.github.angelndevil2.loadt.jetty.PropList; import com.github.angelndevil2.loadt.listener.CSVFileSaver; import com.github.angelndevil2.loadt.listener.ConsoleResultViewer; import com.github.angelndevil2.loadt.listener.ConsoleStatisticViewer; import com.github.angelndevil2.loadt.listener.IResultSaver; import com.github.angelndevil2.loadt.loadmanager.LoadManagerType; import com.github.angelndevil2.loadt.util.PropertiesUtil; import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.client.ContentExchange; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.HttpExchange; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties; import static org.junit.Assert.assertEquals; /** * @author k, Created on 16. 2. 6. */ @Slf4j public class LoadTTest { private static Properties jettyProp; private final static LoadT loadT = new LoadT(); // set LoadManager private final static String name = "Test load manager"; private final static JettyServer server = new JettyServer(); @BeforeClass public static void startJettyServer() { server.run(); } @BeforeClass public static void initValues() { try { PropertiesUtil.setDirs("src/dist"); } catch (IOException e) { log.error("base directory setting error.", e); } jettyProp = new Properties(); try { jettyProp.load(new FileInputStream(PropertiesUtil.getJettyPropertiesFile())); } catch (IOException e) { log.error("error loading jetty properties.", e); } try { loadT.addLoadManager(name, LoadManagerType.JMETER); } catch (LoadTException e) { log.error("{} already exits", name, e); } } @AfterClass public static void stopJettyServer() throws Exception { server.stop(); } @Test public void testLoadT() throws LoadTException, IOException, InterruptedException { // set options // use keepAlive loadT.setHttpKeepAlive(name, true); // follow redirect loadT.setHttpFollowRedirect(name, true); // set loop count 10 loadT.setLoopCount(name,1); // set number of thread 100 loadT.setNumberOfThread(name, 1); // add http sampler loadT.addHttpSampler(name, "test", "localhost", Integer.valueOf((String)jettyProp.get(PropList.HTTP_PORT)), "/", HTTPMethod.GET, "localhost"); // add system information collector with domain "192.168.100.241" SystemInfoCollector systemInfoCollector = new SystemInfoCollector("localhost"); loadT.addSystemInfoCollector(systemInfoCollector); loadT.startSystemInfoCollectors(); // add console viewer loadT.addListener(name, new ConsoleResultViewer()); loadT.setViewInterval(1000); loadT.addCalculator(name, new JMeterCalculator("TOTAL")); loadT.addStatisticSampleListener(name, "TOTAL", new ConsoleStatisticViewer()); loadT.setSaveInterval(1000); IResultSaver saver = new CSVFileSaver("test.csv"); loadT.addResultSaver(name, saver); // result saver is also IResultListener, so register with addStatisticSampleListener or addListener. 
loadT.addStatisticSampleListener(name, "TOTAL", saver); // run test loadT.runTestAll(); } @Test public void getLoadTInfoFromEmbeddedTest() throws Exception { HttpClient client = new HttpClient(); client.start(); ContentExchange exchange = new ContentExchange(true); exchange.setURL("http://localhost:1080/LoadT/info"); client.send(exchange); // Waits until the exchange is terminated int exchangeState = exchange.waitForDone(); if (exchangeState == HttpExchange.STATUS_COMPLETED) { assertEquals(200, exchange.getResponseStatus()); System.out.println(exchange.getResponseContent()); } exchange.reset(); exchange.setURL("http://localhost:1080/LoadT/load-managers"); client.send(exchange); // Waits until the exchange is terminated exchangeState = exchange.waitForDone(); if (exchangeState == HttpExchange.STATUS_COMPLETED) { assertEquals(200, exchange.getResponseStatus()); System.out.println(exchange.getResponseContent()); } /*else if (exchangeState == HttpExchange.STATUS_EXCEPTED) handleError(); else if (exchangeState == HttpExchange.STATUS_EXPIRED) handleSlowServer();*/ } }
src/test/java/com/github/angelndevil2/loadt/LoadTTest.java
package com.github.angelndevil2.loadt; import com.github.angelndevil2.loadt.common.HTTPMethod; import com.github.angelndevil2.loadt.common.JMeterCalculator; import com.github.angelndevil2.loadt.common.LoadTException; import com.github.angelndevil2.loadt.common.SystemInfoCollector; import com.github.angelndevil2.loadt.jetty.JettyServer; import com.github.angelndevil2.loadt.jetty.PropList; import com.github.angelndevil2.loadt.listener.CSVFileSaver; import com.github.angelndevil2.loadt.listener.ConsoleResultViewer; import com.github.angelndevil2.loadt.listener.ConsoleStatisticViewer; import com.github.angelndevil2.loadt.listener.IResultSaver; import com.github.angelndevil2.loadt.loadmanager.LoadManagerType; import com.github.angelndevil2.loadt.util.PropertiesUtil; import lombok.extern.slf4j.Slf4j; import org.eclipse.jetty.client.ContentExchange; import org.eclipse.jetty.client.HttpClient; import org.eclipse.jetty.client.HttpExchange; import org.junit.Test; import java.io.FileInputStream; import java.io.IOException; import java.util.Properties; /** * @author k, Created on 16. 2. 6. */ @Slf4j public class LoadTTest { static final Properties jettyProp; static { try { PropertiesUtil.setDirs("src/dist"); } catch (IOException e) { log.error("base directory setting error.", e); } jettyProp = new Properties(); try { jettyProp.load(new FileInputStream(PropertiesUtil.getJettyPropertiesFile())); } catch (IOException e) { log.error("error loading jetty properties.", e); } } static LoadT loadT = new LoadT(); // set LoadManager final static String name = "Test load manager"; static { try { loadT.addLoadManager(name, LoadManagerType.JMETER); } catch (LoadTException e) { log.error("{} already exits", name, e); } } static { new JettyServer().run(); } @Test public void testLoadT() throws LoadTException, IOException, InterruptedException { // set options // use keepAlive loadT.setHttpKeepAlive(name, true); // follow redirect loadT.setHttpFollowRedirect(name, true); // set loop count 10 loadT.setLoopCount(name,1); // set number of thread 100 loadT.setNumberOfThread(name, 1); // add http sampler loadT.addHttpSampler(name, "test", "localhost", Integer.valueOf((String)jettyProp.get(PropList.HTTP_PORT)), "/", HTTPMethod.GET, "localhost"); // add system information collector with domain "192.168.100.241" SystemInfoCollector systemInfoCollector = new SystemInfoCollector("localhost"); loadT.addSystemInfoCollector(systemInfoCollector); loadT.startSystemInfoCollectors(); // add console viewer loadT.addListener(name, new ConsoleResultViewer()); loadT.setViewInterval(1000); loadT.addCalculator(name, new JMeterCalculator("TOTAL")); loadT.addStatisticSampleListener(name, "TOTAL", new ConsoleStatisticViewer()); loadT.setSaveInterval(1000); IResultSaver saver = new CSVFileSaver("test.csv"); loadT.addResultSaver(name, saver); // result saver is also IResultListener, so register with addStatisticSampleListener or addListener. 
loadT.addStatisticSampleListener(name, "TOTAL", saver); // run test loadT.runTestAll(); } @Test public void getLoadTInfoFromEmbeddedTest() throws Exception { HttpClient client = new HttpClient(); client.start(); ContentExchange exchange = new ContentExchange(true); exchange.setURL("http://localhost:1080/LoadT/info"); client.send(exchange); // Waits until the exchange is terminated int exchangeState = exchange.waitForDone(); if (exchangeState == HttpExchange.STATUS_COMPLETED) System.out.println(exchange.getResponseContent()); exchange.reset(); exchange.setURL("http://localhost:1080/LoadT/load-managers"); client.send(exchange); // Waits until the exchange is terminated exchangeState = exchange.waitForDone(); if (exchangeState == HttpExchange.STATUS_COMPLETED) System.out.println(exchange.getResponseContent()); /*else if (exchangeState == HttpExchange.STATUS_EXCEPTED) handleError(); else if (exchangeState == HttpExchange.STATUS_EXPIRED) handleSlowServer();*/ } }
static init to @BeforeClass
src/test/java/com/github/angelndevil2/loadt/LoadTTest.java
static init to @BeforeClass
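The LoadT commit above ("static init to @BeforeClass") moves one-time setup out of static initializer blocks and into JUnit 4 lifecycle hooks, so the Jetty server and properties are started once per class and torn down afterwards. A minimal sketch of the general pattern, assuming JUnit 4 on the classpath; LifecycleSketchTest and its "shared resource" are placeholders, not LoadT code:

```java
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import static org.junit.Assert.assertNotNull;

public class LifecycleSketchTest {

    private static Object sharedResource;

    // Runs once before any @Test in this class. A failure here is reported as a test
    // failure, unlike a static initializer, which surfaces as ExceptionInInitializerError.
    @BeforeClass
    public static void startSharedResource() {
        sharedResource = new Object(); // e.g. start an embedded server
    }

    // Runs once after all tests, giving a place to release what @BeforeClass acquired;
    // a static initializer offers no matching teardown hook.
    @AfterClass
    public static void stopSharedResource() {
        sharedResource = null; // e.g. stop the embedded server
    }

    @Test
    public void usesSharedResource() {
        assertNotNull(sharedResource);
    }
}
```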
Java
apache-2.0
8f3a7ceb042da05dea403443ce07225c4058b703
0
CesarPantoja/jena,adrapereira/jena,adrapereira/jena,CesarPantoja/jena,adrapereira/jena,jianglili007/jena,jianglili007/jena,kamir/jena,tr3vr/jena,samaitra/jena,apache/jena,atsolakid/jena,jianglili007/jena,CesarPantoja/jena,samaitra/jena,adrapereira/jena,CesarPantoja/jena,CesarPantoja/jena,tr3vr/jena,kidaa/jena,kamir/jena,kamir/jena,jianglili007/jena,CesarPantoja/jena,samaitra/jena,kidaa/jena,kidaa/jena,tr3vr/jena,atsolakid/jena,samaitra/jena,tr3vr/jena,atsolakid/jena,jianglili007/jena,atsolakid/jena,adrapereira/jena,jianglili007/jena,samaitra/jena,apache/jena,atsolakid/jena,adrapereira/jena,kamir/jena,apache/jena,kidaa/jena,kamir/jena,atsolakid/jena,atsolakid/jena,apache/jena,samaitra/jena,apache/jena,samaitra/jena,jianglili007/jena,kamir/jena,kidaa/jena,tr3vr/jena,tr3vr/jena,kidaa/jena,apache/jena,CesarPantoja/jena,apache/jena,kidaa/jena,tr3vr/jena,apache/jena,adrapereira/jena,kamir/jena
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hp.hpl.jena.tdb.base.file ; import java.io.File ; import java.io.IOException ; import org.apache.jena.atlas.lib.Lib ; import com.hp.hpl.jena.tdb.sys.Names ; /** * Wrapper for a file system directory; can create filenames in that directory. * Enforces some simple consistency policies and provides a "typed string" for a * filename to reduce errors. */ public class Location { static String pathSeparator = File.separator ; // Or just "/" private static String memNamePath = Names.memName+pathSeparator ; private String pathname ; private MetaFile metafile = null ; private boolean isMem = false ; private boolean isMemUnique = false ; private LocationLock lock ; static int memoryCount = 0 ; /** * Return a fresh memory location : always unique, never .equals to another * location. */ static public Location mem() { return mem(null) ; } /** Return a memory location with a name */ static public Location mem(String name) { Location loc = Location.mem() ; memInit(loc, name) ; return loc ; } /** Return a location for a directory on disk */ static public Location create(String directoryName) { Location loc = new Location(directoryName) ; return loc ; } private Location() {} private static void memInit(Location location, String name) { location.pathname = Names.memName ; if ( name != null ) { name = name.replace('\\', '/') ; location.pathname = location.pathname + '/' + name ; } else location.isMemUnique = true ; if ( !location.pathname.endsWith(pathSeparator) ) location.pathname = location.pathname + '/' ; location.isMem = true ; location.metafile = new MetaFile(Names.memName, Names.memName) ; location.lock = new LocationLock(location); } /** @deprecated Use{@linkplain Location#create(String)} */ @Deprecated public Location(String rootname) { super() ; if ( rootname.equals(Names.memName) ) { memInit(this, null) ; return ; } if ( rootname.startsWith(memNamePath) ) { String name = rootname.substring(memNamePath.length()) ; memInit(this, name) ; return ; } ensure(rootname) ; pathname = fixupName(rootname) ; // Metafilename for a directory. String metafileName = getPath(Names.directoryMetafile, Names.extMeta) ; metafile = new MetaFile("Location: " + rootname, metafileName) ; // Set up locking // Note that we don't check the lock in any way at this point, checking // and obtaining the lock is carried out by StoreConnection lock = new LocationLock(this); } // MS Windows: // getCanonicalPath is only good enough for existing files. // It leaves the case as it finds it (upper, lower) and lower cases // not-existing segments. But later creation of a segment with uppercase // changes the exact string returned. 
private String fixupName(String fsName) { if ( isMem() ) return fsName ; File file = new File(fsName) ; try { fsName = file.getCanonicalPath() ; } catch (IOException ex) { throw new FileException("Failed to get canoncial path: " + file.getAbsolutePath(), ex) ; } if ( !fsName.endsWith(File.separator) && !fsName.endsWith(pathSeparator) ) fsName = fsName + pathSeparator ; return fsName ; } public String getDirectoryPath() { return pathname ; } public MetaFile getMetaFile() { return metafile ; } public boolean isMem() { return isMem ; } public boolean isMemUnique() { return isMemUnique ; } public LocationLock getLock() { return lock; } public Location getSubLocation(String dirname) { String newName = pathname + dirname ; ensure(newName) ; return Location.create(newName) ; } private void ensure(String dirname) { if ( isMem() ) return ; File file = new File(dirname) ; if ( file.exists() && !file.isDirectory() ) throw new FileException("Existing file: " + file.getAbsolutePath()) ; if ( !file.exists() ) file.mkdir() ; } public String getSubDirectory(String dirname) { return getSubLocation(dirname).getDirectoryPath() ; } /** * Return an absolute filename where relative names are resolved from the * location */ public String absolute(String filename, String extension) { return (extension == null) ? absolute(filename) : absolute(filename + "." + extension) ; } /** * Return an absolute filename where relative names are resolved from the * location */ public String absolute(String filename) { File f = new File(filename) ; // Location relative. if ( !f.isAbsolute() ) filename = pathname + filename ; return filename ; } /** Does the location exist (and it a directory, and is accessible) */ public boolean exists() { File f = new File(getDirectoryPath()) ; return f.exists() && f.isDirectory() && f.canRead() ; } public boolean exists(String filename) { return exists(filename, null) ; } public boolean exists(String filename, String ext) { String fn = getPath(filename, ext) ; File f = new File(fn) ; return f.exists() ; } /** Return the name of the file relative to this location */ public String getPath(String filename) { return getPath(filename, null) ; } /** Return the name of the file, and extension, relative to this location */ public String getPath(String filename, String ext) { check(filename, ext) ; if ( ext == null ) return pathname + filename ; return pathname + filename + "." + ext ; } private void check(String filename, String ext) { if ( filename == null ) throw new FileException("Location: null filename") ; if ( filename.contains("/") || filename.contains("\\") ) throw new FileException("Illegal file component name: " + filename) ; if ( filename.contains(".") && ext != null ) throw new FileException("Filename has an extension: " + filename) ; if ( ext != null ) { if ( ext.contains(".") ) throw new FileException("Extension has an extension: " + filename) ; } } @Override public int hashCode() { final int prime = 31 ; int result = isMem ? 1 : 2 ; result = prime * result + ((pathname == null) ? 0 : pathname.hashCode()) ; return result ; } @Override public boolean equals(Object obj) { if ( this == obj ) return true ; if ( obj == null ) return false ; if ( getClass() != obj.getClass() ) return false ; Location other = (Location)obj ; if ( isMem && !other.isMem ) return false ; if ( !isMem && other.isMem ) return false ; // Not == so ... if ( isMemUnique ) return false ; return Lib.equal(pathname, other.pathname) ; } @Override public String toString() { return "location:" + pathname ; } }
jena-tdb/src/main/java/com/hp/hpl/jena/tdb/base/file/Location.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hp.hpl.jena.tdb.base.file ; import java.io.File ; import java.io.IOException ; import org.apache.jena.atlas.lib.Lib ; import com.hp.hpl.jena.tdb.sys.Names ; /** * Wrapper for a file system directory; can create filenames in that directory. * Enforces some simple consistency policies and provides a "typed string" for a * filename to reduce errors. */ public class Location { static String pathSeparator = File.separator ; // Or just "/" private static String memNamePath = Names.memName+pathSeparator ; private String pathname ; private MetaFile metafile = null ; private boolean isMem = false ; private boolean isMemUnique = false ; private LocationLock lock ; static int memoryCount = 0 ; /** * Return a fresh memory location : always unique, never .equals to another * location. */ static public Location mem() { return mem(null) ; } /** Return a memory location with a name */ static public Location mem(String name) { Location loc = Location.mem() ; memInit(loc, name) ; return loc ; } /** Return a location for a directory on disk */ static public Location create(String directoryName) { Location loc = new Location(directoryName) ; return loc ; } private Location() {} private static void memInit(Location location, String name) { location.pathname = Names.memName ; if ( name != null ) { name = name.replace('\\', '/') ; location.pathname = location.pathname + '/' + name ; } else location.isMemUnique = true ; if ( !location.pathname.endsWith(pathSeparator) ) location.pathname = location.pathname + '/' ; location.isMem = true ; location.metafile = new MetaFile(Names.memName, Names.memName) ; location.lock = new LocationLock(location); } public Location(String rootname) { super() ; if ( rootname.equals(Names.memName) ) { memInit(this, null) ; return ; } if ( rootname.startsWith(memNamePath) ) { String name = rootname.substring(memNamePath.length()) ; memInit(this, name) ; return ; } ensure(rootname) ; pathname = fixupName(rootname) ; // Metafilename for a directory. String metafileName = getPath(Names.directoryMetafile, Names.extMeta) ; metafile = new MetaFile("Location: " + rootname, metafileName) ; // Set up locking // Note that we don't check the lock in any way at this point, checking // and obtaining the lock is carried out by StoreConnection lock = new LocationLock(this); } // MS Windows: // getCanonicalPath is only good enough for existing files. // It leaves the case as it finds it (upper, lower) and lower cases // not-existing segments. But later creation of a segment with uppercase // changes the exact string returned. 
private String fixupName(String fsName) { if ( isMem() ) return fsName ; File file = new File(fsName) ; try { fsName = file.getCanonicalPath() ; } catch (IOException ex) { throw new FileException("Failed to get canoncial path: " + file.getAbsolutePath(), ex) ; } if ( !fsName.endsWith(File.separator) && !fsName.endsWith(pathSeparator) ) fsName = fsName + pathSeparator ; return fsName ; } public String getDirectoryPath() { return pathname ; } public MetaFile getMetaFile() { return metafile ; } public boolean isMem() { return isMem ; } public boolean isMemUnique() { return isMemUnique ; } public LocationLock getLock() { return lock; } public Location getSubLocation(String dirname) { String newName = pathname + dirname ; ensure(newName) ; return Location.create(newName) ; } private void ensure(String dirname) { if ( isMem() ) return ; File file = new File(dirname) ; if ( file.exists() && !file.isDirectory() ) throw new FileException("Existing file: " + file.getAbsolutePath()) ; if ( !file.exists() ) file.mkdir() ; } public String getSubDirectory(String dirname) { return getSubLocation(dirname).getDirectoryPath() ; } /** * Return an absolute filename where relative names are resolved from the * location */ public String absolute(String filename, String extension) { return (extension == null) ? absolute(filename) : absolute(filename + "." + extension) ; } /** * Return an absolute filename where relative names are resolved from the * location */ public String absolute(String filename) { File f = new File(filename) ; // Location relative. if ( !f.isAbsolute() ) filename = pathname + filename ; return filename ; } /** Does the location exist (and it a directory, and is accessible) */ public boolean exists() { File f = new File(getDirectoryPath()) ; return f.exists() && f.isDirectory() && f.canRead() ; } public boolean exists(String filename) { return exists(filename, null) ; } public boolean exists(String filename, String ext) { String fn = getPath(filename, ext) ; File f = new File(fn) ; return f.exists() ; } /** Return the name of the file relative to this location */ public String getPath(String filename) { return getPath(filename, null) ; } /** Return the name of the file, and extension, relative to this location */ public String getPath(String filename, String ext) { check(filename, ext) ; if ( ext == null ) return pathname + filename ; return pathname + filename + "." + ext ; } private void check(String filename, String ext) { if ( filename == null ) throw new FileException("Location: null filename") ; if ( filename.contains("/") || filename.contains("\\") ) throw new FileException("Illegal file component name: " + filename) ; if ( filename.contains(".") && ext != null ) throw new FileException("Filename has an extension: " + filename) ; if ( ext != null ) { if ( ext.contains(".") ) throw new FileException("Extension has an extension: " + filename) ; } } @Override public int hashCode() { final int prime = 31 ; int result = isMem ? 1 : 2 ; result = prime * result + ((pathname == null) ? 0 : pathname.hashCode()) ; return result ; } @Override public boolean equals(Object obj) { if ( this == obj ) return true ; if ( obj == null ) return false ; if ( getClass() != obj.getClass() ) return false ; Location other = (Location)obj ; if ( isMem && !other.isMem ) return false ; if ( !isMem && other.isMem ) return false ; // Not == so ... if ( isMemUnique ) return false ; return Lib.equal(pathname, other.pathname) ; } @Override public String toString() { return "location:" + pathname ; } }
Make constructor public, but deprecated, for compatibility.
jena-tdb/src/main/java/com/hp/hpl/jena/tdb/base/file/Location.java
Make constructor public, but deprecated, for compatibility.
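The Jena commit above keeps Location(String) public but marks it @Deprecated, steering callers toward the static factory Location.create(String) without breaking existing code. A minimal sketch of that compatibility pattern using an invented Widget class rather than Jena's Location:

```java
public class Widget {

    private final String name;

    /** Preferred entry point going forward. */
    public static Widget create(String name) {
        return new Widget(name);
    }

    /**
     * @deprecated Use {@link Widget#create(String)} instead; kept public so existing
     *             callers keep compiling.
     */
    @Deprecated
    public Widget(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }
}
```

A factory method leaves room to later return cached, validated, or subclassed instances, which a directly exposed constructor cannot do; deprecating rather than removing the constructor preserves source and binary compatibility in the meantime.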
Java
apache-2.0
8502319f81fe708eb93bdb9949a9bb8f34719512
0
mches/byte-buddy,raphw/byte-buddy,raphw/byte-buddy,raphw/byte-buddy,DALDEI/byte-buddy,CodingFabian/byte-buddy
package net.bytebuddy.agent.builder; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import net.bytebuddy.ByteBuddy; import net.bytebuddy.ClassFileVersion; import net.bytebuddy.asm.AsmVisitorWrapper; import net.bytebuddy.build.EntryPoint; import net.bytebuddy.build.Plugin; import net.bytebuddy.description.field.FieldDescription; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.description.method.ParameterDescription; import net.bytebuddy.description.modifier.*; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.dynamic.ClassFileLocator; import net.bytebuddy.dynamic.DynamicType; import net.bytebuddy.dynamic.NexusAccessor; import net.bytebuddy.dynamic.TypeResolutionStrategy; import net.bytebuddy.dynamic.loading.ClassInjector; import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; import net.bytebuddy.dynamic.loading.ClassReloadingStrategy; import net.bytebuddy.dynamic.scaffold.InstrumentedType; import net.bytebuddy.dynamic.scaffold.inline.MethodNameTransformer; import net.bytebuddy.dynamic.scaffold.subclass.ConstructorStrategy; import net.bytebuddy.implementation.ExceptionMethod; import net.bytebuddy.implementation.Implementation; import net.bytebuddy.implementation.LoadedTypeInitializer; import net.bytebuddy.implementation.MethodCall; import net.bytebuddy.implementation.auxiliary.AuxiliaryType; import net.bytebuddy.implementation.bytecode.ByteCodeAppender; import net.bytebuddy.implementation.bytecode.Duplication; import net.bytebuddy.implementation.bytecode.StackManipulation; import net.bytebuddy.implementation.bytecode.TypeCreation; import net.bytebuddy.implementation.bytecode.assign.Assigner; import net.bytebuddy.implementation.bytecode.assign.TypeCasting; import net.bytebuddy.implementation.bytecode.collection.ArrayFactory; import net.bytebuddy.implementation.bytecode.constant.ClassConstant; import net.bytebuddy.implementation.bytecode.constant.IntegerConstant; import net.bytebuddy.implementation.bytecode.constant.TextConstant; import net.bytebuddy.implementation.bytecode.member.FieldAccess; import net.bytebuddy.implementation.bytecode.member.MethodInvocation; import net.bytebuddy.implementation.bytecode.member.MethodReturn; import net.bytebuddy.implementation.bytecode.member.MethodVariableAccess; import net.bytebuddy.matcher.ElementMatcher; import net.bytebuddy.matcher.LatentMatcher; import net.bytebuddy.pool.TypePool; import net.bytebuddy.utility.JavaConstant; import net.bytebuddy.utility.JavaModule; import net.bytebuddy.utility.JavaType; import org.objectweb.asm.Label; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import java.io.*; import java.lang.instrument.ClassDefinition; import java.lang.instrument.ClassFileTransformer; import java.lang.instrument.Instrumentation; import java.lang.instrument.UnmodifiableClassException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.security.ProtectionDomain; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static net.bytebuddy.matcher.ElementMatchers.*; /** * <p> * An agent builder provides a convenience API for defining a * <a 
href="http://docs.oracle.com/javase/6/docs/api/java/lang/instrument/package-summary.html">Java agent</a>. By default, * this transformation is applied by rebasing the type if not specified otherwise by setting a * {@link TypeStrategy}. * </p> * <p> * When defining several {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s, the agent builder always * applies the transformers that were supplied with the last applicable matcher. Therefore, more general transformers * should be defined first. * </p> * <p> * <b>Note</b>: Any transformation is performed using the {@link AccessControlContext} of an agent's creator. * </p> */ public interface AgentBuilder { /** * Defines the given {@link net.bytebuddy.ByteBuddy} instance to be used by the created agent. * * @param byteBuddy The Byte Buddy instance to be used. * @return A new instance of this agent builder which makes use of the given {@code byteBuddy} instance. */ AgentBuilder with(ByteBuddy byteBuddy); /** * Defines the given {@link net.bytebuddy.agent.builder.AgentBuilder.Listener} to be notified by the created agent. * The given listener is notified after any other listener that is already registered. If a listener is registered * twice, it is also notified twice. * * @param listener The listener to be notified. * @return A new instance of this agent builder which creates an agent that informs the given listener about * events. */ AgentBuilder with(Listener listener); /** * Defines the use of the given type locator for locating a {@link TypeDescription} for an instrumented type. * * @param poolStrategy The type locator to use. * @return A new instance of this agent builder which uses the given type locator for looking up class files. */ AgentBuilder with(PoolStrategy poolStrategy); /** * Defines the use of the given location strategy for locating binary data to given class names. * * @param locationStrategy The location strategy to use. * @return A new instance of this agent builder which uses the given location strategy for looking up class files. */ AgentBuilder with(LocationStrategy locationStrategy); /** * Defines how types should be transformed, e.g. if they should be rebased or redefined by the created agent. * * @param typeStrategy The type strategy to use. * @return A new instance of this agent builder which uses the given type strategy. */ AgentBuilder with(TypeStrategy typeStrategy); /** * Defines a given initialization strategy to be applied to generated types. An initialization strategy is responsible * for setting up a type after it was loaded. This initialization must be performed after the transformation because * a Java agent is only invoked before loading a type. By default, the initialization logic is added to a class's type * initializer which queries a global object for any objects that are to be injected into the generated type. * * @param initializationStrategy The initialization strategy to use. * @return A new instance of this agent builder that applies the given initialization strategy. */ AgentBuilder with(InitializationStrategy initializationStrategy); /** * <p> * Specifies a strategy for modifying types that were already loaded prior to the installation of this transformer. * </p> * <p> * <b>Note</b>: Defining a redefinition strategy resets any refinements of a previously set redefinition strategy * via {@link Redefining}. * </p> * <p> * <b>Important</b>: Most JVMs do not support changes of a class's structure after a class was already * loaded. 
Therefore, it is typically required that this class file transformer was built while enabling * {@link AgentBuilder#disableClassFormatChanges()}. * </p> * * @param redefinitionStrategy The redefinition strategy to apply. * @return A new instance of this agent builder that applies the given redefinition strategy. */ Redefining with(RedefinitionStrategy redefinitionStrategy); /** * <p> * Enables or disables management of the JVM's {@code LambdaMetafactory} which is responsible for creating classes that * implement lambda expressions. Without this feature enabled, classes that are represented by lambda expressions are * not instrumented by the JVM such that Java agents have no effect on them when a lambda expression's class is loaded * for the first time. * </p> * <p> * When activating this feature, Byte Buddy instruments the {@code LambdaMetafactory} and takes over the responsibility * of creating classes that represent lambda expressions. In doing so, Byte Buddy has the opportunity to apply the built * class file transformer. If the current VM does not support lambda expressions, activating this feature has no effect. * </p> * <p> * <b>Important</b>: If this feature is active, it is important to release the built class file transformer when * deactivating it. Normally, it is sufficient to call {@link Instrumentation#removeTransformer(ClassFileTransformer)}. * When this feature is enabled, it is however also required to invoke * {@link LambdaInstrumentationStrategy#release(ClassFileTransformer, Instrumentation)}. Otherwise, the executing VMs class * loader retains a reference to the class file transformer what can cause a memory leak. * </p> * * @param lambdaInstrumentationStrategy {@code true} if this feature should be enabled. * @return A new instance of this agent builder where this feature is explicitly enabled or disabled. */ AgentBuilder with(LambdaInstrumentationStrategy lambdaInstrumentationStrategy); /** * Specifies a strategy to be used for resolving {@link TypeDescription} for any type handled by the created transformer. * * @param descriptionStrategy The description strategy to use. * @return A new instance of this agent builder that applies the given description strategy. */ AgentBuilder with(DescriptionStrategy descriptionStrategy); /** * Specifies an installation strategy that this agent builder applies upon installing an agent. * * @param installationStrategy The installation strategy to be used. * @return A new agent builder that applies the supplied installation strategy. */ AgentBuilder with(InstallationStrategy installationStrategy); /** * Specifies a fallback strategy to that this agent builder applies upon installing an agent and during class file transformation. * * @param fallbackStrategy The fallback strategy to be used. * @return A new agent builder that applies the supplied fallback strategy. */ AgentBuilder with(FallbackStrategy fallbackStrategy); /** * Enables class injection of auxiliary classes into the bootstrap class loader. * * @param instrumentation The instrumentation instance that is used for appending jar files to the * bootstrap class path. * @param folder The folder in which jar files of the injected classes are to be stored. * @return An agent builder with bootstrap class loader class injection enabled. */ AgentBuilder enableBootstrapInjection(Instrumentation instrumentation, File folder); /** * Enables the use of the given native method prefix for instrumented methods. Note that this prefix is also * applied when preserving non-native methods. 
The use of this prefix is also registered when installing the * final agent with an {@link java.lang.instrument.Instrumentation}. * * @param prefix The prefix to be used. * @return A new instance of this agent builder which uses the given native method prefix. */ AgentBuilder enableNativeMethodPrefix(String prefix); /** * Disables the use of a native method prefix for instrumented methods. * * @return A new instance of this agent builder which does not use a native method prefix. */ AgentBuilder disableNativeMethodPrefix(); /** * Disables injection of auxiliary classes into the bootstrap class path. * * @return A new instance of this agent builder which does not apply bootstrap class loader injection. */ AgentBuilder disableBootstrapInjection(); /** * <p> * Disables all implicit changes on a class file that Byte Buddy would apply for certain instrumentations. When * using this option, it is no longer possible to rebase a method, i.e. intercepted methods are fully replaced. Furthermore, * it is no longer possible to implicitly apply loaded type initializers for explicitly initializing the generated type. * </p> * <p> * This is equivalent to setting {@link InitializationStrategy.NoOp} and {@link TypeStrategy.Default#REDEFINE_DECLARED_ONLY} * as well as configuring the underlying {@link ByteBuddy} instance to use a {@link net.bytebuddy.implementation.Implementation.Context.Disabled}. * </p> * * @return A new instance of this agent builder that does not apply any implicit changes to the received class file. */ AgentBuilder disableClassFormatChanges(); /** * Assures that all modules of the supplied types are read by the module of any instrumented type. If the current VM does not support * the Java module system, calling this method has no effect and this instance is returned. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param type The types for which to assure their module-visibility from any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Class<?>... type); /** * Assures that all supplied modules are read by the module of any instrumented type. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param module The modules for which to assure their module-visibility from any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, JavaModule... module); /** * Assures that all supplied modules are read by the module of any instrumented type. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param modules The modules for which to assure their module-visibility from any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules); /** * Assures that all modules of the supplied types are read by the module of any instrumented type and vice versa. * If the current VM does not support the Java module system, calling this method has no effect and this instance is returned. 
* * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param type The types for which to assure their module-visibility from and to any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Class<?>... type); /** * Assures that all supplied modules are read by the module of any instrumented type and vice versa. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param module The modules for which to assure their module-visibility from and to any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, JavaModule... module); /** * Assures that all supplied modules are read by the module of any instrumented type and vice versa. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param modules The modules for which to assure their module-visibility from and to any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * This behavior can be changed by {@link Identified.Extendable#asDecorator()} where subsequent type matchers are also applied. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it is * also recommended, to exclude class loaders such as for example the bootstrap class loader by using * {@link AgentBuilder#type(ElementMatcher, ElementMatcher)} instead. * </p> * * @param typeMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied on the type being loaded that * decides if the entailed {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should * be applied for that type. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when the given {@code typeMatcher} * indicates a match. */ Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. 
* If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * This behavior can be changed by {@link Identified.Extendable#asDecorator()} where subsequent type matchers are also applied. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it * is also recommended, to exclude class loaders such as for example the bootstrap class loader. * </p> * * @param typeMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied on the type being * loaded that decides if the entailed * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should be applied for * that type. * @param classLoaderMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied to the * {@link java.lang.ClassLoader} that is loading the type being loaded. This matcher * is always applied first where the type matcher is not applied in case that this * matcher does not indicate a match. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when both the given * {@code typeMatcher} and {@code classLoaderMatcher} indicate a match. */ Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * This behavior can be changed by {@link Identified.Extendable#asDecorator()} where subsequent type matchers are also applied. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it * is also recommended, to exclude class loaders such as for example the bootstrap class loader. * </p> * * @param typeMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied on the type being * loaded that decides if the entailed * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should be applied for * that type. * @param classLoaderMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied to the * {@link java.lang.ClassLoader} that is loading the type being loaded. This matcher * is always applied second where the type matcher is not applied in case that this * matcher does not indicate a match. 
* @param moduleMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied to the {@link JavaModule} * of the type being loaded. This matcher is always applied first where the class loader and * type matchers are not applied in case that this matcher does not indicate a match. On a JVM * that does not support the Java modules system, this matcher is not applied. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when both the given * {@code typeMatcher} and {@code classLoaderMatcher} indicate a match. */ Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it * is also recommended, to exclude class loaders such as for example the bootstrap class loader. * </p> * * @param matcher A matcher that decides if the entailed {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should be * applied for a type that is being loaded. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when the given {@code matcher} * indicates a match. */ Identified.Narrowable type(RawMatcher matcher); /** * <p> * Excludes any type that is matched by the provided matcher from instrumentation and considers types by all {@link ClassLoader}s. * By default, Byte Buddy does not instrument synthetic types or types that are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! 
* </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param typeMatcher A matcher that identifies types that should not be instrumented. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher); /** * <p> * Excludes any type that is matched by the provided matcher and is loaded by a class loader matching the second matcher. * By default, Byte Buddy does not instrument synthetic types, types within a {@code net.bytebuddy.*} package or types that * are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param typeMatcher A matcher that identifies types that should not be instrumented. * @param classLoaderMatcher A matcher that identifies a class loader that identifies classes that should not be instrumented. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * <p> * Excludes any type that is matched by the provided matcher and is loaded by a class loader matching the second matcher. 
* By default, Byte Buddy does not instrument synthetic types, types within a {@code net.bytebuddy.*} package or types that * are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param typeMatcher A matcher that identifies types that should not be instrumented. * @param classLoaderMatcher A matcher that identifies a class loader that identifies classes that should not be instrumented. * @param moduleMatcher A matcher that identifies a module that identifies classes that should not be instrumented. On a JVM * that does not support the Java modules system, this matcher is not applied. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * <p> * Excludes any type that is matched by the raw matcher provided to this method. By default, Byte Buddy does not * instrument synthetic types, types within a {@code net.bytebuddy.*} package or types that are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. 
Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param rawMatcher A raw matcher that identifies types that should not be instrumented. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(RawMatcher rawMatcher); /** * Creates a {@link java.lang.instrument.ClassFileTransformer} that implements the configuration of this * agent builder. * * @return A class file transformer that implements the configuration of this agent builder. */ ResettableClassFileTransformer makeRaw(); /** * <p> * Creates and installs a {@link java.lang.instrument.ClassFileTransformer} that implements the configuration of * this agent builder with a given {@link java.lang.instrument.Instrumentation}. If retransformation is enabled, * the installation also causes all loaded types to be retransformed. * </p> * <p> * If installing the created class file transformer causes an exception to be thrown, the consequences of this * exception are determined by the {@link InstallationStrategy} of this builder. * </p> * * @param instrumentation The instrumentation on which this agent builder's configuration is to be installed. * @return The installed class file transformer. */ ResettableClassFileTransformer installOn(Instrumentation instrumentation); /** * Creates and installs a {@link java.lang.instrument.ClassFileTransformer} that implements the configuration of * this agent builder with the Byte Buddy-agent which must be installed prior to calling this method. * * @return The installed class file transformer. * @see AgentBuilder#installOn(Instrumentation) */ ResettableClassFileTransformer installOnByteBuddyAgent(); /** * An abstraction for extending a matcher. * * @param <T> The type that is produced by chaining a matcher. */ interface Matchable<T extends Matchable<T>> { /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. When matching a * type, class loaders are not considered. * * @param typeMatcher A matcher for the type being matched. * @return A chained matcher. */ T and(ElementMatcher<? super TypeDescription> typeMatcher); /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @return A chained matcher. */ T and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. 
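 * <p>
 * A minimal chaining sketch (the type and annotation names are purely illustrative; the matchers of {@link net.bytebuddy.matcher.ElementMatchers} are assumed to be statically imported):
 * </p>
 * <pre>{@code
 * new AgentBuilder.Default()
 *     .type(nameStartsWith("com.example."))
 *     .and(isAnnotatedWith(named("com.example.Traced")), any(), any())
 *     .transform(AgentBuilder.Transformer.NoOp.INSTANCE);
 * }</pre>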
* * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @param moduleMatcher A matcher for the type's module. On a JVM that does not support modules, the Java module is represented by {@code null}. * @return A chained matcher. */ T and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. * * @param rawMatcher A raw matcher for the type being matched. * @return A chained matcher. */ T and(RawMatcher rawMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. When matching a * type, the class loader is not considered. * * @param typeMatcher A matcher for the type being matched. * @return A chained matcher. */ T or(ElementMatcher<? super TypeDescription> typeMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @return A chained matcher. */ T or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @param moduleMatcher A matcher for the type's module. On a JVM that does not support modules, the Java module is represented by {@code null}. * @return A chained matcher. */ T or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. * * @param rawMatcher A raw matcher for the type being matched. * @return A chained matcher. */ T or(RawMatcher rawMatcher); /** * An abstract base implementation of a matchable. * * @param <S> The type that is produced by chaining a matcher. */ abstract class AbstractBase<S extends Matchable<S>> implements Matchable<S> { @Override public S and(ElementMatcher<? super TypeDescription> typeMatcher) { return and(typeMatcher, any()); } @Override public S and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return and(typeMatcher, classLoaderMatcher, any()); } @Override public S and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return and(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, moduleMatcher)); } @Override public S or(ElementMatcher<? super TypeDescription> typeMatcher) { return or(typeMatcher, any()); } @Override public S or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return or(typeMatcher, classLoaderMatcher, any()); } @Override public S or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? 
super JavaModule> moduleMatcher) { return or(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, moduleMatcher)); } } } /** * Allows to further specify ignored types. */ interface Ignored extends Matchable<Ignored>, AgentBuilder { /* this is merely a unionizing interface that does not declare methods */ } /** * Describes an {@link net.bytebuddy.agent.builder.AgentBuilder} which was handed a matcher for identifying * types to be instrumented in order to supply one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s. */ interface Identified { /** * Applies the given transformer for the already supplied matcher. * * @param transformer The transformer to apply. * @return A new instance of this agent builder with the transformer being applied when the previously supplied matcher * identified a type for instrumentation which also allows for the registration of subsequent transformers. */ Extendable transform(Transformer transformer); /** * Allows to specify a type matcher for a type to instrument. */ interface Narrowable extends Matchable<Narrowable>, Identified { /* this is merely a unionizing interface that does not declare methods */ } /** * This interface is used to allow for optionally providing several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when a matcher identifies a type * to be instrumented. Any subsequent transformers are applied in the order they are registered. */ interface Extendable extends AgentBuilder, Identified { /** * <p> * Applies the specified transformation as a decorative transformation. For a decorative transformation, the supplied * transformer is prepended to any previous transformation that also matches the instrumented type, i.e. both transformations * are supplied. This procedure is repeated until a transformer is reached that matches the instrumented type but is not * defined as decorating, after which no further transformations are considered. If all matching transformations are declared * as decorating, all matching transformers are applied. * </p> * <p> * <b>Note</b>: A decorating transformer is applied <b>after</b> previously registered transformers. * </p> * * @return A new instance of this agent builder with the specified transformation being applied as a decorator. */ AgentBuilder asDecorator(); } } /** * An agent builder that allows the configuration of how to apply a {@link RedefinitionStrategy}. Such a configuration * is only applied if the redefinition strategy is alive. */ interface Redefining extends AgentBuilder { /** * A batch allocator is responsible for dividing a redefinition of existing types into several chunks. This allows * to narrow down errors to the redefinition of specific types or to apply a {@link RedefinitionStrategy.Listener} * action between chunks. * * @param redefinitionBatchAllocator The batch allocator to use. * @return A new instance of this agent builder which makes use of the specified batch allocator. */ Redefining with(RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator); /** * A failure handler is responsible for reacting to failed type redefinitions. * * @param redefinitionFailureHandler The failure handler to apply. * @return A new instance of this agent builder which makes use of the specified failure handler. */ Redefining with(RedefinitionStrategy.FailureHandler redefinitionFailureHandler); /** * <p> * A redefinition listener is invoked before each batch of type redefinitions and on every error as well as * after the redefinition was completed.
A redefinition listener can be used for debugging or logging purposes * and to apply actions between each batch, e.g. to pause or wait in order to avoid rendering the current VM * non-responsive if a lot of classes are redefined. * </p> * <p> * Adding several listeners does not replace previous listeners but applies them in the registration order. * </p> * * @param redefinitionListener The listener to register. * @return A new instance of this agent builder which notifies the specified listener upon type redefinitions. */ Redefining with(RedefinitionStrategy.Listener redefinitionListener); } /** * A matcher that allows to determine if a {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer} * should be applied during the execution of a {@link java.lang.instrument.ClassFileTransformer} that was * generated by an {@link net.bytebuddy.agent.builder.AgentBuilder}. */ interface RawMatcher { /** * Decides if the given {@code typeDescription} should be instrumented with the entailed * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s. * * @param typeDescription A description of the type to be instrumented. * @param classLoader The class loader of the instrumented type. Might be {@code null} if this class * loader represents the bootstrap class loader. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param classBeingRedefined The class being redefined which is only not {@code null} if a retransformation * is applied. * @param protectionDomain The protection domain of the type being transformed. * @return {@code true} if the entailed {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should * be applied for the given {@code typeDescription}. */ boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain); /** * A conjunction of two raw matchers. */ class Conjunction implements RawMatcher { /** * The left matcher which is applied first. */ private final RawMatcher left; /** * The right matcher which is applied second. */ private final RawMatcher right; /** * Creates a new conjunction of two raw matchers. * * @param left The left matcher which is applied first. * @param right The right matcher which is applied second. */ protected Conjunction(RawMatcher left, RawMatcher right) { this.left = left; this.right = right; } @Override public boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain) { return left.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain) && right.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Conjunction that = (Conjunction) object; return left.equals(that.left) && right.equals(that.right); } @Override public int hashCode() { int result = left.hashCode(); result = 31 * result + right.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.RawMatcher.Conjunction{" + "left=" + left + ", right=" + right + '}'; } } /** * A disjunction of two raw matchers. */ class Disjunction implements RawMatcher { /** * The left matcher which is applied first. */ private final RawMatcher left; /** * The right matcher which is applied second. 
*/ private final RawMatcher right; /** * Creates a new disjunction of two raw matchers. * * @param left The left matcher which is applied first. * @param right The right matcher which is applied second. */ protected Disjunction(RawMatcher left, RawMatcher right) { this.left = left; this.right = right; } @Override public boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain) { return left.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain) || right.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Disjunction that = (Disjunction) object; return left.equals(that.left) && right.equals(that.right); } @Override public int hashCode() { int result = left.hashCode(); result = 31 * result + right.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.RawMatcher.Disjunction{" + "left=" + left + ", right=" + right + '}'; } } /** * A raw matcher implementation that checks a {@link TypeDescription}, * its {@link java.lang.ClassLoader} and its {@link JavaModule} against suitable matchers in order to determine if the matched * type should be instrumented. */ class ForElementMatchers implements RawMatcher { /** * The type matcher to apply to a {@link TypeDescription}. */ private final ElementMatcher<? super TypeDescription> typeMatcher; /** * The class loader matcher to apply to a {@link java.lang.ClassLoader}. */ private final ElementMatcher<? super ClassLoader> classLoaderMatcher; /** * A module matcher to apply to a {@code java.lang.reflect.Module}. */ private final ElementMatcher<? super JavaModule> moduleMatcher; /** * Creates a new {@link net.bytebuddy.agent.builder.AgentBuilder.RawMatcher} that only matches the * supplied {@link TypeDescription}, its {@link java.lang.ClassLoader} and its {@link JavaModule} against the corresponding matchers in order * to decide whether an instrumentation should be conducted. * * @param typeMatcher The type matcher to apply to a {@link TypeDescription}. * @param classLoaderMatcher The class loader matcher to apply to a {@link java.lang.ClassLoader}. * @param moduleMatcher A module matcher to apply to a {@code java.lang.reflect.Module}. */ public ForElementMatchers(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<?
super JavaModule> moduleMatcher) { this.typeMatcher = typeMatcher; this.classLoaderMatcher = classLoaderMatcher; this.moduleMatcher = moduleMatcher; } @Override public boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain) { return moduleMatcher.matches(module) && classLoaderMatcher.matches(classLoader) && typeMatcher.matches(typeDescription); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && classLoaderMatcher.equals(((ForElementMatchers) other).classLoaderMatcher) && moduleMatcher.equals(((ForElementMatchers) other).moduleMatcher) && typeMatcher.equals(((ForElementMatchers) other).typeMatcher); } @Override public int hashCode() { int result = typeMatcher.hashCode(); result = 31 * result + classLoaderMatcher.hashCode(); result = 31 * result + moduleMatcher.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.RawMatcher.ForElementMatchers{" + "typeMatcher=" + typeMatcher + ", classLoaderMatcher=" + classLoaderMatcher + ", moduleMatcher=" + moduleMatcher + '}'; } } } /** * A listener that is informed about events that occur during an instrumentation process. */ interface Listener { /** * Invoked right before a successful transformation is applied. * * @param typeDescription The type that is being transformed. * @param classLoader The class loader which is loading this type. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param dynamicType The dynamic type that was created. */ void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType); /** * Invoked when a type is not transformed but ignored. * * @param typeDescription The type being ignored for transformation. * @param classLoader The class loader which is loading this type. * @param module The ignored type's module or {@code null} if the current VM does not support modules. */ void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module); /** * Invoked when an error has occurred during transformation. * * @param typeName The type name of the instrumented type. * @param classLoader The class loader which is loading this type. * @param module The instrumented type's module or {@code null} if the current VM does not support modules. * @param throwable The occurred error. */ void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable); /** * Invoked after a class was attempted to be loaded, independently of its treatment. * * @param typeName The binary name of the instrumented type. * @param classLoader The class loader which is loading this type. * @param module The instrumented type's module or {@code null} if the current VM does not support modules. */ void onComplete(String typeName, ClassLoader classLoader, JavaModule module); /** * A no-op implementation of a {@link net.bytebuddy.agent.builder.AgentBuilder.Listener}. */ enum NoOp implements Listener { /** * The singleton instance. 
*/ INSTANCE; @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { /* do nothing */ } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { /* do nothing */ } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { /* do nothing */ } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.Listener.NoOp." + name(); } } /** * An adapter for a listener where all methods are implemented as non-operational. */ abstract class Adapter implements Listener { @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { /* do nothing */ } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { /* do nothing */ } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { /* do nothing */ } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { /* do nothing */ } } /** * A listener that writes events to a {@link PrintStream}. This listener prints a line per event, including the event type and * the name of the type in question. */ class StreamWriting implements Listener { /** * The prefix that is prepended to all written messages. */ protected static final String PREFIX = "[Byte Buddy]"; /** * The print stream written to. */ private final PrintStream printStream; /** * Creates a new stream writing listener. * * @param printStream The print stream written to. */ public StreamWriting(PrintStream printStream) { this.printStream = printStream; } /** * Creates a new stream writing listener that writes to {@link System#out}. * * @return A listener writing events to the standard output stream. */ public static Listener toSystemOut() { return new StreamWriting(System.out); } /** * Creates a new stream writing listener that writes to {@link System#err}. * * @return A listener writing events to the standard error stream.
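 * <p>
 * A minimal registration sketch (assuming the Byte Buddy agent was installed beforehand, for example via {@code ByteBuddyAgent.install()}, and using a purely illustrative package name):
 * </p>
 * <pre>{@code
 * new AgentBuilder.Default()
 *     .with(AgentBuilder.Listener.StreamWriting.toSystemError())
 *     .type(ElementMatchers.nameStartsWith("com.example."))
 *     .transform(AgentBuilder.Transformer.NoOp.INSTANCE)
 *     .installOnByteBuddyAgent();
 * }</pre>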
*/ public static Listener toSystemError() { return new StreamWriting(System.err); } @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { printStream.println(PREFIX + " TRANSFORM " + typeDescription.getName() + "[" + classLoader + ", " + module + "]"); } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { printStream.println(PREFIX + " IGNORE " + typeDescription.getName() + "[" + classLoader + ", " + module + "]"); } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { synchronized (printStream) { printStream.println(PREFIX + " ERROR " + typeName + "[" + classLoader + ", " + module + "]"); throwable.printStackTrace(printStream); } } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { printStream.println(PREFIX + " COMPLETE " + typeName + "[" + classLoader + ", " + module + "]"); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && printStream.equals(((StreamWriting) other).printStream); } @Override public int hashCode() { return printStream.hashCode(); } @Override public String toString() { return "AgentBuilder.Listener.StreamWriting{" + "printStream=" + printStream + '}'; } } /** * A listener that adds read-edges to any module of an instrumented class upon its transformation. */ class ModuleReadEdgeCompleting extends Listener.Adapter { /** * The instrumentation instance used for adding read edges. */ private final Instrumentation instrumentation; /** * {@code true} if the listener should also add a read-edge from the supplied modules to the instrumented type's module. */ private final boolean addTargetEdge; /** * The modules to add as a read edge to any transformed class's module. */ private final Set<? extends JavaModule> modules; /** * Creates a new module read-edge completing listener. * * @param instrumentation The instrumentation instance used for adding read edges. * @param addTargetEdge {@code true} if the listener should also add a read-edge from the supplied modules * to the instrumented type's module. * @param modules The modules to add as a read edge to any transformed class's module. */ public ModuleReadEdgeCompleting(Instrumentation instrumentation, boolean addTargetEdge, Set<? extends JavaModule> modules) { this.instrumentation = instrumentation; this.addTargetEdge = addTargetEdge; this.modules = modules; } /** * Resolves a listener that adds module edges from and to the instrumented type's module. * * @param instrumentation The instrumentation instance used for adding read edges. * @param addTargetEdge {@code true} if the listener should also add a read-edge from the supplied * modules to the instrumented type's module. * @param type The types for which to extract the modules. * @return An appropriate listener. */ protected static Listener of(Instrumentation instrumentation, boolean addTargetEdge, Class<?>... type) { Set<JavaModule> modules = new HashSet<JavaModule>(); for (Class<?> aType : type) { JavaModule module = JavaModule.ofType(aType); if (module.isNamed()) { modules.add(module); } } return modules.isEmpty() ? 
Listener.NoOp.INSTANCE : new Listener.ModuleReadEdgeCompleting(instrumentation, addTargetEdge, modules); } @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { if (module != null && module.isNamed()) { for (JavaModule target : modules) { if (!module.canRead(target)) { module.addReads(instrumentation, target); } if (addTargetEdge && !target.canRead(module)) { target.addReads(instrumentation, module); } } } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ModuleReadEdgeCompleting that = (ModuleReadEdgeCompleting) object; return instrumentation.equals(that.instrumentation) && addTargetEdge == that.addTargetEdge && modules.equals(that.modules); } @Override public int hashCode() { int result = instrumentation.hashCode(); result = 31 * result + modules.hashCode(); result = 31 * result + (addTargetEdge ? 1 : 0); return result; } @Override public String toString() { return "AgentBuilder.Listener.ModuleReadEdgeCompleting{" + "instrumentation=" + instrumentation + ", addTargetEdge=" + addTargetEdge + ", modules=" + modules + '}'; } } /** * A compound listener that allows to group several listeners in one instance. */ class Compound implements Listener { /** * The listeners that are represented by this compound listener in their application order. */ private final List<? extends Listener> listeners; /** * Creates a new compound listener. * * @param listener The listeners to apply in their application order. */ public Compound(Listener... listener) { this(Arrays.asList(listener)); } /** * Creates a new compound listener. * * @param listeners The listeners to apply in their application order. */ public Compound(List<? extends Listener> listeners) { this.listeners = listeners; } @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { for (Listener listener : listeners) { listener.onTransformation(typeDescription, classLoader, module, dynamicType); } } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { for (Listener listener : listeners) { listener.onIgnored(typeDescription, classLoader, module); } } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { for (Listener listener : listeners) { listener.onError(typeName, classLoader, module, throwable); } } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { for (Listener listener : listeners) { listener.onComplete(typeName, classLoader, module); } } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && listeners.equals(((Compound) other).listeners); } @Override public int hashCode() { return listeners.hashCode(); } @Override public String toString() { return "AgentBuilder.Listener.Compound{" + "listeners=" + listeners + '}'; } } } /** * A type strategy is responsible for creating a type builder for a type that is being instrumented. */ interface TypeStrategy { /** * Creates a type builder for a given type. * * @param typeDescription The type being instrumented. * @param byteBuddy The Byte Buddy configuration. * @param classFileLocator The class file locator to use. * @param methodNameTransformer The method name transformer to use. 
* @return A type builder for the given arguments. */ DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer); /** * Default implementations of type strategies. */ enum Default implements TypeStrategy { /** * A definition handler that performs a rebasing for all types. */ REBASE { @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return byteBuddy.rebase(typeDescription, classFileLocator, methodNameTransformer); } }, /** * <p> * A definition handler that performs a redefinition for all types. * </p> * <p> * Note that the default agent builder is configured to apply a self initialization where a static class initializer * is added to the redefined class. This can be disabled by for example using a {@link InitializationStrategy.Minimal} or * {@link InitializationStrategy.NoOp}. Also, consider the constraints implied by {@link ByteBuddy#redefine(TypeDescription, ClassFileLocator)}. * </p> * <p> * For prohibiting any changes on a class file, use {@link AgentBuilder#disableClassFormatChanges()} * </p> */ REDEFINE { @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return byteBuddy.redefine(typeDescription, classFileLocator); } }, /** * <p> * A definition handler that performs a redefinition for all types and ignores all methods that were not declared by the instrumented type. * </p> * <p> * Note that the default agent builder is configured to apply a self initialization where a static class initializer * is added to the redefined class. This can be disabled by for example using a {@link InitializationStrategy.Minimal} or * {@link InitializationStrategy.NoOp}. Also, consider the constraints implied by {@link ByteBuddy#redefine(TypeDescription, ClassFileLocator)}. * </p> * <p> * For prohibiting any changes on a class file, use {@link AgentBuilder#disableClassFormatChanges()} * </p> */ REDEFINE_DECLARED_ONLY { @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return byteBuddy.redefine(typeDescription, classFileLocator).ignoreAlso(LatentMatcher.ForSelfDeclaredMethod.NOT_DECLARED); } }; @Override public String toString() { return "AgentBuilder.TypeStrategy.Default." + name(); } } /** * A type strategy that applies a build {@link EntryPoint}. */ class ForBuildEntryPoint implements TypeStrategy { /** * The entry point to apply. */ private final EntryPoint entryPoint; /** * Creates a new type strategy for an entry point. * * @param entryPoint The entry point to apply. 
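 * <p>
 * A minimal sketch of registering this type strategy with an agent builder (assuming {@code EntryPoint.Default.REBASE} as the entry point):
 * </p>
 * <pre>{@code
 * AgentBuilder agentBuilder = new AgentBuilder.Default()
 *     .with(new AgentBuilder.TypeStrategy.ForBuildEntryPoint(EntryPoint.Default.REBASE));
 * }</pre>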
*/ public ForBuildEntryPoint(EntryPoint entryPoint) { this.entryPoint = entryPoint; } @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return entryPoint.transform(typeDescription, byteBuddy, classFileLocator, methodNameTransformer); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForBuildEntryPoint that = (ForBuildEntryPoint) object; return entryPoint.equals(that.entryPoint); } @Override public int hashCode() { return entryPoint.hashCode(); } @Override public String toString() { return "AgentBuilder.TypeStrategy.ForBuildEntryPoint{" + "entryPoint=" + entryPoint + '}'; } } } /** * A transformer allows to apply modifications to a {@link net.bytebuddy.dynamic.DynamicType}. Such a modification * is then applied to any instrumented type that was matched by the preceding matcher. */ interface Transformer { /** * Allows for a transformation of a {@link net.bytebuddy.dynamic.DynamicType.Builder}. * * @param builder The dynamic builder to transform. * @param typeDescription The description of the type currently being instrumented. * @param classLoader The class loader of the instrumented class. Might be {@code null} to * represent the bootstrap class loader. * @return A transformed version of the supplied {@code builder}. */ DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader); /** * A no-op implementation of a {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer} that does * not modify the supplied dynamic type. */ enum NoOp implements Transformer { /** * The singleton instance. */ INSTANCE; @Override public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) { return builder; } @Override public String toString() { return "AgentBuilder.Transformer.NoOp." + name(); } } /** * A transformer that applies a build {@link Plugin}. */ class ForBuildPlugin implements Transformer { /** * The plugin to apply. */ private final Plugin plugin; /** * Creates a new transformer for a build {@link Plugin}. * * @param plugin The plugin to apply. */ public ForBuildPlugin(Plugin plugin) { this.plugin = plugin; } @Override public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) { return plugin.apply(builder, typeDescription); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForBuildPlugin that = (ForBuildPlugin) object; return plugin.equals(that.plugin); } @Override public int hashCode() { return plugin.hashCode(); } @Override public String toString() { return "AgentBuilder.Transformer.ForBuildPlugin{" + "plugin=" + plugin + '}'; } } /** * A compound transformer that allows to group several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s as a single transformer. */ class Compound implements Transformer { /** * The transformers to apply in their application order. */ private final Transformer[] transformer; /** * Creates a new compound transformer. * * @param transformer The transformers to apply in their application order. */ public Compound(Transformer... 
transformer) { this.transformer = transformer; } @Override public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) { for (Transformer transformer : this.transformer) { builder = transformer.transform(builder, typeDescription, classLoader); } return builder; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && Arrays.equals(transformer, ((Compound) other).transformer); } @Override public int hashCode() { return Arrays.hashCode(transformer); } @Override public String toString() { return "AgentBuilder.Transformer.Compound{" + "transformer=" + Arrays.toString(transformer) + '}'; } } } /** * A type locator allows to specify how {@link TypeDescription}s are resolved by an {@link net.bytebuddy.agent.builder.AgentBuilder}. */ interface PoolStrategy { /** * Creates a type pool for a given class file locator. * * @param classFileLocator The class file locator to use. * @param classLoader The class loader for which the class file locator was created. * @return A type pool for the supplied class file locator. */ TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader); /** * <p> * A default type locator that resolves types only if any property that is not the type's name is requested. * </p> * <p> * The returned type pool uses a {@link net.bytebuddy.pool.TypePool.CacheProvider.Simple} and the * {@link ClassFileLocator} that is provided by the builder's {@link LocationStrategy}. * </p> */ enum Default implements PoolStrategy { /** * A type locator that parses the code segment of each method for extracting information about parameter * names even if they are not explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#EXTENDED */ EXTENDED(TypePool.Default.ReaderMode.EXTENDED), /** * A type locator that skips the code segment of each method and does therefore not extract information * about parameter names. Parameter names are still included if they are explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#FAST */ FAST(TypePool.Default.ReaderMode.FAST); /** * The reader mode to apply by this type locator. */ private final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator. * * @param readerMode The reader mode to apply by this type locator. */ Default(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return new TypePool.Default.WithLazyResolution(TypePool.CacheProvider.Simple.withObjectType(), classFileLocator, readerMode); } @Override public String toString() { return "AgentBuilder.PoolStrategy.Default." + name(); } } /** * <p> * A type locator that resolves all type descriptions eagerly. * </p> * <p> * The returned type pool uses a {@link net.bytebuddy.pool.TypePool.CacheProvider.Simple} and the * {@link ClassFileLocator} that is provided by the builder's {@link LocationStrategy}. * </p> */ enum Eager implements PoolStrategy { /** * A type locator that parses the code segment of each method for extracting information about parameter * names even if they are not explicitly included in a class file. 
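 * <p>
 * A minimal sketch of registering this eager, extended pool strategy with an agent builder:
 * </p>
 * <pre>{@code
 * AgentBuilder agentBuilder = new AgentBuilder.Default()
 *     .with(AgentBuilder.PoolStrategy.Eager.EXTENDED);
 * }</pre>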
* * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#EXTENDED */ EXTENDED(TypePool.Default.ReaderMode.EXTENDED), /** * A type locator that skips the code segment of each method and does therefore not extract information * about parameter names. Parameter names are still included if they are explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#FAST */ FAST(TypePool.Default.ReaderMode.FAST); /** * The reader mode to apply by this type locator. */ private final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator. * * @param readerMode The reader mode to apply by this type locator. */ Eager(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return new TypePool.Default(TypePool.CacheProvider.Simple.withObjectType(), classFileLocator, readerMode); } @Override public String toString() { return "AgentBuilder.PoolStrategy.Eager." + name(); } } /** * <p> * A type locator that attempts loading a type if it cannot be located by the underlying lazy type pool. * </p> * <p> * The returned type pool uses a {@link net.bytebuddy.pool.TypePool.CacheProvider.Simple} and the * {@link ClassFileLocator} that is provided by the builder's {@link LocationStrategy}. Any types * are loaded via the instrumented type's {@link ClassLoader}. * </p> */ enum ClassLoading implements PoolStrategy { /** * A type locator that parses the code segment of each method for extracting information about parameter * names even if they are not explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#EXTENDED */ EXTENDED(TypePool.Default.ReaderMode.EXTENDED), /** * A type locator that skips the code segment of each method and does therefore not extract information * about parameter names. Parameter names are still included if they are explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#FAST */ FAST(TypePool.Default.ReaderMode.FAST); /** * The reader mode to apply by this type locator. */ private final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator. * * @param readerMode The reader mode to apply by this type locator. */ ClassLoading(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return TypePool.ClassLoading.of(classLoader, new TypePool.Default.WithLazyResolution(TypePool.CacheProvider.Simple.withObjectType(), classFileLocator, readerMode)); } @Override public String toString() { return "AgentBuilder.PoolStrategy.ClassLoading." + name(); } } /** * <p> * A type locator that uses type pools but allows for the configuration of a custom cache provider by class loader. Note that a * {@link TypePool} can grow in size and that a static reference is kept to this pool by Byte Buddy's registration of a * {@link ClassFileTransformer} what can cause a memory leak if the supplied caches are not cleared on a regular basis. Also note * that a cache provider can be accessed concurrently by multiple {@link ClassLoader}s. * </p> * <p> * All types that are returned by the locator's type pool are resolved lazily. * </p> */ abstract class WithTypePoolCache implements PoolStrategy { /** * The reader mode to use for parsing a class file. 
*/ protected final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator that creates {@link TypePool}s but provides a custom {@link net.bytebuddy.pool.TypePool.CacheProvider}. * * @param readerMode The reader mode to use for parsing a class file. */ protected WithTypePoolCache(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return new TypePool.Default.WithLazyResolution(locate(classLoader), classFileLocator, readerMode); } /** * Locates a cache provider for a given class loader. * * @param classLoader The class loader for which to locate a cache. This class loader might be {@code null} to represent the bootstrap loader. * @return The cache provider to use. */ protected abstract TypePool.CacheProvider locate(ClassLoader classLoader); @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; WithTypePoolCache that = (WithTypePoolCache) object; return readerMode == that.readerMode; } @Override public int hashCode() { return readerMode.hashCode(); } /** * An implementation of a type locator {@link WithTypePoolCache} (note documentation of the linked class) that is based on a * {@link ConcurrentMap}. It is the responsibility of the type locator's user to avoid the type locator from leaking memory. */ public static class Simple extends WithTypePoolCache { /** * The concurrent map that is used for storing a cache provider per class loader. */ private final ConcurrentMap<? super ClassLoader, TypePool.CacheProvider> cacheProviders; /** * Creates a new type locator that caches a cache provider per class loader in a concurrent map. The type * locator uses a fast {@link net.bytebuddy.pool.TypePool.Default.ReaderMode}. * * @param cacheProviders The concurrent map that is used for storing a cache provider per class loader. */ public Simple(ConcurrentMap<? super ClassLoader, TypePool.CacheProvider> cacheProviders) { this(TypePool.Default.ReaderMode.FAST, cacheProviders); } /** * Creates a new type locator that caches a cache provider per class loader in a concurrent map. * * @param readerMode The reader mode to use for parsing a class file. * @param cacheProviders The concurrent map that is used for storing a cache provider per class loader. */ public Simple(TypePool.Default.ReaderMode readerMode, ConcurrentMap<? super ClassLoader, TypePool.CacheProvider> cacheProviders) { super(readerMode); this.cacheProviders = cacheProviders; } @Override protected TypePool.CacheProvider locate(ClassLoader classLoader) { classLoader = classLoader == null ? getBootstrapMarkerLoader() : classLoader; TypePool.CacheProvider cacheProvider = cacheProviders.get(classLoader); while (cacheProvider == null) { cacheProvider = TypePool.CacheProvider.Simple.withObjectType(); TypePool.CacheProvider previous = cacheProviders.putIfAbsent(classLoader, cacheProvider); if (previous != null) { cacheProvider = previous; } } return cacheProvider; } /** * <p> * Returns the class loader to serve as a cache key if a cache provider for the bootstrap class loader is requested. * This class loader is represented by {@code null} in the JVM which is an invalid value for many {@link ConcurrentMap} * implementations. 
* </p> * <p> * By default, {@link ClassLoader#getSystemClassLoader()} is used as such a key as any resource location for the * bootstrap class loader is performed via the system class loader within Byte Buddy as {@code null} cannot be queried * for resources via method calls such that this does not make a difference. * </p> * * @return A class loader to represent the bootstrap class loader. */ protected ClassLoader getBootstrapMarkerLoader() { return ClassLoader.getSystemClassLoader(); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; if (!super.equals(object)) return false; Simple simple = (Simple) object; return cacheProviders.equals(simple.cacheProviders); } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + cacheProviders.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.PoolStrategy.WithTypePoolCache.Simple{" + "cacheProviders=" + cacheProviders + '}'; } } } } /** * An initialization strategy which determines the handling of {@link net.bytebuddy.implementation.LoadedTypeInitializer}s * and the loading of auxiliary types. The agent builder does not reuse the {@link TypeResolutionStrategy} as Javaagents cannot access * a loaded class after a transformation such that different initialization strategies become meaningful. */ interface InitializationStrategy { /** * Creates a new dispatcher for injecting this initialization strategy during a transformation process. * * @return The dispatcher to be used. */ Dispatcher dispatcher(); /** * A dispatcher for changing a class file to adapt a self-initialization strategy. */ interface Dispatcher { /** * Transforms the instrumented type to implement an appropriate initialization strategy. * * @param builder The builder which should implement the initialization strategy. * @return The given {@code builder} with the initialization strategy applied. */ DynamicType.Builder<?> apply(DynamicType.Builder<?> builder); /** * Registers a dynamic type for initialization and/or begins the initialization process. * * @param dynamicType The dynamic type that is created. * @param classLoader The class loader of the dynamic type. * @param injectorFactory The injector factory */ void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory); /** * A factory for creating a {@link ClassInjector} only if it is required. */ interface InjectorFactory { /** * Resolves the class injector for this factory. * * @return The class injector for this factory. */ ClassInjector resolve(); } } /** * A non-initializing initialization strategy. */ enum NoOp implements InitializationStrategy, Dispatcher { /** * The singleton instance. */ INSTANCE; @Override public Dispatcher dispatcher() { return this; } @Override public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder) { return builder; } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.InitializationStrategy.NoOp." + name(); } } /** * An initialization strategy that adds a code block to an instrumented type's type initializer which * then calls a specific class that is responsible for the explicit initialization. 
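 * <p>
 * A minimal sketch of selecting one of the self-injection strategies below (the eager variant is chosen arbitrarily for illustration):
 * </p>
 * <pre>{@code
 * AgentBuilder agentBuilder = new AgentBuilder.Default()
 *     .with(AgentBuilder.InitializationStrategy.SelfInjection.EAGER);
 * }</pre>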
*/ @SuppressFBWarnings(value = "DMI_RANDOM_USED_ONLY_ONCE", justification = "Avoiding synchronization without security concerns") enum SelfInjection implements InitializationStrategy { /** * A form of self-injection where auxiliary types that are annotated by * {@link net.bytebuddy.implementation.auxiliary.AuxiliaryType.SignatureRelevant} of the instrumented type are loaded lazily and * any other auxiliary type is loaded eagerly. */ SPLIT { @Override public InitializationStrategy.Dispatcher dispatcher() { return new SelfInjection.Dispatcher.Split(new Random().nextInt()); } }, /** * A form of self-injection where any auxiliary type is loaded lazily. */ LAZY { @Override public InitializationStrategy.Dispatcher dispatcher() { return new SelfInjection.Dispatcher.Lazy(new Random().nextInt()); } }, /** * A form of self-injection where any auxiliary type is loaded eagerly. */ EAGER { @Override public InitializationStrategy.Dispatcher dispatcher() { return new SelfInjection.Dispatcher.Eager(new Random().nextInt()); } }; @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection." + name(); } /** * A dispatcher for a self-initialization strategy. */ protected abstract static class Dispatcher implements InitializationStrategy.Dispatcher { /** * A random identification for the applied self-initialization. */ protected final int identification; /** * Creates a new dispatcher. * * @param identification A random identification for the applied self-initialization. */ protected Dispatcher(int identification) { this.identification = identification; } @Override public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder) { return builder.initializer(new NexusAccessor.InitializationAppender(identification)); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && identification == ((Dispatcher) other).identification; } @Override public int hashCode() { return identification; } /** * A dispatcher for the {@link net.bytebuddy.agent.builder.AgentBuilder.InitializationStrategy.SelfInjection#SPLIT} strategy. */ protected static class Split extends Dispatcher { /** * Creates a new split dispatcher. * * @param identification A random identification for the applied self-initialization. */ protected Split(int identification) { super(identification); } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); LoadedTypeInitializer loadedTypeInitializer; if (!auxiliaryTypes.isEmpty()) { TypeDescription instrumentedType = dynamicType.getTypeDescription(); ClassInjector classInjector = injectorFactory.resolve(); Map<TypeDescription, byte[]> independentTypes = new LinkedHashMap<TypeDescription, byte[]>(auxiliaryTypes); Map<TypeDescription, byte[]> dependentTypes = new LinkedHashMap<TypeDescription, byte[]>(auxiliaryTypes); for (TypeDescription auxiliaryType : auxiliaryTypes.keySet()) { (auxiliaryType.getDeclaredAnnotations().isAnnotationPresent(AuxiliaryType.SignatureRelevant.class) ? 
dependentTypes : independentTypes).remove(auxiliaryType); } Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers = dynamicType.getLoadedTypeInitializers(); if (!independentTypes.isEmpty()) { for (Map.Entry<TypeDescription, Class<?>> entry : classInjector.inject(independentTypes).entrySet()) { loadedTypeInitializers.get(entry.getKey()).onLoad(entry.getValue()); } } Map<TypeDescription, LoadedTypeInitializer> lazyInitializers = new HashMap<TypeDescription, LoadedTypeInitializer>(loadedTypeInitializers); loadedTypeInitializers.keySet().removeAll(independentTypes.keySet()); loadedTypeInitializer = lazyInitializers.size() > 1 // there exist auxiliary types that need lazy loading ? new InjectingInitializer(instrumentedType, dependentTypes, lazyInitializers, classInjector) : lazyInitializers.get(instrumentedType); } else { loadedTypeInitializer = dynamicType.getLoadedTypeInitializers().get(dynamicType.getTypeDescription()); } NexusAccessor.INSTANCE.register(dynamicType.getTypeDescription().getName(), classLoader, identification, loadedTypeInitializer); } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.Split{identification=" + identification + "}"; } } /** * A dispatcher for the {@link net.bytebuddy.agent.builder.AgentBuilder.InitializationStrategy.SelfInjection#LAZY} strategy. */ protected static class Lazy extends Dispatcher { /** * Creates a new lazy dispatcher. * * @param identification A random identification for the applied self-initialization. */ protected Lazy(int identification) { super(identification); } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); LoadedTypeInitializer loadedTypeInitializer = auxiliaryTypes.isEmpty() ? dynamicType.getLoadedTypeInitializers().get(dynamicType.getTypeDescription()) : new InjectingInitializer(dynamicType.getTypeDescription(), auxiliaryTypes, dynamicType.getLoadedTypeInitializers(), injectorFactory.resolve()); NexusAccessor.INSTANCE.register(dynamicType.getTypeDescription().getName(), classLoader, identification, loadedTypeInitializer); } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.Lazy{identification=" + identification + "}"; } } /** * A dispatcher for the {@link net.bytebuddy.agent.builder.AgentBuilder.InitializationStrategy.SelfInjection#EAGER} strategy. */ protected static class Eager extends Dispatcher { /** * Creates a new eager dispatcher. * * @param identification A random identification for the applied self-initialization. 
 */
protected Eager(int identification) {
    super(identification);
}

@Override
public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) {
    Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes();
    Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers = dynamicType.getLoadedTypeInitializers();
    if (!auxiliaryTypes.isEmpty()) {
        for (Map.Entry<TypeDescription, Class<?>> entry : injectorFactory.resolve().inject(auxiliaryTypes).entrySet()) {
            loadedTypeInitializers.get(entry.getKey()).onLoad(entry.getValue());
        }
    }
    LoadedTypeInitializer loadedTypeInitializer = loadedTypeInitializers.get(dynamicType.getTypeDescription());
    NexusAccessor.INSTANCE.register(dynamicType.getTypeDescription().getName(), classLoader, identification, loadedTypeInitializer);
}

@Override
public String toString() {
    return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.Eager{identification=" + identification + "}";
}
}

/**
 * A type initializer that injects all auxiliary types of the instrumented type.
 */
protected static class InjectingInitializer implements LoadedTypeInitializer {

    /**
     * The instrumented type.
     */
    private final TypeDescription instrumentedType;

    /**
     * The auxiliary types mapped to their class file representation.
     */
    private final Map<TypeDescription, byte[]> rawAuxiliaryTypes;

    /**
     * The instrumented types and auxiliary types mapped to their loaded type initializers.
     */
    private final Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers;

    /**
     * The class injector to use.
     */
    private final ClassInjector classInjector;

    /**
     * Creates a new injection initializer.
     *
     * @param instrumentedType       The instrumented type.
     * @param rawAuxiliaryTypes      The auxiliary types mapped to their class file representation.
     * @param loadedTypeInitializers The instrumented types and auxiliary types mapped to their loaded type initializers.
     * @param classInjector          The class injector to use.
*/ protected InjectingInitializer(TypeDescription instrumentedType, Map<TypeDescription, byte[]> rawAuxiliaryTypes, Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers, ClassInjector classInjector) { this.instrumentedType = instrumentedType; this.rawAuxiliaryTypes = rawAuxiliaryTypes; this.loadedTypeInitializers = loadedTypeInitializers; this.classInjector = classInjector; } @Override public void onLoad(Class<?> type) { for (Map.Entry<TypeDescription, Class<?>> auxiliary : classInjector.inject(rawAuxiliaryTypes).entrySet()) { loadedTypeInitializers.get(auxiliary.getKey()).onLoad(auxiliary.getValue()); } loadedTypeInitializers.get(instrumentedType).onLoad(type); } @Override public boolean isAlive() { return true; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; InjectingInitializer that = (InjectingInitializer) o; return classInjector.equals(that.classInjector) && instrumentedType.equals(that.instrumentedType) && rawAuxiliaryTypes.equals(that.rawAuxiliaryTypes) && loadedTypeInitializers.equals(that.loadedTypeInitializers); } @Override public int hashCode() { int result = instrumentedType.hashCode(); result = 31 * result + rawAuxiliaryTypes.hashCode(); result = 31 * result + loadedTypeInitializers.hashCode(); result = 31 * result + classInjector.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.InjectingInitializer{" + "instrumentedType=" + instrumentedType + ", rawAuxiliaryTypes=" + rawAuxiliaryTypes + ", loadedTypeInitializers=" + loadedTypeInitializers + ", classInjector=" + classInjector + '}'; } } } } /** * An initialization strategy that loads auxiliary types before loading the instrumented type. This strategy skips all types * that are a subtype of the instrumented type which would cause a premature loading of the instrumented type and abort * the instrumentation process. */ enum Minimal implements InitializationStrategy, Dispatcher { /** * The singleton instance. */ INSTANCE; @Override public Dispatcher dispatcher() { return this; } @Override public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder) { return builder; } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); Map<TypeDescription, byte[]> independentTypes = new LinkedHashMap<TypeDescription, byte[]>(auxiliaryTypes); for (TypeDescription auxiliaryType : auxiliaryTypes.keySet()) { if (!auxiliaryType.getDeclaredAnnotations().isAnnotationPresent(AuxiliaryType.SignatureRelevant.class)) { independentTypes.remove(auxiliaryType); } } if (!independentTypes.isEmpty()) { ClassInjector classInjector = injectorFactory.resolve(); Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers = dynamicType.getLoadedTypeInitializers(); for (Map.Entry<TypeDescription, Class<?>> entry : classInjector.inject(independentTypes).entrySet()) { loadedTypeInitializers.get(entry.getKey()).onLoad(entry.getValue()); } } } @Override public String toString() { return "AgentBuilder.InitializationStrategy.Minimal." + name(); } } } /** * A description strategy is responsible for resolving a {@link TypeDescription} when transforming or retransforming/-defining a type. */ interface DescriptionStrategy { /** * Describes the given type. * * @param typeName The binary name of the type to describe. 
 * @param type     The type that is being redefined, if a redefinition is applied or {@code null} if no redefined type is available.
 * @param typePool The type pool to use for locating a type if required.
 * @return An appropriate type description.
 */
TypeDescription apply(String typeName, Class<?> type, TypePool typePool);

/**
 * Indicates if this description strategy makes use of loaded type information and yields a different type description if no loaded type is available.
 *
 * @return {@code true} if this description strategy prefers loaded type information when describing a type and only uses a type pool
 * if loaded type information is not available.
 */
boolean isLoadedFirst();

/**
 * Default implementations of a {@link DescriptionStrategy}.
 */
enum Default implements DescriptionStrategy {

    /**
     * A description type strategy represents a type as a {@link net.bytebuddy.description.type.TypeDescription.ForLoadedType} if a
     * retransformation or redefinition is applied on a type. Using a loaded type typically results in better performance as no
     * I/O is required for resolving type descriptions. However, any interaction with the type is carried out via the Java reflection
     * API. Using the reflection API triggers eager loading of any type that is part of a method or field signature. If any of these
     * types are missing from the class path, this eager loading will cause a {@link NoClassDefFoundError}. Some Java code declares
     * optional dependencies to other classes which are only realized if the optional dependency is present. Such code relies on the
     * Java reflection API not being used for types using optional dependencies.
     *
     * @see FallbackStrategy.Simple#ENABLED
     * @see FallbackStrategy.ByThrowableType#ofOptionalTypes()
     */
    HYBRID(true) {
        @Override
        public TypeDescription apply(String typeName, Class<?> type, TypePool typePool) {
            return type == null
                    ? typePool.describe(typeName).resolve()
                    : new TypeDescription.ForLoadedType(type);
        }
    },

    /**
     * <p>
     * A description strategy that always describes Java types using a {@link TypePool}. This requires that any type - even if it is already
     * loaded and a {@link Class} instance is available - is processed as a non-loaded type description. Doing so can cause overhead as processing
     * loaded types is supported very efficiently by a JVM.
     * </p>
     * <p>
     * Avoiding the usage of loaded types can improve robustness as this approach does not rely on the Java reflection API which triggers eager
     * validation of this loaded type which can fail an application if optional types are used by any type's field or method signatures. Also, it
     * is possible to guarantee debugging meta data to be available also for retransformed or redefined types if a {@link TypeStrategy} specifies
     * the extraction of such meta data.
     * </p>
     */
    POOL_ONLY(false) {
        @Override
        public TypeDescription apply(String typeName, Class<?> type, TypePool typePool) {
            return typePool.describe(typeName).resolve();
        }
    },

    /**
     * <p>
     * A description strategy that always describes Java types using a {@link TypePool} unless a type cannot be resolved by a pool and a loaded
     * {@link Class} instance is available. Doing so can cause overhead as processing loaded types is supported very efficiently by a JVM.
     * </p>
     * <p>
     * Avoiding the usage of loaded types can improve robustness as this approach does not rely on the Java reflection API which triggers eager
     * validation of this loaded type which can fail an application if optional types are used by any type's field or method signatures.
Also, it * is possible to guarantee debugging meta data to be available also for retransformed or redefined types if a {@link TypeStrategy} specifies * the extraction of such meta data. * </p> */ POOL_FIRST(false) { @Override public TypeDescription apply(String typeName, Class<?> type, TypePool typePool) { TypePool.Resolution resolution = typePool.describe(typeName); return resolution.isResolved() || type == null ? resolution.resolve() : new TypeDescription.ForLoadedType(type); } }; /** * Indicates if loaded type information is preferred over using a type pool for describing a type. */ private final boolean loadedFirst; /** * Indicates if loaded type information is preferred over using a type pool for describing a type. * * @param loadedFirst {@code true} if loaded type information is preferred over using a type pool for describing a type. */ Default(boolean loadedFirst) { this.loadedFirst = loadedFirst; } @Override public boolean isLoadedFirst() { return loadedFirst; } @Override public String toString() { return "AgentBuilder.DescriptionStrategy.Default." + name(); } } } /** * An installation strategy determines the reaction to a raised exception after the registration of a {@link ClassFileTransformer}. */ interface InstallationStrategy { /** * Handles an error that occured after registering a class file transformer during installation. * * @param instrumentation The instrumentation onto which the class file transformer was registered. * @param classFileTransformer The class file transformer that was registered. * @param throwable The error that occurred. * @return The class file transformer to return when an error occurred. */ ResettableClassFileTransformer onError(Instrumentation instrumentation, ResettableClassFileTransformer classFileTransformer, Throwable throwable); /** * Default implementations of installation strategies. */ enum Default implements InstallationStrategy { /** * <p> * An installation strategy that unregisters the transformer and propagates the exception. Using this strategy does not guarantee * that the registered transformer was not applied to any class, nor does it attempt to revert previous transformations. It only * guarantees that the class file transformer is unregistered and does no longer apply after this method returns. * </p> * <p> * <b>Note</b>: This installation strategy does not undo any applied class redefinitions, if such were applied. * </p> */ ESCALATING { @Override public ResettableClassFileTransformer onError(Instrumentation instrumentation, ResettableClassFileTransformer classFileTransformer, Throwable throwable) { instrumentation.removeTransformer(classFileTransformer); throw new IllegalStateException("Could not install class file transformer", throwable); } }, /** * An installation strategy that retains the class file transformer and suppresses the error. */ SUPPRESSING { @Override public ResettableClassFileTransformer onError(Instrumentation instrumentation, ResettableClassFileTransformer classFileTransformer, Throwable throwable) { return classFileTransformer; } }; @Override public String toString() { return "AgentBuilder.InstallationStrategy.Default." + name(); } } } /** * A strategy for creating a {@link ClassFileLocator} when instrumenting a type. */ interface LocationStrategy { /** * Creates a class file locator for a given class loader and module combination. * * @param classLoader The class loader that is loading an instrumented type. Might be {@code null} to represent the bootstrap class loader. 
* @param module The type's module or {@code null} if Java modules are not supported on the current VM. * @return The class file locator to use. */ ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module); /** * A location strategy that never locates any byte code. */ enum NoOp implements LocationStrategy { /** * The singleton instance. */ INSTANCE; @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return ClassFileLocator.NoOp.INSTANCE; } @Override public String toString() { return "AgentBuilder.LocationStrategy.NoOp." + name(); } } /** * A location strategy that locates class files by querying an instrumented type's {@link ClassLoader}. */ enum ForClassLoader implements LocationStrategy { /** * A location strategy that keeps a strong reference to the class loader the created class file locator represents. */ STRONG { @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return ClassFileLocator.ForClassLoader.of(classLoader); } }, /** * A location strategy that keeps a weak reference to the class loader the created class file locator represents. * As a consequence, any returned class file locator stops working once the represented class loader is garbage collected. */ WEAK { @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return ClassFileLocator.ForClassLoader.WeaklyReferenced.of(classLoader); } }; /** * Adds additional location strategies as fallbacks to this location strategy. * * @param classFileLocator The class file locators to query if this location strategy cannot locate a class file. * @return A compound location strategy that first applies this location strategy and then queries the supplied class file locators. */ public LocationStrategy withFallbackTo(ClassFileLocator... classFileLocator) { return withFallbackTo(Arrays.asList(classFileLocator)); } /** * Adds additional location strategies as fallbacks to this location strategy. * * @param classFileLocators The class file locators to query if this location strategy cannot locate a class file. * @return A compound location strategy that first applies this location strategy and then queries the supplied class file locators. */ public LocationStrategy withFallbackTo(Collection<? extends ClassFileLocator> classFileLocators) { List<LocationStrategy> locationStrategies = new ArrayList<LocationStrategy>(classFileLocators.size()); for (ClassFileLocator classFileLocator : classFileLocators) { locationStrategies.add(new Simple(classFileLocator)); } return withFallbackTo(locationStrategies); } /** * Adds additional location strategies as fallbacks to this location strategy. * * @param locationStrategy The fallback location strategies to use. * @return A compound location strategy that first applies this location strategy and then the supplied fallback location strategies * in the supplied order. */ public LocationStrategy withFallbackTo(LocationStrategy... locationStrategy) { return withFallbackTo(Arrays.asList(locationStrategy)); } /** * Adds additional location strategies as fallbacks to this location strategy. * * @param locationStrategies The fallback location strategies to use. * @return A compound location strategy that first applies this location strategy and then the supplied fallback location strategies * in the supplied order. */ public LocationStrategy withFallbackTo(List<? 
extends LocationStrategy> locationStrategies) { List<LocationStrategy> allLocationStrategies = new ArrayList<LocationStrategy>(locationStrategies.size() + 1); allLocationStrategies.add(this); allLocationStrategies.addAll(locationStrategies); return new Compound(allLocationStrategies); } @Override public String toString() { return "AgentBuilder.LocationStrategy.ForClassLoader." + name(); } } /** * A simple location strategy that queries a given class file locator. */ class Simple implements LocationStrategy { /** * The class file locator to query. */ private final ClassFileLocator classFileLocator; /** * A simple location strategy that queries a given class file locator. * * @param classFileLocator The class file locator to query. */ public Simple(ClassFileLocator classFileLocator) { this.classFileLocator = classFileLocator; } @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return classFileLocator; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Simple simple = (Simple) object; return classFileLocator.equals(simple.classFileLocator); } @Override public int hashCode() { return classFileLocator.hashCode(); } @Override public String toString() { return "AgentBuilder.LocationStrategy.Simple{" + "classFileLocator=" + classFileLocator + '}'; } } /** * A compound location strategy that applies a list of location strategies. */ class Compound implements LocationStrategy { /** * The location strategies in their application order. */ private final List<? extends LocationStrategy> locationStrategies; /** * Creates a new compound location strategy. * * @param locationStrategy The location strategies in their application order. */ public Compound(LocationStrategy... locationStrategy) { this(Arrays.asList(locationStrategy)); } /** * Creates a new compound location strategy. * * @param locationStrategies The location strategies in their application order. */ public Compound(List<? extends LocationStrategy> locationStrategies) { this.locationStrategies = locationStrategies; } @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { List<ClassFileLocator> classFileLocators = new ArrayList<ClassFileLocator>(locationStrategies.size()); for (LocationStrategy locationStrategy : locationStrategies) { classFileLocators.add(locationStrategy.classFileLocator(classLoader, module)); } return new ClassFileLocator.Compound(classFileLocators); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Compound compound = (Compound) object; return locationStrategies.equals(compound.locationStrategies); } @Override public int hashCode() { return locationStrategies.hashCode(); } @Override public String toString() { return "AgentBuilder.LocationStrategy.Compound{" + "locationStrategies=" + locationStrategies + '}'; } } } /** * A fallback strategy allows to reattempt a transformation or a consideration for redefinition/retransformation in case an exception * occurs. Doing so, it is possible to use a {@link TypePool} rather than using a loaded type description backed by a {@link Class}. * Loaded types can raise exceptions and errors if a {@link ClassLoader} cannot resolve all types that this class references. Using * a type pool, such errors can be avoided as type descriptions can be resolved lazily, avoiding such errors. 
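 * <p>
 * As an illustration (a minimal sketch, assuming the {@code AgentBuilder.Default} implementation and its
 * {@code with(DescriptionStrategy)} and {@code with(FallbackStrategy)} setters), a fallback for optional
 * types is typically combined with a description strategy that prefers loaded type information:
 * </p>
 * <pre>{@code
 * // Sketch: assumes the with(DescriptionStrategy) and with(FallbackStrategy) setters named above.
 * AgentBuilder agentBuilder = new AgentBuilder.Default()
 *         .with(AgentBuilder.DescriptionStrategy.Default.HYBRID)
 *         .with(AgentBuilder.FallbackStrategy.ByThrowableType.ofOptionalTypes());
 * }</pre>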
 */
interface FallbackStrategy {

    /**
     * Returns {@code true} if the supplied type and throwable combination should result in a reattempt where the
     * loaded type is not used for querying information.
     *
     * @param type      The loaded type that was queried during the transformation attempt.
     * @param throwable The error or exception that was caused during the transformation.
     * @return {@code true} if the supplied type and throwable combination should result in a reattempt where the
     * loaded type is not used for querying information.
     */
    boolean isFallback(Class<?> type, Throwable throwable);

    /**
     * A simple fallback strategy that either always reattempts a transformation or never does so.
     */
    enum Simple implements FallbackStrategy {

        /**
         * An enabled fallback strategy that always attempts a new trial.
         */
        ENABLED(true),

        /**
         * A disabled fallback strategy that never attempts a new trial.
         */
        DISABLED(false);

        /**
         * {@code true} if this fallback strategy is enabled.
         */
        private final boolean enabled;

        /**
         * Creates a new default fallback strategy.
         *
         * @param enabled {@code true} if this fallback strategy is enabled.
         */
        Simple(boolean enabled) {
            this.enabled = enabled;
        }

        @Override
        public boolean isFallback(Class<?> type, Throwable throwable) {
            return enabled;
        }

        @Override
        public String toString() {
            return "AgentBuilder.FallbackStrategy.Simple." + name();
        }
    }

    /**
     * A fallback strategy that discriminates by the type of the {@link Throwable} that triggered a request.
     */
    class ByThrowableType implements FallbackStrategy {

        /**
         * A set of throwable types that should trigger a fallback attempt.
         */
        private final Set<? extends Class<? extends Throwable>> types;

        /**
         * Creates a new throwable type-discriminating fallback strategy.
         *
         * @param type The throwable types that should trigger a fallback.
         */
        @SuppressWarnings("unchecked") // In absence of @SafeVarargs for Java 6
        public ByThrowableType(Class<? extends Throwable>... type) {
            this(new HashSet<Class<? extends Throwable>>(Arrays.asList(type)));
        }

        /**
         * Creates a new throwable type-discriminating fallback strategy.
         *
         * @param types The throwable types that should trigger a fallback.
         */
        public ByThrowableType(Set<? extends Class<? extends Throwable>> types) {
            this.types = types;
        }

        /**
         * Creates a fallback strategy that attempts a fallback if an error indicating a type error is the reason for requesting a reattempt.
         *
         * @return A fallback strategy that triggers a reattempt if a {@link LinkageError} or a {@link TypeNotPresentException} is raised.
         */
        @SuppressWarnings("unchecked") // In absence of @SafeVarargs for Java 6
        public static FallbackStrategy ofOptionalTypes() {
            return new ByThrowableType(LinkageError.class, TypeNotPresentException.class);
        }

        @Override
        public boolean isFallback(Class<?> type, Throwable throwable) {
            for (Class<? extends Throwable> aType : types) {
                if (aType.isInstance(throwable)) {
                    return true;
                }
            }
            return false;
        }

        @Override
        public boolean equals(Object object) {
            if (this == object) return true;
            if (object == null || getClass() != object.getClass()) return false;
            ByThrowableType byType = (ByThrowableType) object;
            return types.equals(byType.types);
        }

        @Override
        public int hashCode() {
            return types.hashCode();
        }

        @Override
        public String toString() {
            return "AgentBuilder.FallbackStrategy.ByThrowableType{" +
                    "types=" + types +
                    '}';
        }
    }
}

/**
 * <p>
 * A redefinition strategy regulates how already loaded classes are modified by a built agent.
 * </p>
 * <p>
 * <b>Important</b>: Most JVMs do not support changes of a class's structure after a class was already
 * loaded.
Therefore, it is typically required that this class file transformer was built while enabling * {@link AgentBuilder#disableClassFormatChanges()}. * </p> */ enum RedefinitionStrategy { /** * Disables redefinition such that already loaded classes are not affected by the agent. */ DISABLED { @Override protected boolean isRetransforming(Instrumentation instrumentation) { return false; } @Override protected Delegate<?> make(Default.Transformation transformation) { throw new IllegalStateException("A disabled redefinition strategy cannot create a collector"); } }, /** * <p> * Applies a <b>redefinition</b> to all classes that are already loaded and that would have been transformed if * the built agent was registered before they were loaded. The created {@link ClassFileTransformer} is <b>not</b> * registered for applying retransformations. * </p> * <p> * Using this strategy, a redefinition is applied as a single transformation request. This means that a single illegal * redefinition of a class causes the entire redefinition attempt to fail. * </p> * <p> * <b>Note</b>: When applying a redefinition, it is normally required to use a {@link TypeStrategy} that applies * a redefinition instead of rebasing classes such as {@link TypeStrategy.Default#REDEFINE}. Also, consider * the constrains given by this type strategy. * </p> */ REDEFINITION { @Override protected boolean isRetransforming(Instrumentation instrumentation) { if (!instrumentation.isRedefineClassesSupported()) { throw new IllegalArgumentException("Cannot redefine classes: " + instrumentation); } return false; } @Override protected Delegate<?> make(Default.Transformation transformation) { return new Delegate.ForRedefinition(transformation); } }, /** * <p> * Applies a <b>retransformation</b> to all classes that are already loaded and that would have been transformed if * the built agent was registered before they were loaded. The created {@link ClassFileTransformer} is registered * for applying retransformations. * </p> * <p> * Using this strategy, a retransformation is applied as a single transformation request. This means that a single illegal * retransformation of a class causes the entire retransformation attempt to fail. * </p> * <p> * <b>Note</b>: When applying a redefinition, it is normally required to use a {@link TypeStrategy} that applies * a redefinition instead of rebasing classes such as {@link TypeStrategy.Default#REDEFINE}. Also, consider * the constrains given by this type strategy. * </p> */ RETRANSFORMATION { @Override protected boolean isRetransforming(Instrumentation instrumentation) { if (!instrumentation.isRetransformClassesSupported()) { throw new IllegalArgumentException("Cannot retransform classes: " + instrumentation); } return true; } @Override protected Delegate<?> make(Default.Transformation transformation) { return new Delegate.ForRetransformation(transformation); } }; /** * Indicates if this strategy requires a class file transformer to be registered with a hint to apply the * transformer for retransformation. * * @param instrumentation The instrumentation instance used. * @return {@code true} if a class file transformer must be registered with a hint for retransformation. */ protected abstract boolean isRetransforming(Instrumentation instrumentation); /** * Indicates that this redefinition strategy applies a modification of already loaded classes. * * @return {@code true} if this redefinition strategy applies a modification of already loaded classes. 
 */
protected boolean isEnabled() {
    return this != DISABLED;
}

/**
 * Creates a collector instance that is responsible for collecting loaded classes for potential retransformation.
 *
 * @param transformation The transformation that is registered for the agent.
 * @return A new collector for collecting already loaded classes for transformation.
 */
protected abstract Delegate<?> make(Default.Transformation transformation);

@Override
public String toString() {
    return "AgentBuilder.RedefinitionStrategy." + name();
}

/**
 * A batch allocator which is responsible for applying a redefinition in batches. A class redefinition or
 * retransformation can be a time-consuming operation rendering a JVM non-responsive. In combination with
 * a {@link RedefinitionStrategy.Listener}, it is also possible to apply pauses between batches to distribute
 * the load of a retransformation over time.
 */
public interface BatchAllocator {

    /**
     * Splits a list of types to be retransformed into separate batches.
     *
     * @param types A list of types which should be retransformed.
     * @return An iterable of retransformations within a batch.
     */
    Iterable<? extends List<Class<?>>> batch(List<Class<?>> types);

    /**
     * A batch allocator that includes all types in a single batch.
     */
    enum ForTotal implements BatchAllocator {

        /**
         * The singleton instance.
         */
        INSTANCE;

        @Override
        public Iterable<? extends List<Class<?>>> batch(List<Class<?>> types) {
            return Collections.singleton(types);
        }

        @Override
        public String toString() {
            return "AgentBuilder.RedefinitionStrategy.BatchAllocator.ForTotal." + name();
        }
    }

    /**
     * A batch allocator that creates chunks with a fixed size as batch jobs.
     */
    class ForFixedSize implements BatchAllocator {

        /**
         * The size of each chunk.
         */
        private final int size;

        /**
         * Creates a new batch allocator that creates fixed-sized chunks.
         *
         * @param size The size of each chunk.
         */
        protected ForFixedSize(int size) {
            this.size = size;
        }

        /**
         * Creates a new batch allocator that creates chunks of a fixed size.
         *
         * @param size The size of each chunk or {@code 0} if the batch should be included in a single chunk.
         * @return An appropriate batch allocator.
         */
        public static BatchAllocator ofSize(int size) {
            if (size > 0) {
                return new ForFixedSize(size);
            } else if (size == 0) {
                return ForTotal.INSTANCE;
            } else {
                throw new IllegalArgumentException("Cannot define a batch with a negative size: " + size);
            }
        }

        @Override
        public Iterable<? extends List<Class<?>>> batch(List<Class<?>> types) {
            List<List<Class<?>>> batches = new ArrayList<List<Class<?>>>();
            for (int index = 0; index < types.size(); index += size) {
                batches.add(new ArrayList<Class<?>>(types.subList(index, Math.min(types.size(), index + size))));
            }
            return batches;
        }

        @Override
        public boolean equals(Object object) {
            if (this == object) return true;
            if (object == null || getClass() != object.getClass()) return false;
            ForFixedSize that = (ForFixedSize) object;
            return size == that.size;
        }

        @Override
        public int hashCode() {
            return size;
        }

        @Override
        public String toString() {
            return "AgentBuilder.RedefinitionStrategy.BatchAllocator.ForFixedSize{" +
                    "size=" + size +
                    '}';
        }
    }
}

/**
 * A failure handler to apply during a retransformation.
 */
public interface FailureHandler {

    /**
     * Invoked when a batch of a retransformation failed.
     *
     * @param types     The types included in the batch.
     * @param throwable The throwable indicating the failure.
     * @return {@code true} if the batch failure should be considered as handled.
*/ boolean onBatchFailure(List<Class<?>> types, Throwable throwable); /** * Invoked after all batches were completed. * * @param failures A map of all failures that were not considered as handled. */ void onFailure(Map<List<Class<?>>, Throwable> failures); /** * Default implementations of {@link FailureHandler}s. */ enum Default implements FailureHandler { /** * A fail fast failure handler fails a redefinition on the first failed batch. */ FAIL_FAST { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { throw new IllegalStateException("Could not transform " + types, throwable); } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { throw new IllegalStateException("Unexpected recovery from batch failure"); } }, /** * A fail last failure handler fails a redefinition after all batches were run if at least one batch failed. */ FAIL_LAST { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { return false; } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { throw new IllegalStateException("Could not transform " + failures); } }, /** * A suppressing failure handler ignores any failed batches. */ IGNORING { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { return false; } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } }, /** * A suppressing failure handler ignores any failed batches and does not expose them to the any listeners. */ SUPPRESSING { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { return true; } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } }; @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.FailureHandler.Default." + name(); } } } /** * A listener to be applied during a redefinition. */ public interface Listener { /** * Invoked before applying a batch. * * @param index A running index of the batch starting at {@code 0}. * @param batch The types included in this batch. * @param types All types included in the retransformation. */ void onBatch(int index, List<Class<?>> batch, List<Class<?>> types); /** * Invoked upon an error during a batch. This method is not invoked if the failure handler handled this error. * * @param index A running index of the batch starting at {@code 0}. * @param batch The types included in this batch. * @param throwable The throwable that caused this invocation. * @param types All types included in the retransformation. */ void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types); /** * Invoked upon completion of all batches. * * @param index A total amount of batches that were executed. * @param types All types included in the retransformation. * @param failures A mapping of batch types to their unhandled failures. */ void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures); /** * A non-operational listener. */ enum NoOp implements Listener { /** * The singleton instance. 
*/ INSTANCE; @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { /* do nothing */ } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { /* do nothing */ } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.NoOp." + name(); } } /** * A listener that invokes {@link Thread#yield()} prior to every batch but the first batch. */ enum Yielding implements Listener { /** * The singleton instance. */ INSTANCE; @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { if (index > 0) { Thread.yield(); } } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { /* do nothing */ } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.Yielding." + name(); } } /** * A listener adapter that offers non-operational implementations of all listener methods. */ abstract class Adapter implements Listener { @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { /* do nothing */ } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { /* do nothing */ } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } } /** * A listener that invokes {@link Thread#sleep(long)} prior to every batch but the first batch. */ class Pausing extends Adapter { /** * The time to sleep in milliseconds between every two batches. */ private final long value; /** * Creates a new pausing listener. * * @param value The time to sleep in milliseconds between every two batches. */ protected Pausing(long value) { this.value = value; } /** * Creates a listener that pauses for the specified amount of time. If the specified value is {@code 0}, a * non-operational listener is returned. * * @param value The amount of time to pause between redefinition batches. * @param timeUnit The time unit of {@code value}. * @return An appropriate listener. */ public static Listener of(long value, TimeUnit timeUnit) { if (value > 0L) { return new Pausing(timeUnit.toMillis(value)); } else if (value == 0L) { return NoOp.INSTANCE; } else { throw new IllegalArgumentException("Cannot sleep for a non-positive amount of time: " + value); } } @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { if (index > 0) { try { Thread.sleep(value); } catch (InterruptedException exception) { throw new RuntimeException("Sleep was interrupted", exception); } } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Pausing pausing = (Pausing) object; return value == pausing.value; } @Override public int hashCode() { return (int) (value ^ (value >>> 32)); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.Pausing{" + "value=" + value + '}'; } } /** * A listener that writes events to a {@link PrintStream}. */ class StreamWriting implements Listener { /** * The print stream to write any events to. 
*/ private final PrintStream printStream; /** * Creates a new stream writing listener. * * @param printStream The print stream to write any events to. */ public StreamWriting(PrintStream printStream) { this.printStream = printStream; } /** * Writes the stream result to {@link System#out}. * * @return An appropriate listener. */ public static Listener toSystemOut() { return new StreamWriting(System.out); } /** * Writes the stream result to {@link System#err}. * * @return An appropriate listener. */ public static Listener toSystemError() { return new StreamWriting(System.err); } @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { printStream.println(AgentBuilder.Listener.StreamWriting.PREFIX + " RETRANSFORM BATCH #" + index + " (" + batch.size() + " types)"); } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { synchronized (printStream) { printStream.println(AgentBuilder.Listener.StreamWriting.PREFIX + " RETRANSFORM ERROR #" + index + " (" + batch.size() + " types)"); throwable.printStackTrace(printStream); } } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { printStream.println(AgentBuilder.Listener.StreamWriting.PREFIX + " RETRANSFORM COMPLETE " + index + " batches (" + failures.size() + " errors)"); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; StreamWriting streamWriting = (StreamWriting) object; return printStream.equals(streamWriting.printStream); } @Override public int hashCode() { return printStream.hashCode(); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.StreamWriting{" + "printStream=" + printStream + '}'; } } /** * A compound listener that delegates events to several listeners. */ class Compound implements Listener { /** * The listeners to invoke. */ private final List<? extends Listener> listeners; /** * Creates a new compound listener. * * @param listener The listeners to invoke. */ protected Compound(Listener... listener) { this(Arrays.asList(listener)); } /** * Creates a new compound listener. * * @param listeners The listeners to invoke. */ protected Compound(List<? extends Listener> listeners) { this.listeners = listeners; } @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { for (Listener listener : listeners) { listener.onBatch(index, batch, types); } } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { for (Listener listener : listeners) { listener.onError(index, batch, throwable, types); } } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { for (Listener listener : listeners) { listener.onComplete(index, types, failures); } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Compound compound = (Compound) object; return listeners.equals(compound.listeners); } @Override public int hashCode() { return listeners.hashCode(); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.Compound{" + "listeners=" + listeners + '}'; } } } /** * A collector is responsible for collecting classes that are to be considered for modification. 
* * @param <T> The type of element that is supplied to the instrumentation API. */ protected abstract static class Delegate<T> { /** * The transformation of the built agent. */ protected final Default.Transformation transformation; /** * A list of already collected redefinitions. */ protected final List<Class<?>> types; /** * Creates a new delegate. * * @param transformation The transformation of the built agent. */ protected Delegate(Default.Transformation transformation) { this.transformation = transformation; types = new ArrayList<Class<?>>(); } /** * Considers the supplied type for redefinition. * * @param ignoredTypeMatcher The ignored type matcher. * @param listener The listener to notify. * @param typeDescription The type's description. * @param type The type being redefined. * @param classBeingRedefined The type being redefined or {@code null} if it should be considered unavailable. * @param module The redefined type's module or {@code null} if the current VM does not support the module system. */ protected void consider(RawMatcher ignoredTypeMatcher, AgentBuilder.Listener listener, TypeDescription typeDescription, Class<?> type, Class<?> classBeingRedefined, JavaModule module) { consider(ignoredTypeMatcher, listener, typeDescription, type, classBeingRedefined, module, false); } /** * Considers the supplied type for redefinition. * * @param ignoredTypeMatcher The ignored type matcher. * @param listener The listener to notify. * @param typeDescription The type's description. * @param type The type being redefined. * @param classBeingRedefined The type being redefined or {@code null} if it should be considered unavailable. * @param module The redefined type's module or {@code null} if the current VM does not support the module system. * @param unmodifiable {@code true} if the type should be seen as unmodifiable. */ protected void consider(RawMatcher ignoredTypeMatcher, AgentBuilder.Listener listener, TypeDescription typeDescription, Class<?> type, Class<?> classBeingRedefined, JavaModule module, boolean unmodifiable) { if (unmodifiable || !(transformation.isAlive(typeDescription, type.getClassLoader(), JavaModule.ofType(type), classBeingRedefined, type.getProtectionDomain(), ignoredTypeMatcher) && types.add(type))) { try { try { listener.onIgnored(typeDescription, type.getClassLoader(), module); } finally { listener.onComplete(typeDescription.getName(), type.getClassLoader(), module); } } catch (Throwable ignored) { // Ignore exceptions that are thrown by listeners to mimic the behavior of a transformation. } } } /** * Applies the current retransformation process. * * @param instrumentation The instrumentation instance to apply the redefinition upon. * @param locationStrategy The location strategy to use. * @param listener The listener to notify. * @param redefinitionBatchAllocator The redefinition batch allocator to use. * @param redefinitionListener The redefinition listener to use. * @param redefinitionFailureHandler The redefinition failure handler to use. 
*/ protected void apply(Instrumentation instrumentation, LocationStrategy locationStrategy, AgentBuilder.Listener listener, BatchAllocator redefinitionBatchAllocator, Listener redefinitionListener, FailureHandler redefinitionFailureHandler) { int index = 0; Map<List<Class<?>>, Throwable> failures = new HashMap<List<Class<?>>, Throwable>(); for (List<Class<?>> batch : redefinitionBatchAllocator.batch(types)) { List<T> transformations = new ArrayList<T>(batch.size()); for (Class<?> type : batch) { try { transformations.add(transform(type, locationStrategy)); } catch (Throwable throwable) { JavaModule module = JavaModule.ofType(type); try { listener.onError(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module, throwable); } finally { listener.onComplete(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module); } } } redefinitionListener.onBatch(index, batch, types); if (!transformations.isEmpty()) { try { doApply(transformations, instrumentation); } catch (Throwable throwable) { if (!redefinitionFailureHandler.onBatchFailure(batch, throwable)) { failures.put(batch, throwable); redefinitionListener.onError(index, batch, throwable, types); } } finally { index++; } } } redefinitionListener.onComplete(index, types, failures); if (!failures.isEmpty()) { redefinitionFailureHandler.onFailure(failures); } } /** * Turns a type into a transformation-ready primitive of the current redefinition process. * * @param type The type to transform. * @param locationStrategy The location strategy to use. * @return A primitive of the current redefinition process. * @throws IOException If an I/O error occured. */ protected abstract T transform(Class<?> type, LocationStrategy locationStrategy) throws IOException; /** * Applies a type redefinition. * * @param transformations The transformations to apply. * @param instrumentation The instrumentation instance to apply the redefinition on. * @throws UnmodifiableClassException If a class was not modifiable. * @throws ClassNotFoundException If a class was not found. */ protected abstract void doApply(List<T> transformations, Instrumentation instrumentation) throws UnmodifiableClassException, ClassNotFoundException; @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Delegate." + getClass().getSimpleName() + "{" + "transformation=" + transformation + ", types=" + types + '}'; } /** * A delegate that applies a <b>redefinition</b> of already loaded classes. */ protected static class ForRedefinition extends Delegate<ClassDefinition> { /** * Creates a new delegate for a redefinition. * * @param transformation The transformation of the built agent. */ protected ForRedefinition(Default.Transformation transformation) { super(transformation); } @Override protected ClassDefinition transform(Class<?> type, LocationStrategy locationStrategy) throws IOException { return new ClassDefinition(type, locationStrategy.classFileLocator(type.getClassLoader(), JavaModule.ofType(type)) .locate(TypeDescription.ForLoadedType.getName(type)) .resolve()); } @Override protected void doApply(List<ClassDefinition> transformations, Instrumentation instrumentation) throws UnmodifiableClassException, ClassNotFoundException { instrumentation.redefineClasses(transformations.toArray(new ClassDefinition[transformations.size()])); } } /** * A delegate that applies a <b>retransformation</b> of already loaded classes. */ protected static class ForRetransformation extends Delegate<Class<?>> { /** * Creates a new delegate for a retransformation. 
* * @param transformation The transformation to apply. */ protected ForRetransformation(Default.Transformation transformation) { super(transformation); } @Override protected Class<?> transform(Class<?> type, LocationStrategy locationStrategy) { return type; } @Override protected void doApply(List<Class<?>> transformations, Instrumentation instrumentation) throws UnmodifiableClassException { instrumentation.retransformClasses(transformations.toArray(new Class<?>[transformations.size()])); } } } } /** * Implements the instrumentation of the {@code LambdaMetafactory} if this feature is enabled. */ enum LambdaInstrumentationStrategy { /** * A strategy that enables instrumentation of the {@code LambdaMetafactory} if such a factory exists on the current VM. * Classes representing lambda expressions that are created by Byte Buddy are fully compatible to those created by * the JVM and can be serialized or deserialized to one another. The classes do however show a few differences: * <ul> * <li>Byte Buddy's classes are public with a public executing transformer. Doing so, it is not necessary to instantiate a * non-capturing lambda expression by reflection. This is done because Byte Buddy is not necessarily capable * of using reflection due to an active security manager.</li> * <li>Byte Buddy's classes are not marked as synthetic as an agent builder does not instrument synthetic classes * by default.</li> * </ul> */ ENABLED { @Override protected void apply(ByteBuddy byteBuddy, Instrumentation instrumentation, ClassFileTransformer classFileTransformer) { if (LambdaFactory.register(classFileTransformer, new LambdaInstanceFactory(byteBuddy), LambdaInjector.INSTANCE)) { Class<?> lambdaMetaFactory; try { lambdaMetaFactory = Class.forName("java.lang.invoke.LambdaMetafactory"); } catch (ClassNotFoundException ignored) { return; } byteBuddy.with(Implementation.Context.Disabled.Factory.INSTANCE) .redefine(lambdaMetaFactory) .visit(new AsmVisitorWrapper.ForDeclaredMethods() .method(named("metafactory"), MetaFactoryRedirection.INSTANCE) .method(named("altMetafactory"), AlternativeMetaFactoryRedirection.INSTANCE)) .make() .load(lambdaMetaFactory.getClassLoader(), ClassReloadingStrategy.of(instrumentation)); } } }, /** * A strategy that does not instrument the {@code LambdaMetafactory}. */ DISABLED { @Override protected void apply(ByteBuddy byteBuddy, Instrumentation instrumentation, ClassFileTransformer classFileTransformer) { /* do nothing */ } }; /** * Indicates that an original implementation can be ignored when redefining a method. */ protected static final MethodVisitor IGNORE_ORIGINAL = null; /** * Releases the supplied class file transformer when it was built with {@link AgentBuilder#with(LambdaInstrumentationStrategy)} enabled. * Subsequently, the class file transformer is no longer applied when a class that represents a lambda expression is created. * * @param classFileTransformer The class file transformer to release. * @param instrumentation The instrumentation instance that is used to potentially rollback the instrumentation of the {@code LambdaMetafactory}. 
*/ public static void release(ClassFileTransformer classFileTransformer, Instrumentation instrumentation) { if (LambdaFactory.release(classFileTransformer)) { try { ClassReloadingStrategy.of(instrumentation).reset(Class.forName("java.lang.invoke.LambdaMetafactory")); } catch (Exception exception) { throw new IllegalStateException("Could not release lambda transformer", exception); } } } /** * Returns an enabled lambda instrumentation strategy for {@code true}. * * @param enabled If lambda instrumentation should be enabled. * @return {@code true} if the returned strategy should be enabled. */ public static LambdaInstrumentationStrategy of(boolean enabled) { return enabled ? ENABLED : DISABLED; } /** * Applies a transformation to lambda instances if applicable. * * @param byteBuddy The Byte Buddy instance to use. * @param instrumentation The instrumentation instance for applying a redefinition. * @param classFileTransformer The class file transformer to apply. */ protected abstract void apply(ByteBuddy byteBuddy, Instrumentation instrumentation, ClassFileTransformer classFileTransformer); /** * Indicates if this strategy enables instrumentation of the {@code LambdaMetafactory}. * * @return {@code true} if this strategy is enabled. */ public boolean isEnabled() { return this == ENABLED; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy." + name(); } /** * An injector for injecting the lambda class dispatcher to the system class path. */ protected enum LambdaInjector implements Callable<Class<?>> { /** * The singleton instance. */ INSTANCE; @Override public Class<?> call() throws Exception { TypeDescription lambdaFactory = new TypeDescription.ForLoadedType(LambdaFactory.class); return ClassInjector.UsingReflection.ofSystemClassLoader() .inject(Collections.singletonMap(lambdaFactory, ClassFileLocator.ForClassLoader.read(LambdaFactory.class).resolve())) .get(lambdaFactory); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInjector." + name(); } } /** * A factory that creates instances that represent lambda expressions. */ protected static class LambdaInstanceFactory { /** * The name of a factory for a lambda expression. */ private static final String LAMBDA_FACTORY = "get$Lambda"; /** * A prefix for a field that represents a property of a lambda expression. */ private static final String FIELD_PREFIX = "arg$"; /** * The infix to use for naming classes that represent lambda expression. The additional prefix * is necessary because the subsequent counter is not sufficient to keep names unique compared * to the original factory. */ private static final String LAMBDA_TYPE_INFIX = "$$Lambda$ByteBuddy$"; /** * A type-safe constant to express that a class is not already loaded when applying a class file transformer. */ private static final Class<?> NOT_PREVIOUSLY_DEFINED = null; /** * A counter for naming lambda expressions randomly. */ private static final AtomicInteger LAMBDA_NAME_COUNTER = new AtomicInteger(); /** * The Byte Buddy instance to use for creating lambda objects. */ private final ByteBuddy byteBuddy; /** * Creates a new lambda instance factory. * * @param byteBuddy The Byte Buddy instance to use for creating lambda objects. */ protected LambdaInstanceFactory(ByteBuddy byteBuddy) { this.byteBuddy = byteBuddy; } /** * Applies this lambda meta factory. * * @param targetTypeLookup A lookup context representing the creating class of this lambda expression. 
 * @param lambdaMethodName            The name of the lambda expression's represented method.
 * @param factoryMethodType           The type of the lambda expression's factory method.
 * @param lambdaMethodType            The type of the lambda expression's represented method.
 * @param targetMethodHandle          A handle representing the target of the lambda expression's method.
 * @param specializedLambdaMethodType A specialization of the type of the lambda expression's represented method.
 * @param serializable                {@code true} if the lambda expression should be serializable.
 * @param markerInterfaces            A list of interfaces for the lambda expression to represent.
 * @param additionalBridges           A list of additional bridge methods to be implemented by the lambda expression.
 * @param classFileTransformers       A collection of class file transformers to apply when creating the class.
 * @return A binary representation of the transformed class file.
 */
public byte[] make(Object targetTypeLookup,
                   String lambdaMethodName,
                   Object factoryMethodType,
                   Object lambdaMethodType,
                   Object targetMethodHandle,
                   Object specializedLambdaMethodType,
                   boolean serializable,
                   List<Class<?>> markerInterfaces,
                   List<?> additionalBridges,
                   Collection<? extends ClassFileTransformer> classFileTransformers) {
    JavaConstant.MethodType factoryMethod = JavaConstant.MethodType.ofLoaded(factoryMethodType);
    JavaConstant.MethodType lambdaMethod = JavaConstant.MethodType.ofLoaded(lambdaMethodType);
    JavaConstant.MethodHandle targetMethod = JavaConstant.MethodHandle.ofLoaded(targetMethodHandle, targetTypeLookup);
    JavaConstant.MethodType specializedLambdaMethod = JavaConstant.MethodType.ofLoaded(specializedLambdaMethodType);
    Class<?> targetType = JavaConstant.MethodHandle.lookupType(targetTypeLookup);
    String lambdaClassName = targetType.getName() + LAMBDA_TYPE_INFIX + LAMBDA_NAME_COUNTER.incrementAndGet();
    DynamicType.Builder<?> builder = byteBuddy
            .subclass(factoryMethod.getReturnType(), ConstructorStrategy.Default.NO_CONSTRUCTORS)
            .modifiers(TypeManifestation.FINAL, Visibility.PUBLIC)
            .implement(markerInterfaces)
            .name(lambdaClassName)
            .defineConstructor(Visibility.PUBLIC)
            .withParameters(factoryMethod.getParameterTypes())
            .intercept(ConstructorImplementation.INSTANCE)
            .method(named(lambdaMethodName)
                    .and(takesArguments(lambdaMethod.getParameterTypes()))
                    .and(returns(lambdaMethod.getReturnType())))
            .intercept(new LambdaMethodImplementation(targetMethod, specializedLambdaMethod));
    int index = 0;
    for (TypeDescription capturedType : factoryMethod.getParameterTypes()) {
        builder = builder.defineField(FIELD_PREFIX + ++index, capturedType, Visibility.PRIVATE, FieldManifestation.FINAL);
    }
    if (!factoryMethod.getParameterTypes().isEmpty()) {
        builder = builder.defineMethod(LAMBDA_FACTORY, factoryMethod.getReturnType(), Visibility.PRIVATE, Ownership.STATIC)
                .withParameters(factoryMethod.getParameterTypes())
                .intercept(FactoryImplementation.INSTANCE);
    }
    if (serializable) {
        if (!markerInterfaces.contains(Serializable.class)) {
            builder = builder.implement(Serializable.class);
        }
        builder = builder.defineMethod("writeReplace", Object.class, Visibility.PRIVATE)
                .intercept(new SerializationImplementation(new TypeDescription.ForLoadedType(targetType),
                        factoryMethod.getReturnType(),
                        lambdaMethodName,
                        lambdaMethod,
                        targetMethod,
                        JavaConstant.MethodType.ofLoaded(specializedLambdaMethodType)));
    } else if (factoryMethod.getReturnType().isAssignableTo(Serializable.class)) {
        builder = builder.defineMethod("readObject", void.class, Visibility.PRIVATE)
                .withParameters(ObjectInputStream.class)
.throwing(NotSerializableException.class) .intercept(ExceptionMethod.throwing(NotSerializableException.class, "Non-serializable lambda")) .defineMethod("writeObject", void.class, Visibility.PRIVATE) .withParameters(ObjectOutputStream.class) .throwing(NotSerializableException.class) .intercept(ExceptionMethod.throwing(NotSerializableException.class, "Non-serializable lambda")); } for (Object additionalBridgeType : additionalBridges) { JavaConstant.MethodType additionalBridge = JavaConstant.MethodType.ofLoaded(additionalBridgeType); builder = builder.defineMethod(lambdaMethodName, additionalBridge.getReturnType(), MethodManifestation.BRIDGE, Visibility.PUBLIC) .withParameters(additionalBridge.getParameterTypes()) .intercept(new BridgeMethodImplementation(lambdaMethodName, lambdaMethod)); } byte[] classFile = builder.make().getBytes(); for (ClassFileTransformer classFileTransformer : classFileTransformers) { try { byte[] transformedClassFile = classFileTransformer.transform(targetType.getClassLoader(), lambdaClassName.replace('.', '/'), NOT_PREVIOUSLY_DEFINED, targetType.getProtectionDomain(), classFile); classFile = transformedClassFile == null ? classFile : transformedClassFile; } catch (Throwable ignored) { /* do nothing */ } } return classFile; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && byteBuddy.equals(((LambdaInstanceFactory) other).byteBuddy); } @Override public int hashCode() { return byteBuddy.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory{" + "byteBuddy=" + byteBuddy + '}'; } /** * Implements a lambda class's executing transformer. */ @SuppressFBWarnings(value = "SE_BAD_FIELD", justification = "An enumeration does not serialize fields") protected enum ConstructorImplementation implements Implementation { /** * The singleton instance. */ INSTANCE; /** * A reference to the {@link Object} class's default executing transformer. */ private final MethodDescription.InDefinedShape objectConstructor; /** * Creates a new executing transformer implementation. */ ConstructorImplementation() { objectConstructor = TypeDescription.OBJECT.getDeclaredMethods().filter(isConstructor()).getOnly(); } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.getInstrumentedType().getDeclaredFields()); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.ConstructorImplementation." + name(); } /** * An appender to implement the executing transformer. */ protected static class Appender implements ByteCodeAppender { /** * The fields that are declared by the instrumented type. */ private final List<FieldDescription.InDefinedShape> declaredFields; /** * Creates a new appender. * * @param declaredFields The fields that are declared by the instrumented type. 
*/ protected Appender(List<FieldDescription.InDefinedShape> declaredFields) { this.declaredFields = declaredFields; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { List<StackManipulation> fieldAssignments = new ArrayList<StackManipulation>(declaredFields.size() * 3); for (ParameterDescription parameterDescription : instrumentedMethod.getParameters()) { fieldAssignments.add(MethodVariableAccess.REFERENCE.loadOffset(0)); fieldAssignments.add(MethodVariableAccess.of(parameterDescription.getType()).loadOffset(parameterDescription.getOffset())); fieldAssignments.add(FieldAccess.forField(declaredFields.get(parameterDescription.getIndex())).putter()); } return new Size(new StackManipulation.Compound( MethodVariableAccess.REFERENCE.loadOffset(0), MethodInvocation.invoke(INSTANCE.objectConstructor), new StackManipulation.Compound(fieldAssignments), MethodReturn.VOID ).apply(methodVisitor, implementationContext).getMaximalSize(), instrumentedMethod.getStackSize()); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && declaredFields.equals(((Appender) other).declaredFields); } @Override public int hashCode() { return declaredFields.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.ConstructorImplementation.Appender{" + "declaredFields=" + declaredFields + '}'; } } } /** * An implementation of a instance factory for a lambda expression's class. */ protected enum FactoryImplementation implements Implementation { /** * The singleton instance. */ INSTANCE; @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.getInstrumentedType()); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.FactoryImplementation." + name(); } /** * An appender for a lambda expression factory. */ protected static class Appender implements ByteCodeAppender { /** * The instrumented type. */ private final TypeDescription instrumentedType; /** * Creates a new appender. * * @param instrumentedType The instrumented type. */ protected Appender(TypeDescription instrumentedType) { this.instrumentedType = instrumentedType; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { return new Size(new StackManipulation.Compound( TypeCreation.of(instrumentedType), Duplication.SINGLE, MethodVariableAccess.allArgumentsOf(instrumentedMethod), MethodInvocation.invoke(instrumentedType.getDeclaredMethods().filter(isConstructor()).getOnly()), MethodReturn.REFERENCE ).apply(methodVisitor, implementationContext).getMaximalSize(), instrumentedMethod.getStackSize()); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && instrumentedType.equals(((Appender) other).instrumentedType); } @Override public int hashCode() { return instrumentedType.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.FactoryImplementation.Appender{" + "instrumentedType=" + instrumentedType + '}'; } } } /** * Implements a lambda expression's functional method. 
*/ protected static class LambdaMethodImplementation implements Implementation { /** * The handle of the target method of the lambda expression. */ private final JavaConstant.MethodHandle targetMethod; /** * The specialized type of the lambda method. */ private final JavaConstant.MethodType specializedLambdaMethod; /** * Creates a implementation of a lambda expression's functional method. * * @param targetMethod The target method of the lambda expression. * @param specializedLambdaMethod The specialized type of the lambda method. */ protected LambdaMethodImplementation(JavaConstant.MethodHandle targetMethod, JavaConstant.MethodType specializedLambdaMethod) { this.targetMethod = targetMethod; this.specializedLambdaMethod = specializedLambdaMethod; } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(targetMethod.getOwnerType() .getDeclaredMethods() .filter(named(targetMethod.getName()) .and(returns(targetMethod.getReturnType())) .and(takesArguments(targetMethod.getParameterTypes()))) .getOnly(), specializedLambdaMethod, implementationTarget.getInstrumentedType().getDeclaredFields()); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; LambdaMethodImplementation that = (LambdaMethodImplementation) other; return targetMethod.equals(that.targetMethod) && specializedLambdaMethod.equals(that.specializedLambdaMethod); } @Override public int hashCode() { int result = targetMethod.hashCode(); result = 31 * result + specializedLambdaMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.LambdaMethodImplementation{" + "targetMethod=" + targetMethod + ", specializedLambdaMethod=" + specializedLambdaMethod + '}'; } /** * An appender for a lambda expression's functional method. */ protected static class Appender implements ByteCodeAppender { /** * The target method of the lambda expression. */ private final MethodDescription targetMethod; /** * The specialized type of the lambda method. */ private final JavaConstant.MethodType specializedLambdaMethod; /** * The instrumented type's declared fields. */ private final List<FieldDescription.InDefinedShape> declaredFields; /** * Creates an appender of a lambda expression's functional method. * * @param targetMethod The target method of the lambda expression. * @param specializedLambdaMethod The specialized type of the lambda method. * @param declaredFields The instrumented type's declared fields. 
*/ protected Appender(MethodDescription targetMethod, JavaConstant.MethodType specializedLambdaMethod, List<FieldDescription.InDefinedShape> declaredFields) { this.targetMethod = targetMethod; this.specializedLambdaMethod = specializedLambdaMethod; this.declaredFields = declaredFields; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { List<StackManipulation> fieldAccess = new ArrayList<StackManipulation>(declaredFields.size() * 2); for (FieldDescription.InDefinedShape fieldDescription : declaredFields) { fieldAccess.add(MethodVariableAccess.REFERENCE.loadOffset(0)); fieldAccess.add(FieldAccess.forField(fieldDescription).getter()); } List<StackManipulation> parameterAccess = new ArrayList<StackManipulation>(instrumentedMethod.getParameters().size() * 2); for (ParameterDescription parameterDescription : instrumentedMethod.getParameters()) { parameterAccess.add(MethodVariableAccess.of(parameterDescription.getType()).loadOffset(parameterDescription.getOffset())); parameterAccess.add(Assigner.DEFAULT.assign(parameterDescription.getType(), specializedLambdaMethod.getParameterTypes().get(parameterDescription.getIndex()).asGenericType(), Assigner.Typing.DYNAMIC)); } return new Size(new StackManipulation.Compound( new StackManipulation.Compound(fieldAccess), new StackManipulation.Compound(parameterAccess), MethodInvocation.invoke(targetMethod), MethodReturn.returning(targetMethod.getReturnType().asErasure()) ).apply(methodVisitor, implementationContext).getMaximalSize(), instrumentedMethod.getStackSize()); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Appender appender = (Appender) other; return targetMethod.equals(appender.targetMethod) && declaredFields.equals(appender.declaredFields) && specializedLambdaMethod.equals(appender.specializedLambdaMethod); } @Override public int hashCode() { int result = targetMethod.hashCode(); result = 31 * result + declaredFields.hashCode(); result = 31 * result + specializedLambdaMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.LambdaMethodImplementation.Appender{" + "targetMethod=" + targetMethod + ", specializedLambdaMethod=" + specializedLambdaMethod + ", declaredFields=" + declaredFields + '}'; } } } /** * Implements the {@code writeReplace} method for serializable lambda expressions. */ protected static class SerializationImplementation implements Implementation { /** * The lambda expression's declaring type. */ private final TypeDescription targetType; /** * The lambda expression's functional type. */ private final TypeDescription lambdaType; /** * The lambda expression's functional method name. */ private final String lambdaMethodName; /** * The method type of the lambda expression's functional method. */ private final JavaConstant.MethodType lambdaMethod; /** * A handle that references the lambda expressions invocation target. */ private final JavaConstant.MethodHandle targetMethod; /** * The specialized method type of the lambda expression's functional method. */ private final JavaConstant.MethodType specializedMethod; /** * Creates a new implementation for a serializable's lambda expression's {@code writeReplace} method. * * @param targetType The lambda expression's declaring type. * @param lambdaType The lambda expression's functional type. 
* @param lambdaMethodName The lambda expression's functional method name. * @param lambdaMethod The method type of the lambda expression's functional method. * @param targetMethod A handle that references the lambda expressions invocation target. * @param specializedMethod The specialized method type of the lambda expression's functional method. */ protected SerializationImplementation(TypeDescription targetType, TypeDescription lambdaType, String lambdaMethodName, JavaConstant.MethodType lambdaMethod, JavaConstant.MethodHandle targetMethod, JavaConstant.MethodType specializedMethod) { this.targetType = targetType; this.lambdaType = lambdaType; this.lambdaMethodName = lambdaMethodName; this.lambdaMethod = lambdaMethod; this.targetMethod = targetMethod; this.specializedMethod = specializedMethod; } @Override public ByteCodeAppender appender(Target implementationTarget) { TypeDescription serializedLambda; try { serializedLambda = new TypeDescription.ForLoadedType(Class.forName("java.lang.invoke.SerializedLambda")); } catch (ClassNotFoundException exception) { throw new IllegalStateException("Cannot find class for lambda serialization", exception); } List<StackManipulation> lambdaArguments = new ArrayList<StackManipulation>(implementationTarget.getInstrumentedType().getDeclaredFields().size()); for (FieldDescription.InDefinedShape fieldDescription : implementationTarget.getInstrumentedType().getDeclaredFields()) { lambdaArguments.add(new StackManipulation.Compound(MethodVariableAccess.REFERENCE.loadOffset(0), FieldAccess.forField(fieldDescription).getter(), Assigner.DEFAULT.assign(fieldDescription.getType(), TypeDescription.Generic.OBJECT, Assigner.Typing.STATIC))); } return new ByteCodeAppender.Simple(new StackManipulation.Compound( TypeCreation.of(serializedLambda), Duplication.SINGLE, ClassConstant.of(targetType), new TextConstant(lambdaType.getInternalName()), new TextConstant(lambdaMethodName), new TextConstant(lambdaMethod.getDescriptor()), IntegerConstant.forValue(targetMethod.getHandleType().getIdentifier()), new TextConstant(targetMethod.getOwnerType().getInternalName()), new TextConstant(targetMethod.getName()), new TextConstant(targetMethod.getDescriptor()), new TextConstant(specializedMethod.getDescriptor()), ArrayFactory.forType(TypeDescription.Generic.OBJECT).withValues(lambdaArguments), MethodInvocation.invoke(serializedLambda.getDeclaredMethods().filter(isConstructor()).getOnly()), MethodReturn.REFERENCE )); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; SerializationImplementation that = (SerializationImplementation) other; return targetType.equals(that.targetType) && lambdaType.equals(that.lambdaType) && lambdaMethodName.equals(that.lambdaMethodName) && lambdaMethod.equals(that.lambdaMethod) && targetMethod.equals(that.targetMethod) && specializedMethod.equals(that.specializedMethod); } @Override public int hashCode() { int result = targetType.hashCode(); result = 31 * result + lambdaType.hashCode(); result = 31 * result + lambdaMethodName.hashCode(); result = 31 * result + lambdaMethod.hashCode(); result = 31 * result + targetMethod.hashCode(); result = 31 * result + specializedMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.SerializationImplementation{" + "targetType=" + 
targetType + ", lambdaType=" + lambdaType + ", lambdaMethodName='" + lambdaMethodName + '\'' + ", lambdaMethod=" + lambdaMethod + ", targetMethod=" + targetMethod + ", specializedMethod=" + specializedMethod + '}'; } } /** * Implements an explicit bridge method for a lambda expression. */ protected static class BridgeMethodImplementation implements Implementation { /** * The name of the lambda expression's functional method. */ private final String lambdaMethodName; /** * The actual type of the lambda expression's functional method. */ private final JavaConstant.MethodType lambdaMethod; /** * Creates a new bridge method implementation for a lambda expression. * * @param lambdaMethodName The name of the lambda expression's functional method. * @param lambdaMethod The actual type of the lambda expression's functional method. */ protected BridgeMethodImplementation(String lambdaMethodName, JavaConstant.MethodType lambdaMethod) { this.lambdaMethodName = lambdaMethodName; this.lambdaMethod = lambdaMethod; } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.invokeSuper(new MethodDescription.SignatureToken(lambdaMethodName, lambdaMethod.getReturnType(), lambdaMethod.getParameterTypes()))); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; BridgeMethodImplementation that = (BridgeMethodImplementation) other; return lambdaMethodName.equals(that.lambdaMethodName) && lambdaMethod.equals(that.lambdaMethod); } @Override public int hashCode() { int result = lambdaMethodName.hashCode(); result = 31 * result + lambdaMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.BridgeMethodImplementation{" + "lambdaMethodName='" + lambdaMethodName + '\'' + ", lambdaMethod=" + lambdaMethod + '}'; } /** * An appender for implementing a bridge method for a lambda expression. */ protected static class Appender implements ByteCodeAppender { /** * The invocation of the bridge's target method. */ private final SpecialMethodInvocation bridgeTargetInvocation; /** * Creates a new appender for invoking a lambda expression's bridge method target. * * @param bridgeTargetInvocation The invocation of the bridge's target method. */ protected Appender(SpecialMethodInvocation bridgeTargetInvocation) { this.bridgeTargetInvocation = bridgeTargetInvocation; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { return new Compound(new Simple( MethodVariableAccess.allArgumentsOf(instrumentedMethod) .asBridgeOf(bridgeTargetInvocation.getMethodDescription()) .prependThisReference(), bridgeTargetInvocation, bridgeTargetInvocation.getMethodDescription().getReturnType().asErasure().isAssignableTo(instrumentedMethod.getReturnType().asErasure()) ? 
StackManipulation.Trivial.INSTANCE : TypeCasting.to(instrumentedMethod.getReceiverType().asErasure()), MethodReturn.returning(instrumentedMethod.getReturnType().asErasure()) )).apply(methodVisitor, implementationContext, instrumentedMethod); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && bridgeTargetInvocation.equals(((Appender) other).bridgeTargetInvocation); } @Override public int hashCode() { return bridgeTargetInvocation.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.BridgeMethodImplementation.Appender{" + "bridgeTargetInvocation=" + bridgeTargetInvocation + '}'; } } } } /** * Implements the regular lambda meta factory. The implementation represents the following code: * <blockquote><pre> * public static CallSite metafactory(MethodHandles.Lookup caller, * String invokedName, * MethodType invokedType, * MethodType samMethodType, * MethodHandle implMethod, * MethodType instantiatedMethodType) throws Exception { * Unsafe unsafe = Unsafe.getUnsafe(); * {@code Class<?>} lambdaClass = unsafe.defineAnonymousClass(caller.lookupClass(), * (byte[]) ClassLoader.getSystemClassLoader().loadClass("net.bytebuddy.agent.builder.LambdaFactory").getDeclaredMethod("make", * Object.class, * String.class, * Object.class, * Object.class, * Object.class, * Object.class, * boolean.class, * List.class, * List.class).invoke(null, * caller, * invokedName, * invokedType, * samMethodType, * implMethod, * instantiatedMethodType, * false, * Collections.emptyList(), * Collections.emptyList()), * null); * unsafe.ensureClassInitialized(lambdaClass); * return invokedType.parameterCount() == 0 * ? new ConstantCallSite(MethodHandles.constant(invokedType.returnType(), lambdaClass.getDeclaredConstructors()[0].newInstance())) * : new ConstantCallSite(MethodHandles.Lookup.IMPL_LOOKUP.findStatic(lambdaClass, "get$Lambda", invokedType)); * </pre></blockquote> */ protected enum MetaFactoryRedirection implements AsmVisitorWrapper.ForDeclaredMethods.MethodVisitorWrapper { /** * The singleton instance. 
*/ INSTANCE; @Override public MethodVisitor wrap(TypeDescription instrumentedType, MethodDescription.InDefinedShape methodDescription, MethodVisitor methodVisitor, ClassFileVersion classFileVersion, int writerFlags, int readerFlags) { methodVisitor.visitCode(); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "sun/misc/Unsafe", "getUnsafe", "()Lsun/misc/Unsafe;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/ClassLoader", "getSystemClassLoader", "()Ljava/lang/ClassLoader;", false); methodVisitor.visitLdcInsn("net.bytebuddy.agent.builder.LambdaFactory"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/ClassLoader", "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;", false); methodVisitor.visitLdcInsn("make"); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/String;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/Boolean", "TYPE", "Ljava/lang/Class;"); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredMethod", "(Ljava/lang/String;[Ljava/lang/Class;)Ljava/lang/reflect/Method;", false); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitVarInsn(Opcodes.ALOAD, 1); methodVisitor.visitInsn(Opcodes.AASTORE); 
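            // The remaining argument slots receive the invoked method type, the functional method type, the implementation
            // handle and the instantiated method type, followed by a false serializability flag and two empty lists, before
            // LambdaFactory.make is invoked reflectively and the resulting class file is defined via Unsafe.defineAnonymousClass
            // (see the equivalent Java code in the Javadoc above).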
methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitVarInsn(Opcodes.ALOAD, 4); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 5); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Collections", "emptyList", "()Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Collections", "emptyList", "()Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Method", "invoke", "(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "[B"); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "defineAnonymousClass", "(Ljava/lang/Class;[B[Ljava/lang/Object;)Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 7); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "ensureClassInitialized", "(Ljava/lang/Class;)V", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "parameterCount", "()I", false); Label conditionalDefault = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFNE, conditionalDefault); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "returnType", "()Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredConstructors", "()[Ljava/lang/reflect/Constructor;", false); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Constructor", "newInstance", "([Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/invoke/MethodHandles", "constant", "(Ljava/lang/Class;Ljava/lang/Object;)Ljava/lang/invoke/MethodHandle;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", 
"(Ljava/lang/invoke/MethodHandle;)V", false); Label conditionalAlternative = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, conditionalAlternative); methodVisitor.visitLabel(conditionalDefault); methodVisitor.visitFrame(Opcodes.F_APPEND, 2, new Object[]{"sun/misc/Unsafe", "java/lang/Class"}, 0, null); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/invoke/MethodHandles$Lookup", "IMPL_LOOKUP", "Ljava/lang/invoke/MethodHandles$Lookup;"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitLdcInsn("get$Lambda"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false); methodVisitor.visitLabel(conditionalAlternative); methodVisitor.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[]{"java/lang/invoke/CallSite"}); methodVisitor.visitInsn(Opcodes.ARETURN); methodVisitor.visitMaxs(8, 8); methodVisitor.visitEnd(); return IGNORE_ORIGINAL; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.MetaFactoryRedirection." + name(); } } /** * Implements the alternative lambda meta factory. The implementation represents the following code: * <blockquote><pre> * public static CallSite altMetafactory(MethodHandles.Lookup caller, * String invokedName, * MethodType invokedType, * Object... args) throws Exception { * int flags = (Integer) args[3]; * int argIndex = 4; * {@code Class<?>[]} markerInterface; * if ((flags {@code &} FLAG_MARKERS) != 0) { * int markerCount = (Integer) args[argIndex++]; * markerInterface = new {@code Class<?>}[markerCount]; * System.arraycopy(args, argIndex, markerInterface, 0, markerCount); * argIndex += markerCount; * } else { * markerInterface = new {@code Class<?>}[0]; * } * MethodType[] additionalBridge; * if ((flags {@code &} FLAG_BRIDGES) != 0) { * int bridgeCount = (Integer) args[argIndex++]; * additionalBridge = new MethodType[bridgeCount]; * System.arraycopy(args, argIndex, additionalBridge, 0, bridgeCount); * // argIndex += bridgeCount; * } else { * additionalBridge = new MethodType[0]; * } * Unsafe unsafe = Unsafe.getUnsafe(); * {@code Class<?>} lambdaClass = unsafe.defineAnonymousClass(caller.lookupClass(), * (byte[]) ClassLoader.getSystemClassLoader().loadClass("net.bytebuddy.agent.builder.LambdaFactory").getDeclaredMethod("make", * Object.class, * String.class, * Object.class, * Object.class, * Object.class, * Object.class, * boolean.class, * List.class, * List.class).invoke(null, * caller, * invokedName, * invokedType, * args[0], * args[1], * args[2], * (flags {@code &} FLAG_SERIALIZABLE) != 0, * Arrays.asList(markerInterface), * Arrays.asList(additionalBridge)), * null); * unsafe.ensureClassInitialized(lambdaClass); * return invokedType.parameterCount() == 0 * ? 
new ConstantCallSite(MethodHandles.constant(invokedType.returnType(), lambdaClass.getDeclaredConstructors()[0].newInstance())) * : new ConstantCallSite(MethodHandles.Lookup.IMPL_LOOKUP.findStatic(lambdaClass, "get$Lambda", invokedType)); * } * </pre></blockquote> */ protected enum AlternativeMetaFactoryRedirection implements AsmVisitorWrapper.ForDeclaredMethods.MethodVisitorWrapper { /** * The singleton instance. */ INSTANCE; @Override public MethodVisitor wrap(TypeDescription instrumentedType, MethodDescription.InDefinedShape methodDescription, MethodVisitor methodVisitor, ClassFileVersion classFileVersion, int writerFlags, int readerFlags) { methodVisitor.visitCode(); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/Integer"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Integer", "intValue", "()I", false); methodVisitor.visitVarInsn(Opcodes.ISTORE, 4); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitVarInsn(Opcodes.ISTORE, 5); methodVisitor.visitVarInsn(Opcodes.ILOAD, 4); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitInsn(Opcodes.IAND); Label markerInterfaceLoop = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFEQ, markerInterfaceLoop); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitIincInsn(5, 1); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/Integer"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Integer", "intValue", "()I", false); methodVisitor.visitVarInsn(Opcodes.ISTORE, 7); methodVisitor.visitVarInsn(Opcodes.ILOAD, 7); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ILOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitVarInsn(Opcodes.ILOAD, 7); methodVisitor.visitInsn(Opcodes.IADD); methodVisitor.visitVarInsn(Opcodes.ISTORE, 5); Label markerInterfaceExit = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, markerInterfaceExit); methodVisitor.visitLabel(markerInterfaceLoop); methodVisitor.visitFrame(Opcodes.F_APPEND, 2, new Object[]{Opcodes.INTEGER, Opcodes.INTEGER}, 0, null); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 6); methodVisitor.visitLabel(markerInterfaceExit); methodVisitor.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"[Ljava/lang/Class;"}, 0, null); methodVisitor.visitVarInsn(Opcodes.ILOAD, 4); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitInsn(Opcodes.IAND); Label additionalBridgesLoop = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFEQ, additionalBridgesLoop); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitIincInsn(5, 1); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/Integer"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Integer", "intValue", 
"()I", false); methodVisitor.visitVarInsn(Opcodes.ISTORE, 8); methodVisitor.visitVarInsn(Opcodes.ILOAD, 8); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/invoke/MethodType"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 7); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ILOAD, 8); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false); Label additionalBridgesExit = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, additionalBridgesExit); methodVisitor.visitLabel(additionalBridgesLoop); methodVisitor.visitFrame(Opcodes.F_SAME, 0, null, 0, null); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/invoke/MethodType"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 7); methodVisitor.visitLabel(additionalBridgesExit); methodVisitor.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"[Ljava/lang/invoke/MethodType;"}, 0, null); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "sun/misc/Unsafe", "getUnsafe", "()Lsun/misc/Unsafe;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/ClassLoader", "getSystemClassLoader", "()Ljava/lang/ClassLoader;", false); methodVisitor.visitLdcInsn("net.bytebuddy.agent.builder.LambdaFactory"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/ClassLoader", "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;", false); methodVisitor.visitLdcInsn("make"); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/String;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/Boolean", "TYPE", "Ljava/lang/Class;"); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); 
methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredMethod", "(Ljava/lang/String;[Ljava/lang/Class;)Ljava/lang/reflect/Method;", false); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitVarInsn(Opcodes.ALOAD, 1); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitVarInsn(Opcodes.ILOAD, 4); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitInsn(Opcodes.IAND); Label callSiteConditional = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFEQ, callSiteConditional); methodVisitor.visitInsn(Opcodes.ICONST_1); Label callSiteAlternative = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, callSiteAlternative); methodVisitor.visitLabel(callSiteConditional); methodVisitor.visitFrame(Opcodes.F_FULL, 9, new Object[]{"java/lang/invoke/MethodHandles$Lookup", "java/lang/String", "java/lang/invoke/MethodType", "[Ljava/lang/Object;", Opcodes.INTEGER, Opcodes.INTEGER, "[Ljava/lang/Class;", "[Ljava/lang/invoke/MethodType;", "sun/misc/Unsafe"}, 7, new Object[]{"sun/misc/Unsafe", "java/lang/Class", "java/lang/reflect/Method", Opcodes.NULL, "[Ljava/lang/Object;", "[Ljava/lang/Object;", Opcodes.INTEGER}); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitLabel(callSiteAlternative); methodVisitor.visitFrame(Opcodes.F_FULL, 9, new Object[]{"java/lang/invoke/MethodHandles$Lookup", "java/lang/String", "java/lang/invoke/MethodType", "[Ljava/lang/Object;", Opcodes.INTEGER, Opcodes.INTEGER, "[Ljava/lang/Class;", "[Ljava/lang/invoke/MethodType;", "sun/misc/Unsafe"}, 8, new Object[]{"sun/misc/Unsafe", "java/lang/Class", "java/lang/reflect/Method", Opcodes.NULL, "[Ljava/lang/Object;", "[Ljava/lang/Object;", Opcodes.INTEGER, Opcodes.INTEGER}); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); 
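            // Slots 7 and 8 of the argument array receive the collected marker interfaces and additional bridge
            // method types as lists before LambdaFactory.make is invoked reflectively.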
methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Arrays", "asList", "([Ljava/lang/Object;)Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Arrays", "asList", "([Ljava/lang/Object;)Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Method", "invoke", "(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "[B"); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "defineAnonymousClass", "(Ljava/lang/Class;[B[Ljava/lang/Object;)Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 9); methodVisitor.visitVarInsn(Opcodes.ALOAD, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 9); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "ensureClassInitialized", "(Ljava/lang/Class;)V", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "parameterCount", "()I", false); Label callSiteJump = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFNE, callSiteJump); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "returnType", "()Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 9); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredConstructors", "()[Ljava/lang/reflect/Constructor;", false); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Constructor", "newInstance", "([Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/invoke/MethodHandles", "constant", "(Ljava/lang/Class;Ljava/lang/Object;)Ljava/lang/invoke/MethodHandle;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false); Label callSiteExit = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, callSiteExit); methodVisitor.visitLabel(callSiteJump); methodVisitor.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"java/lang/Class"}, 0, null); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/invoke/MethodHandles$Lookup", "IMPL_LOOKUP", "Ljava/lang/invoke/MethodHandles$Lookup;"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 9); methodVisitor.visitLdcInsn("get$Lambda"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;", false); 
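            // Lambdas that capture state bind the call site to the generated class's static get$Lambda factory handle;
            // the fall-through branch above binds a single eagerly created instance as a constant for stateless lambdas.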
methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false); methodVisitor.visitLabel(callSiteExit); methodVisitor.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[]{"java/lang/invoke/CallSite"}); methodVisitor.visitInsn(Opcodes.ARETURN); methodVisitor.visitMaxs(9, 10); methodVisitor.visitEnd(); return IGNORE_ORIGINAL; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.AlternativeMetaFactoryRedirection." + name(); } } } /** * <p> * The default implementation of an {@link net.bytebuddy.agent.builder.AgentBuilder}. * </p> * <p> * By default, Byte Buddy ignores any types loaded by the bootstrap class loader and * any synthetic type. Self-injection and rebasing is enabled. In order to avoid class format changes, set * {@link AgentBuilder#disableBootstrapInjection()}). All types are parsed without their debugging information ({@link PoolStrategy.Default#FAST}). * </p> */ class Default implements AgentBuilder.Redefining { /** * The name of the Byte Buddy {@code net.bytebuddy.agent.Installer} class. */ private static final String INSTALLER_TYPE = "net.bytebuddy.agent.Installer"; /** * The name of the {@code net.bytebuddy.agent.Installer} field containing an installed {@link Instrumentation}. */ private static final String INSTRUMENTATION_FIELD = "instrumentation"; /** * Indicator for access to a static member via reflection to make the code more readable. */ private static final Object STATIC_FIELD = null; /** * The value that is to be returned from a {@link java.lang.instrument.ClassFileTransformer} to indicate * that no class file transformation is to be applied. */ private static final byte[] NO_TRANSFORMATION = null; /** * Indicates that a loaded type should be considered as non-available. */ private static final Class<?> NO_LOADED_TYPE = null; /** * The {@link net.bytebuddy.ByteBuddy} instance to be used. */ private final ByteBuddy byteBuddy; /** * The listener to notify on transformations. */ private final Listener listener; /** * The type locator to use. */ private final PoolStrategy poolStrategy; /** * The definition handler to use. */ private final TypeStrategy typeStrategy; /** * The location strategy to use. */ private final LocationStrategy locationStrategy; /** * The native method strategy to use. */ private final NativeMethodStrategy nativeMethodStrategy; /** * The initialization strategy to use for creating classes. */ private final InitializationStrategy initializationStrategy; /** * The redefinition strategy to apply. */ private final RedefinitionStrategy redefinitionStrategy; /** * The batch allocator for the redefinition strategy to apply. */ private final RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator; /** * The failure handler for the redefinition strategy to apply. */ private final RedefinitionStrategy.FailureHandler redefinitionFailureHandler; /** * The redefinition listener for the redefinition strategy to apply. */ private final RedefinitionStrategy.Listener redefinitionListener; /** * The injection strategy for injecting classes into the bootstrap class loader. */ private final BootstrapInjectionStrategy bootstrapInjectionStrategy; /** * A strategy to determine of the {@code LambdaMetafactory} should be instrumented to allow for the instrumentation * of classes that represent lambda expressions. 
*/ private final LambdaInstrumentationStrategy lambdaInstrumentationStrategy; /** * The description strategy for resolving type descriptions for types. */ private final DescriptionStrategy descriptionStrategy; /** * The installation strategy to use. */ private final InstallationStrategy installationStrategy; /** * The fallback strategy to apply. */ private final FallbackStrategy fallbackStrategy; /** * Identifies types that should not be instrumented. */ private final RawMatcher ignoredTypeMatcher; /** * The transformation object for handling type transformations. */ private final Transformation transformation; /** * Creates a new default agent builder that uses a default {@link net.bytebuddy.ByteBuddy} instance for creating classes. */ public Default() { this(new ByteBuddy()); } /** * Creates a new agent builder with default settings. By default, Byte Buddy ignores any types loaded by the bootstrap class loader, any * type within a {@code net.bytebuddy} package and any synthetic type. Self-injection and rebasing is enabled. In order to avoid class format * changes, set {@link AgentBuilder#disableBootstrapInjection()}). All types are parsed without their debugging information * ({@link PoolStrategy.Default#FAST}). * * @param byteBuddy The Byte Buddy instance to be used. */ public Default(ByteBuddy byteBuddy) { this(byteBuddy, Listener.NoOp.INSTANCE, PoolStrategy.Default.FAST, TypeStrategy.Default.REBASE, LocationStrategy.ForClassLoader.STRONG, NativeMethodStrategy.Disabled.INSTANCE, InitializationStrategy.SelfInjection.SPLIT, RedefinitionStrategy.DISABLED, RedefinitionStrategy.BatchAllocator.ForTotal.INSTANCE, RedefinitionStrategy.FailureHandler.Default.FAIL_FAST, RedefinitionStrategy.Listener.NoOp.INSTANCE, BootstrapInjectionStrategy.Disabled.INSTANCE, LambdaInstrumentationStrategy.DISABLED, DescriptionStrategy.Default.HYBRID, InstallationStrategy.Default.ESCALATING, FallbackStrategy.ByThrowableType.ofOptionalTypes(), new RawMatcher.Disjunction(new RawMatcher.ForElementMatchers(any(), isBootstrapClassLoader(), any()), new RawMatcher.ForElementMatchers(nameStartsWith("net.bytebuddy.").or(nameStartsWith("sun.reflect.")).<TypeDescription>or(isSynthetic()), any(), any())), Transformation.Ignored.INSTANCE); } /** * Creates a new default agent builder. * * @param byteBuddy The Byte Buddy instance to be used. * @param listener The listener to notify on transformations. * @param poolStrategy The type locator to use. * @param typeStrategy The definition handler to use. * @param locationStrategy The location strategy to use. * @param nativeMethodStrategy The native method strategy to apply. * @param initializationStrategy The initialization strategy to use for transformed types. * @param redefinitionStrategy The redefinition strategy to apply. * @param redefinitionBatchAllocator The batch allocator for the redefinition strategy to apply. * @param redefinitionFailureHandler The failure handler for the redefinition strategy to apply. * @param redefinitionListener The redefinition listener for the redefinition strategy to apply. * @param bootstrapInjectionStrategy The injection strategy for injecting classes into the bootstrap class loader. * @param lambdaInstrumentationStrategy A strategy to determine of the {@code LambdaMetafactory} should be instrumented to allow for the * instrumentation of classes that represent lambda expressions. * @param descriptionStrategy The description strategy for resolving type descriptions for types. * @param installationStrategy The installation strategy to use. 
* @param fallbackStrategy The fallback strategy to apply. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @param transformation The transformation object for handling type transformations. */ protected Default(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, RedefinitionStrategy redefinitionStrategy, RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator, RedefinitionStrategy.FailureHandler redefinitionFailureHandler, RedefinitionStrategy.Listener redefinitionListener, BootstrapInjectionStrategy bootstrapInjectionStrategy, LambdaInstrumentationStrategy lambdaInstrumentationStrategy, DescriptionStrategy descriptionStrategy, InstallationStrategy installationStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { this.byteBuddy = byteBuddy; this.poolStrategy = poolStrategy; this.typeStrategy = typeStrategy; this.locationStrategy = locationStrategy; this.listener = listener; this.nativeMethodStrategy = nativeMethodStrategy; this.initializationStrategy = initializationStrategy; this.redefinitionStrategy = redefinitionStrategy; this.redefinitionBatchAllocator = redefinitionBatchAllocator; this.redefinitionFailureHandler = redefinitionFailureHandler; this.redefinitionListener = redefinitionListener; this.bootstrapInjectionStrategy = bootstrapInjectionStrategy; this.lambdaInstrumentationStrategy = lambdaInstrumentationStrategy; this.descriptionStrategy = descriptionStrategy; this.installationStrategy = installationStrategy; this.fallbackStrategy = fallbackStrategy; this.ignoredTypeMatcher = ignoredTypeMatcher; this.transformation = transformation; } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. As {@link EntryPoint}, {@link EntryPoint.Default#REBASE} is implied. * * @param plugin The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(Plugin... plugin) { return of(Arrays.asList(plugin)); } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. As {@link EntryPoint}, {@link EntryPoint.Default#REBASE} is implied. * * @param plugins The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(List<? extends Plugin> plugins) { return of(EntryPoint.Default.REBASE, plugins); } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. * * @param entryPoint The build entry point to use. * @param plugin The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(EntryPoint entryPoint, Plugin... plugin) { return of(entryPoint, Arrays.asList(plugin)); } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. * * @param entryPoint The build entry point to use. * @param plugins The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(EntryPoint entryPoint, List<? 
extends Plugin> plugins) { AgentBuilder agentBuilder = new AgentBuilder.Default(entryPoint.getByteBuddy()).with(new TypeStrategy.ForBuildEntryPoint(entryPoint)); for (Plugin plugin : plugins) { agentBuilder = agentBuilder.type(plugin).transform(new Transformer.ForBuildPlugin(plugin)); } return agentBuilder; } @Override public AgentBuilder with(ByteBuddy byteBuddy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(Listener listener) { return new Default(byteBuddy, new Listener.Compound(this.listener, listener), poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(TypeStrategy typeStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(PoolStrategy poolStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(LocationStrategy locationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder enableNativeMethodPrefix(String prefix) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, NativeMethodStrategy.ForPrefix.of(prefix), initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder disableNativeMethodPrefix() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, NativeMethodStrategy.Disabled.INSTANCE, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, 
transformation); } @Override public Redefining with(RedefinitionStrategy redefinitionStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, RedefinitionStrategy.BatchAllocator.ForTotal.INSTANCE, RedefinitionStrategy.FailureHandler.Default.FAIL_FAST, RedefinitionStrategy.Listener.NoOp.INSTANCE, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public Redefining with(RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public Redefining with(RedefinitionStrategy.FailureHandler redefinitionFailureHandler) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public Redefining with(RedefinitionStrategy.Listener redefinitionListener) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, new RedefinitionStrategy.Listener.Compound(this.redefinitionListener, redefinitionListener), bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(InitializationStrategy initializationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(LambdaInstrumentationStrategy lambdaInstrumentationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(DescriptionStrategy descriptionStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public 
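/* An illustrative sketch only (the matcher value is a placeholder and the no-op lambda is written for brevity):
 * the redefinition configuration above is typically combined with retransformation so that classes loaded before
 * the agent was attached are processed as well, assuming a retransformation constant is available in this version:
 *
 *   new AgentBuilder.Default()
 *           .with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION)
 *           .type(ElementMatchers.nameStartsWith("com.example."))
 *           .transform((builder, typeDescription, classLoader) -> builder)
 *           .installOn(instrumentation);
 */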
AgentBuilder with(InstallationStrategy installationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(FallbackStrategy fallbackStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder enableBootstrapInjection(Instrumentation instrumentation, File folder) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, new BootstrapInjectionStrategy.Enabled(folder, instrumentation), lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder disableBootstrapInjection() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, BootstrapInjectionStrategy.Disabled.INSTANCE, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder disableClassFormatChanges() { return new Default(byteBuddy.with(Implementation.Context.Disabled.Factory.INSTANCE), listener, poolStrategy, TypeStrategy.Default.REDEFINE_DECLARED_ONLY, locationStrategy, NativeMethodStrategy.Disabled.INSTANCE, InitializationStrategy.NoOp.INSTANCE, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Class<?>... type) { return JavaModule.isSupported() ? with(Listener.ModuleReadEdgeCompleting.of(instrumentation, false, type)) : this; } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, JavaModule... module) { return assureReadEdgeTo(instrumentation, Arrays.asList(module)); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return with(new Listener.ModuleReadEdgeCompleting(instrumentation, false, new HashSet<JavaModule>(modules))); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Class<?>... type) { return JavaModule.isSupported() ? with(Listener.ModuleReadEdgeCompleting.of(instrumentation, true, type)) : this; } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, JavaModule... 
module) { return assureReadEdgeFromAndTo(instrumentation, Arrays.asList(module)); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return with(new Listener.ModuleReadEdgeCompleting(instrumentation, true, new HashSet<JavaModule>(modules))); } @Override public Identified.Narrowable type(RawMatcher matcher) { return new Transforming(matcher, Transformer.NoOp.INSTANCE, false); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher) { return type(typeMatcher, any()); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return type(typeMatcher, classLoaderMatcher, any()); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return type(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, not(supportsModules()).or(moduleMatcher))); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher) { return ignore(typeMatcher, any()); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return ignore(typeMatcher, classLoaderMatcher, any()); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return ignore(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, not(supportsModules()).or(moduleMatcher))); } @Override public Ignored ignore(RawMatcher rawMatcher) { return new Ignoring(rawMatcher); } @Override public ResettableClassFileTransformer makeRaw() { return ExecutingTransformer.FACTORY.make(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, bootstrapInjectionStrategy, descriptionStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public ResettableClassFileTransformer installOn(Instrumentation instrumentation) { ResettableClassFileTransformer classFileTransformer = makeRaw(); instrumentation.addTransformer(classFileTransformer, redefinitionStrategy.isRetransforming(instrumentation)); try { if (nativeMethodStrategy.isEnabled(instrumentation)) { instrumentation.setNativeMethodPrefix(classFileTransformer, nativeMethodStrategy.getPrefix()); } lambdaInstrumentationStrategy.apply(byteBuddy, instrumentation, classFileTransformer); if (redefinitionStrategy.isEnabled()) { RedefinitionStrategy.Delegate<?> delegate = redefinitionStrategy.make(transformation); for (Class<?> type : instrumentation.getAllLoadedClasses()) { JavaModule module = JavaModule.ofType(type); try { TypePool typePool = poolStrategy.typePool(locationStrategy.classFileLocator(type.getClassLoader(), module), type.getClassLoader()); try { delegate.consider(ignoredTypeMatcher, listener, descriptionStrategy.apply(TypeDescription.ForLoadedType.getName(type), type, typePool), type, type, module, !instrumentation.isModifiableClass(type)); } catch (Throwable throwable) { if (descriptionStrategy.isLoadedFirst() && fallbackStrategy.isFallback(type, throwable)) { delegate.consider(ignoredTypeMatcher, listener, typePool.describe(TypeDescription.ForLoadedType.getName(type)).resolve(), type, NO_LOADED_TYPE, 
module); } else { throw throwable; } } } catch (Throwable throwable) { try { try { listener.onError(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module, throwable); } finally { listener.onComplete(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module); } } catch (Throwable ignored) { // Ignore exceptions that are thrown by listeners to mimic the behavior of a transformation. } } } delegate.apply(instrumentation, locationStrategy, listener, redefinitionBatchAllocator, redefinitionListener, redefinitionFailureHandler); } return classFileTransformer; } catch (Throwable throwable) { return installationStrategy.onError(instrumentation, classFileTransformer, throwable); } } @Override public ResettableClassFileTransformer installOnByteBuddyAgent() { try { Instrumentation instrumentation = (Instrumentation) ClassLoader.getSystemClassLoader() .loadClass(INSTALLER_TYPE) .getDeclaredField(INSTRUMENTATION_FIELD) .get(STATIC_FIELD); if (instrumentation == null) { throw new IllegalStateException("The Byte Buddy agent is not installed"); } return installOn(instrumentation); } catch (RuntimeException exception) { throw exception; } catch (Exception exception) { throw new IllegalStateException("The Byte Buddy agent is not installed or not accessible", exception); } } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Default aDefault = (Default) other; return byteBuddy.equals(aDefault.byteBuddy) && listener.equals(aDefault.listener) && poolStrategy.equals(aDefault.poolStrategy) && nativeMethodStrategy.equals(aDefault.nativeMethodStrategy) && typeStrategy.equals(aDefault.typeStrategy) && locationStrategy.equals(aDefault.locationStrategy) && initializationStrategy == aDefault.initializationStrategy && redefinitionStrategy == aDefault.redefinitionStrategy && redefinitionBatchAllocator.equals(aDefault.redefinitionBatchAllocator) && redefinitionFailureHandler.equals(aDefault.redefinitionFailureHandler) && redefinitionListener.equals(aDefault.redefinitionListener) && bootstrapInjectionStrategy.equals(aDefault.bootstrapInjectionStrategy) && lambdaInstrumentationStrategy.equals(aDefault.lambdaInstrumentationStrategy) && descriptionStrategy.equals(aDefault.descriptionStrategy) && installationStrategy.equals(aDefault.installationStrategy) && fallbackStrategy.equals(aDefault.fallbackStrategy) && ignoredTypeMatcher.equals(aDefault.ignoredTypeMatcher) && transformation.equals(aDefault.transformation); } @Override public int hashCode() { int result = byteBuddy.hashCode(); result = 31 * result + listener.hashCode(); result = 31 * result + poolStrategy.hashCode(); result = 31 * result + typeStrategy.hashCode(); result = 31 * result + locationStrategy.hashCode(); result = 31 * result + nativeMethodStrategy.hashCode(); result = 31 * result + initializationStrategy.hashCode(); result = 31 * result + redefinitionStrategy.hashCode(); result = 31 * result + redefinitionBatchAllocator.hashCode(); result = 31 * result + redefinitionFailureHandler.hashCode(); result = 31 * result + redefinitionListener.hashCode(); result = 31 * result + bootstrapInjectionStrategy.hashCode(); result = 31 * result + lambdaInstrumentationStrategy.hashCode(); result = 31 * result + descriptionStrategy.hashCode(); result = 31 * result + installationStrategy.hashCode(); result = 31 * result + fallbackStrategy.hashCode(); result = 31 * result + ignoredTypeMatcher.hashCode(); result = 31 * result + 
transformation.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default{" + "byteBuddy=" + byteBuddy + ", listener=" + listener + ", poolStrategy=" + poolStrategy + ", typeStrategy=" + typeStrategy + ", locationStrategy=" + locationStrategy + ", nativeMethodStrategy=" + nativeMethodStrategy + ", initializationStrategy=" + initializationStrategy + ", redefinitionStrategy=" + redefinitionStrategy + ", redefinitionBatchAllocator=" + redefinitionBatchAllocator + ", redefinitionFailureHandler=" + redefinitionFailureHandler + ", redefinitionListener=" + redefinitionListener + ", bootstrapInjectionStrategy=" + bootstrapInjectionStrategy + ", lambdaInstrumentationStrategy=" + lambdaInstrumentationStrategy + ", descriptionStrategy=" + descriptionStrategy + ", installationStrategy=" + installationStrategy + ", fallbackStrategy=" + fallbackStrategy + ", ignoredTypeMatcher=" + ignoredTypeMatcher + ", transformation=" + transformation + '}'; } /** * An injection strategy for injecting classes into the bootstrap class loader. */ protected interface BootstrapInjectionStrategy { /** * Creates an injector for the bootstrap class loader. * * @param protectionDomain The protection domain to be used. * @return A class injector for the bootstrap class loader. */ ClassInjector make(ProtectionDomain protectionDomain); /** * A disabled bootstrap injection strategy. */ enum Disabled implements BootstrapInjectionStrategy { /** * The singleton instance. */ INSTANCE; @Override public ClassInjector make(ProtectionDomain protectionDomain) { throw new IllegalStateException("Injecting classes into the bootstrap class loader was not enabled"); } @Override public String toString() { return "AgentBuilder.Default.BootstrapInjectionStrategy.Disabled." + name(); } } /** * An enabled bootstrap injection strategy. */ class Enabled implements BootstrapInjectionStrategy { /** * The folder in which jar files are to be saved. */ private final File folder; /** * The instrumentation to use for appending jar files. */ private final Instrumentation instrumentation; /** * Creates a new enabled bootstrap class loader injection strategy. * * @param folder The folder in which jar files are to be saved. * @param instrumentation The instrumentation to use for appending jar files. */ public Enabled(File folder, Instrumentation instrumentation) { this.folder = folder; this.instrumentation = instrumentation; } @Override public ClassInjector make(ProtectionDomain protectionDomain) { return ClassInjector.UsingInstrumentation.of(folder, ClassInjector.UsingInstrumentation.Target.BOOTSTRAP, instrumentation); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Enabled enabled = (Enabled) other; return folder.equals(enabled.folder) && instrumentation.equals(enabled.instrumentation); } @Override public int hashCode() { int result = folder.hashCode(); result = 31 * result + instrumentation.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.BootstrapInjectionStrategy.Enabled{" + "folder=" + folder + ", instrumentation=" + instrumentation + '}'; } } } /** * A strategy for determining if a native method name prefix should be used when rebasing methods. */ protected interface NativeMethodStrategy { /** * Determines if this strategy enables name prefixing for native methods. * * @param instrumentation The instrumentation used. 
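* <p>
* As an illustrative sketch only (the prefix value is a placeholder): a prefix strategy is activated through the
* enclosing builder and, when enabled, leads to {@code Instrumentation.setNativeMethodPrefix} being called during
* installation, for example
* <pre>{@code
* new AgentBuilder.Default().enableNativeMethodPrefix("$$original$$");
* }</pre>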
* @return {@code true} if this strategy indicates that a native method prefix should be used. */ boolean isEnabled(Instrumentation instrumentation); /** * Resolves the method name transformer for this strategy. * * @return A method name transformer for this strategy. */ MethodNameTransformer resolve(); /** * Returns the method prefix if the strategy is enabled. This method must only be called if this strategy enables prefixing. * * @return The method prefix. */ String getPrefix(); /** * A native method strategy that suffixes method names with a random suffix and disables native method rebasement. */ enum Disabled implements NativeMethodStrategy { /** * The singleton instance. */ INSTANCE; @Override public MethodNameTransformer resolve() { return MethodNameTransformer.Suffixing.withRandomSuffix(); } @Override public boolean isEnabled(Instrumentation instrumentation) { return false; } @Override public String getPrefix() { throw new IllegalStateException("A disabled native method strategy does not define a method name prefix"); } @Override public String toString() { return "AgentBuilder.Default.NativeMethodStrategy.Disabled." + name(); } } /** * A native method strategy that prefixes method names with a fixed value for supporting rebasing of native methods. */ class ForPrefix implements NativeMethodStrategy { /** * The method name prefix. */ private final String prefix; /** * Creates a new name prefixing native method strategy. * * @param prefix The method name prefix. */ protected ForPrefix(String prefix) { this.prefix = prefix; } /** * Creates a new native method strategy for prefixing method names. * * @param prefix The method name prefix. * @return An appropriate native method strategy. */ protected static NativeMethodStrategy of(String prefix) { if (prefix.length() == 0) { throw new IllegalArgumentException("A method name prefix must not be the empty string"); } return new ForPrefix(prefix); } @Override public MethodNameTransformer resolve() { return new MethodNameTransformer.Prefixing(prefix); } @Override public boolean isEnabled(Instrumentation instrumentation) { if (!instrumentation.isNativeMethodPrefixSupported()) { throw new IllegalArgumentException("A prefix for native methods is not supported: " + instrumentation); } return true; } @Override public String getPrefix() { return prefix; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && prefix.equals(((ForPrefix) other).prefix); } @Override public int hashCode() { return prefix.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.NativeMethodStrategy.ForPrefix{" + "prefix='" + prefix + '\'' + '}'; } } } /** * A transformation serves as a handler for modifying a class. */ protected interface Transformation { /** * Checks if this transformation is alive. * * @param typeDescription A description of the type that is to be transformed. * @param classLoader The class loader of the type being transformed. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param classBeingRedefined In case of a type redefinition, the loaded type being transformed or {@code null} if that is not the case. * @param protectionDomain The protection domain of the type being transformed. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @return {@code true} if this transformation is alive. 
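* <p>
* As an illustrative sketch only (matchers are placeholders and the no-op lambda is written for brevity): a
* transformation is only alive for types that are matched and not ignored, mirroring a configuration such as
* <pre>{@code
* new AgentBuilder.Default()
*         .ignore(ElementMatchers.nameStartsWith("net.bytebuddy."))
*         .type(ElementMatchers.nameEndsWith("Repository"))
*         .transform((builder, typeDescription, classLoader) -> builder);
* }</pre>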
*/ boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher); /** * Resolves an attempted transformation to a specific transformation. * * @param typeDescription A description of the type that is to be transformed. * @param classLoader The class loader of the type being transformed. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param classBeingRedefined In case of a type redefinition, the loaded type being transformed or {@code null} if that is not the case. * @param protectionDomain The protection domain of the type being transformed. * @param typePool The type pool to apply during type creation. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @return A resolution for the given type. */ Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher); /** * A resolution to a transformation. */ interface Resolution { /** * Returns the sort of this resolution. * * @return The sort of this resolution. */ Sort getSort(); /** * Resolves this resolution as a decorator of the supplied resolution. * * @param resolution The resolution for which this resolution should serve as a decorator. * @return A resolution where this resolution is applied as a decorator if this resolution is alive. */ Resolution asDecoratorOf(Resolution resolution); /** * Resolves this resolution as a decorator of the supplied resolution. * * @param resolution The resolution for which this resolution should serve as a decorator. * @return A resolution where this resolution is applied as a decorator if this resolution is alive. */ Resolution prepend(Decoratable resolution); /** * Transforms a type or returns {@code null} if a type is not to be transformed. * * @param initializationStrategy The initialization strategy to use. * @param classFileLocator The class file locator to use. * @param typeStrategy The definition handler to use. * @param byteBuddy The Byte Buddy instance to use. * @param methodNameTransformer The method name transformer to be used. * @param bootstrapInjectionStrategy The bootstrap injection strategy to be used. * @param accessControlContext The access control context to be used. * @param listener The listener to be invoked to inform about an applied or non-applied transformation. * @return The class file of the transformed class or {@code null} if no transformation is attempted. */ byte[] apply(InitializationStrategy initializationStrategy, ClassFileLocator classFileLocator, TypeStrategy typeStrategy, ByteBuddy byteBuddy, NativeMethodStrategy methodNameTransformer, BootstrapInjectionStrategy bootstrapInjectionStrategy, AccessControlContext accessControlContext, Listener listener); /** * Describes a specific sort of a {@link Resolution}. */ enum Sort { /** * A terminal resolution. After discovering such a resolution, no further transformers are considered. */ TERMINAL(true), /** * A resolution that can serve as a decorator for another resolution. After discovering such a resolution * further transformations are considered where the represented resolution is prepended if applicable. */ DECORATOR(true), /** * A non-resolved resolution. */ UNDEFINED(false); /** * Indicates if this sort represents an active resolution. 
*/ private final boolean alive; /** * Creates a new resolution sort. * * @param alive Indicates if this sort represents an active resolution. */ Sort(boolean alive) { this.alive = alive; } /** * Returns {@code true} if this resolution is alive. * * @return {@code true} if this resolution is alive. */ protected boolean isAlive() { return alive; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Resolution.Sort." + name(); } } /** * A resolution that can be decorated by a transformer. */ interface Decoratable extends Resolution { /** * Appends the supplied transformer to this resolution. * * @param transformer The transformer to append to the transformer that is represented bz this instance. * @return A new resolution with the supplied transformer appended to this transformer. */ Resolution append(Transformer transformer); } /** * A canonical implementation of a non-resolved resolution. */ class Unresolved implements Resolution { /** * The type that is not transformed. */ private final TypeDescription typeDescription; /** * The unresolved type's class loader. */ private final ClassLoader classLoader; /** * The non-transformed type's module or {@code null} if the current VM does not support modules. */ private final JavaModule module; /** * Creates a new unresolved resolution. * * @param typeDescription The type that is not transformed. * @param classLoader The unresolved type's class loader. * @param module The non-transformed type's module or {@code null} if the current VM does not support modules. */ protected Unresolved(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { this.typeDescription = typeDescription; this.classLoader = classLoader; this.module = module; } @Override public Sort getSort() { return Sort.UNDEFINED; } @Override public Resolution asDecoratorOf(Resolution resolution) { return resolution; } @Override public Resolution prepend(Decoratable resolution) { return resolution; } @Override public byte[] apply(InitializationStrategy initializationStrategy, ClassFileLocator classFileLocator, TypeStrategy typeStrategy, ByteBuddy byteBuddy, NativeMethodStrategy methodNameTransformer, BootstrapInjectionStrategy bootstrapInjectionStrategy, AccessControlContext accessControlContext, Listener listener) { listener.onIgnored(typeDescription, classLoader, module); return NO_TRANSFORMATION; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Unresolved that = (Unresolved) object; return typeDescription.equals(that.typeDescription) && (classLoader != null ? classLoader.equals(that.classLoader) : that.classLoader == null) && (module != null ? module.equals(that.module) : that.module == null); } @Override public int hashCode() { int result = typeDescription.hashCode(); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); result = 31 * result + (module != null ? module.hashCode() : 0); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Resolution.Unresolved{" + "typeDescription=" + typeDescription + ", classLoader=" + classLoader + ", module=" + module + '}'; } } } /** * A transformation that does not attempt to transform any type. */ enum Ignored implements Transformation { /** * The singleton instance. 
*/ INSTANCE; @Override public boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher) { return false; } @Override public Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher) { return new Resolution.Unresolved(typeDescription, classLoader, module); } @Override public String toString() { return "AgentBuilder.Default.Transformation.Ignored." + name(); } } /** * A simple, active transformation. */ class Simple implements Transformation { /** * The raw matcher that is represented by this transformation. */ private final RawMatcher rawMatcher; /** * The transformer that is represented by this transformation. */ private final Transformer transformer; /** * {@code true} if this transformer serves as a decorator. */ private final boolean decorator; /** * Creates a new transformation. * * @param rawMatcher The raw matcher that is represented by this transformation. * @param transformer The transformer that is represented by this transformation. * @param decorator {@code true} if this transformer serves as a decorator. */ protected Simple(RawMatcher rawMatcher, Transformer transformer, boolean decorator) { this.rawMatcher = rawMatcher; this.transformer = transformer; this.decorator = decorator; } @Override public boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher) { return !ignoredTypeMatcher.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain) && rawMatcher.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain); } @Override public Transformation.Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher) { return isAlive(typeDescription, classLoader, module, classBeingRedefined, protectionDomain, ignoredTypeMatcher) ? new Resolution(typeDescription, classLoader, module, protectionDomain, typePool, transformer, decorator) : new Transformation.Resolution.Unresolved(typeDescription, classLoader, module); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && decorator == ((Simple) other).decorator && rawMatcher.equals(((Simple) other).rawMatcher) && transformer.equals(((Simple) other).transformer); } @Override public int hashCode() { int result = rawMatcher.hashCode(); result = 31 * result + (decorator ? 1 : 0); result = 31 * result + transformer.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Simple{" + "rawMatcher=" + rawMatcher + ", transformer=" + transformer + ", decorator=" + decorator + '}'; } /** * A resolution that performs a type transformation. */ protected static class Resolution implements Transformation.Resolution.Decoratable { /** * A description of the transformed type. */ private final TypeDescription typeDescription; /** * The class loader of the transformed type. */ private final ClassLoader classLoader; /** * The transformed type's module or {@code null} if the current VM does not support modules. 
*/ private final JavaModule module; /** * The protection domain of the transformed type. */ private final ProtectionDomain protectionDomain; /** * The type pool to apply during type creation. */ private final TypePool typePool; /** * The transformer to be applied. */ private final Transformer transformer; /** * {@code true} if this transformer serves as a decorator. */ private final boolean decorator; /** * Creates a new active transformation. * * @param typeDescription A description of the transformed type. * @param classLoader The class loader of the transformed type. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param protectionDomain The protection domain of the transformed type. * @param typePool The type pool to apply during type creation. * @param transformer The transformer to be applied. * @param decorator {@code true} if this transformer serves as a decorator. */ protected Resolution(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, ProtectionDomain protectionDomain, TypePool typePool, Transformer transformer, boolean decorator) { this.typeDescription = typeDescription; this.classLoader = classLoader; this.module = module; this.protectionDomain = protectionDomain; this.typePool = typePool; this.transformer = transformer; this.decorator = decorator; } @Override public Sort getSort() { return decorator ? Sort.DECORATOR : Sort.TERMINAL; } @Override public Transformation.Resolution asDecoratorOf(Transformation.Resolution resolution) { return resolution.prepend(this); } @Override public Transformation.Resolution prepend(Decoratable resolution) { return resolution.append(transformer); } @Override public Transformation.Resolution append(Transformer transformer) { return new Resolution(typeDescription, classLoader, module, protectionDomain, typePool, new Transformer.Compound(this.transformer, transformer), decorator); } @Override public byte[] apply(InitializationStrategy initializationStrategy, ClassFileLocator classFileLocator, TypeStrategy typeStrategy, ByteBuddy byteBuddy, NativeMethodStrategy methodNameTransformer, BootstrapInjectionStrategy bootstrapInjectionStrategy, AccessControlContext accessControlContext, Listener listener) { InitializationStrategy.Dispatcher dispatcher = initializationStrategy.dispatcher(); DynamicType.Unloaded<?> dynamicType = dispatcher.apply(transformer.transform(typeStrategy.builder(typeDescription, byteBuddy, classFileLocator, methodNameTransformer.resolve()), typeDescription, classLoader)).make(TypeResolutionStrategy.Disabled.INSTANCE, typePool); dispatcher.register(dynamicType, classLoader, new BootstrapClassLoaderCapableInjectorFactory(bootstrapInjectionStrategy, classLoader, protectionDomain)); listener.onTransformation(typeDescription, classLoader, module, dynamicType); return dynamicType.getBytes(); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Resolution that = (Resolution) other; return typeDescription.equals(that.typeDescription) && decorator == that.decorator && !(classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) && !(module != null ? !module.equals(that.module) : that.module != null) && !(protectionDomain != null ? 
!protectionDomain.equals(that.protectionDomain) : that.protectionDomain != null) && typePool.equals(that.typePool) && transformer.equals(that.transformer); } @Override public int hashCode() { int result = typeDescription.hashCode(); result = 31 * result + (decorator ? 1 : 0); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); result = 31 * result + (module != null ? module.hashCode() : 0); result = 31 * result + (protectionDomain != null ? protectionDomain.hashCode() : 0); result = 31 * result + transformer.hashCode(); result = 31 * result + typePool.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Simple.Resolution{" + "typeDescription=" + typeDescription + ", classLoader=" + classLoader + ", module=" + module + ", protectionDomain=" + protectionDomain + ", typePool=" + typePool + ", transformer=" + transformer + ", decorator=" + decorator + '}'; } /** * An injector factory that resolves to a bootstrap class loader injection if this is necessary and enabled. */ protected static class BootstrapClassLoaderCapableInjectorFactory implements InitializationStrategy.Dispatcher.InjectorFactory { /** * The bootstrap injection strategy being used. */ private final BootstrapInjectionStrategy bootstrapInjectionStrategy; /** * The class loader for which to create an injection factory. */ private final ClassLoader classLoader; /** * The protection domain of the created classes. */ private final ProtectionDomain protectionDomain; /** * Creates a new bootstrap class loader capable injector factory. * * @param bootstrapInjectionStrategy The bootstrap injection strategy being used. * @param classLoader The class loader for which to create an injection factory. * @param protectionDomain The protection domain of the created classes. */ protected BootstrapClassLoaderCapableInjectorFactory(BootstrapInjectionStrategy bootstrapInjectionStrategy, ClassLoader classLoader, ProtectionDomain protectionDomain) { this.bootstrapInjectionStrategy = bootstrapInjectionStrategy; this.classLoader = classLoader; this.protectionDomain = protectionDomain; } @Override public ClassInjector resolve() { return classLoader == null ? bootstrapInjectionStrategy.make(protectionDomain) : new ClassInjector.UsingReflection(classLoader, protectionDomain); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; BootstrapClassLoaderCapableInjectorFactory that = (BootstrapClassLoaderCapableInjectorFactory) other; return bootstrapInjectionStrategy.equals(that.bootstrapInjectionStrategy) && !(classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) && !(protectionDomain != null ? !protectionDomain.equals(that.protectionDomain) : that.protectionDomain != null); } @Override public int hashCode() { int result = bootstrapInjectionStrategy.hashCode(); result = 31 * result + (protectionDomain != null ? protectionDomain.hashCode() : 0); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Simple.Resolution.BootstrapClassLoaderCapableInjectorFactory{" + "bootstrapInjectionStrategy=" + bootstrapInjectionStrategy + ", classLoader=" + classLoader + ", protectionDomain=" + protectionDomain + '}'; } } } } /** * A compound transformation that applied several transformation in the given order and applies the first active transformation. 
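* <p>
* Descriptive note: the compound consults its transformations in iteration order; the first terminal resolution
* ends the search, decorating resolutions that were encountered on the way are prepended to it and an unresolved
* resolution is returned if no transformation is alive.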
*/ class Compound implements Transformation { /** * The list of transformations to apply in their application order. */ private final List<? extends Transformation> transformations; /** * Creates a new compound transformation. * * @param transformation An array of transformations to apply in their application order. */ protected Compound(Transformation... transformation) { this(Arrays.asList(transformation)); } /** * Creates a new compound transformation. * * @param transformations A list of transformations to apply in their application order. */ protected Compound(List<? extends Transformation> transformations) { this.transformations = transformations; } @Override public boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher) { for (Transformation transformation : transformations) { if (transformation.isAlive(typeDescription, classLoader, module, classBeingRedefined, protectionDomain, ignoredTypeMatcher)) { return true; } } return false; } @Override public Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher) { Resolution current = new Resolution.Unresolved(typeDescription, classLoader, module); for (Transformation transformation : transformations) { Resolution resolution = transformation.resolve(typeDescription, classLoader, module, classBeingRedefined, protectionDomain, typePool, ignoredTypeMatcher); switch (resolution.getSort()) { case TERMINAL: return current.asDecoratorOf(resolution); case DECORATOR: current = current.asDecoratorOf(resolution); break; case UNDEFINED: break; default: throw new IllegalStateException("Unexpected resolution type: " + resolution.getSort()); } } return current; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && transformations.equals(((Compound) other).transformations); } @Override public int hashCode() { return transformations.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.Transformation.Compound{" + "transformations=" + transformations + '}'; } } } /** * A {@link java.lang.instrument.ClassFileTransformer} that implements the enclosing agent builder's * configuration. */ protected static class ExecutingTransformer extends ResettableClassFileTransformer.AbstractBase { /** * A factory for creating a {@link ClassFileTransformer} that supports the features of the current VM. */ protected static final Factory FACTORY = AccessController.doPrivileged(FactoryCreationOption.INSTANCE); /** * The Byte Buddy instance to be used. */ private final ByteBuddy byteBuddy; /** * The type locator to use. */ private final PoolStrategy poolStrategy; /** * The definition handler to use. */ private final TypeStrategy typeStrategy; /** * The listener to notify on transformations. */ private final Listener listener; /** * The native method strategy to apply. */ private final NativeMethodStrategy nativeMethodStrategy; /** * The initialization strategy to use for transformed types. */ private final InitializationStrategy initializationStrategy; /** * The injection strategy for injecting classes into the bootstrap class loader. */ private final BootstrapInjectionStrategy bootstrapInjectionStrategy; /** * The description strategy for resolving type descriptions for types. 
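* <p>
* Descriptive note: if a description strategy that prefers loaded types fails with an error, the fallback
* strategy may allow a second attempt that resolves the type from the type pool without the loaded class, as
* implemented by the surrounding {@code transform} and {@code reset} methods.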
*/ private final DescriptionStrategy descriptionStrategy; /** * The location strategy to use. */ private final LocationStrategy locationStrategy; /** * The fallback strategy to use. */ private final FallbackStrategy fallbackStrategy; /** * Identifies types that should not be instrumented. */ private final RawMatcher ignoredTypeMatcher; /** * The transformation object for handling type transformations. */ private final Transformation transformation; /** * The access control context to use for loading classes. */ private final AccessControlContext accessControlContext; /** * Creates a new class file transformer. * * @param byteBuddy The Byte Buddy instance to be used. * @param listener The listener to notify on transformations. * @param poolStrategy The type locator to use. * @param typeStrategy The definition handler to use. * @param locationStrategy The location strategy to use. * @param nativeMethodStrategy The native method strategy to apply. * @param initializationStrategy The initialization strategy to use for transformed types. * @param bootstrapInjectionStrategy The injection strategy for injecting classes into the bootstrap class loader. * @param descriptionStrategy The description strategy for resolving type descriptions for types. * @param fallbackStrategy The fallback strategy to use. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @param transformation The transformation object for handling type transformations. */ public ExecutingTransformer(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { this.byteBuddy = byteBuddy; this.typeStrategy = typeStrategy; this.poolStrategy = poolStrategy; this.locationStrategy = locationStrategy; this.listener = listener; this.nativeMethodStrategy = nativeMethodStrategy; this.initializationStrategy = initializationStrategy; this.bootstrapInjectionStrategy = bootstrapInjectionStrategy; this.descriptionStrategy = descriptionStrategy; this.fallbackStrategy = fallbackStrategy; this.ignoredTypeMatcher = ignoredTypeMatcher; this.transformation = transformation; accessControlContext = AccessController.getContext(); } @Override public byte[] transform(ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { return AccessController.doPrivileged(new LegacyVmDispatcher(classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation), accessControlContext); } /** * Applies a transformation for a class that was captured by this {@link ClassFileTransformer}. Invoking this method * allows to process module information which is available since Java 9. * * @param rawModule The instrumented class's Java {@code java.lang.reflect.Module}. * @param classLoader The type's class loader or {@code null} if the type is loaded by the bootstrap loader. * @param internalTypeName The internal name of the instrumented class. * @param classBeingRedefined The loaded {@link Class} being redefined or {@code null} if no such class exists. * @param protectionDomain The instrumented type's protection domain. 
* @param binaryRepresentation The class file of the instrumented class in its current state. * @return The transformed class file or an empty byte array if this transformer does not apply an instrumentation. */ protected byte[] transform(Object rawModule, ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { return AccessController.doPrivileged(new Java9CapableVmDispatcher(rawModule, classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation), accessControlContext); } /** * Applies a transformation for a class that was captured by this {@link ClassFileTransformer}. * * @param module The instrumented class's Java module in its wrapped form or {@code null} if the current VM does not support modules. * @param classLoader The instrumented class's class loader. * @param internalTypeName The internal name of the instrumented class. * @param classBeingRedefined The loaded {@link Class} being redefined or {@code null} if no such class exists. * @param protectionDomain The instrumented type's protection domain. * @param binaryRepresentation The class file of the instrumented class in its current state. * @return The transformed class file or an empty byte array if this transformer does not apply an instrumentation. */ private byte[] transform(JavaModule module, ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { if (internalTypeName == null) { return NO_TRANSFORMATION; } String typeName = internalTypeName.replace('/', '.'); try { ClassFileLocator classFileLocator = ClassFileLocator.Simple.of(typeName, binaryRepresentation, locationStrategy.classFileLocator(classLoader, module)); TypePool typePool = poolStrategy.typePool(classFileLocator, classLoader); try { return doTransform(module, classLoader, typeName, classBeingRedefined, protectionDomain, typePool, classFileLocator); } catch (Throwable throwable) { if (classBeingRedefined != null && descriptionStrategy.isLoadedFirst() && fallbackStrategy.isFallback(classBeingRedefined, throwable)) { return doTransform(module, classLoader, typeName, NO_LOADED_TYPE, protectionDomain, typePool, classFileLocator); } else { throw throwable; } } } catch (Throwable throwable) { listener.onError(typeName, classLoader, module, throwable); return NO_TRANSFORMATION; } finally { listener.onComplete(typeName, classLoader, module); } } /** * Applies a transformation for a class that was captured by this {@link ClassFileTransformer}. * * @param module The instrumented class's Java module in its wrapped form or {@code null} if the current VM does not support modules. * @param classLoader The instrumented class's class loader. * @param typeName The binary name of the instrumented class. * @param classBeingRedefined The loaded {@link Class} being redefined or {@code null} if no such class exists. * @param protectionDomain The instrumented type's protection domain. * @param typePool The type pool to use. * @param classFileLocator The class file locator to use. * @return The transformed class file or an empty byte array if this transformer does not apply an instrumentation. 
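* <p>
* For orientation, with an illustrative value only: the JVM reports an internal name such as
* {@code "com/example/Foo"} which the caller converts to the binary name that is supplied to this method:
* <pre>{@code
* String typeName = "com/example/Foo".replace('/', '.'); // "com.example.Foo"
* }</pre>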
*/ private byte[] doTransform(JavaModule module, ClassLoader classLoader, String typeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, ClassFileLocator classFileLocator) { return transformation.resolve(descriptionStrategy.apply(typeName, classBeingRedefined, typePool), classLoader, module, classBeingRedefined, protectionDomain, typePool, ignoredTypeMatcher).apply(initializationStrategy, classFileLocator, typeStrategy, byteBuddy, nativeMethodStrategy, bootstrapInjectionStrategy, accessControlContext, listener); } @Override public Reset reset(Instrumentation instrumentation, RedefinitionStrategy redefinitionStrategy, RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator, RedefinitionStrategy.Listener redefinitionListener) { if (instrumentation.removeTransformer(this)) { if (!redefinitionStrategy.isEnabled()) { return Reset.Simple.ACTIVE; } redefinitionStrategy.isRetransforming(instrumentation); Map<Class<?>, Throwable> failures = new HashMap<Class<?>, Throwable>(); RedefinitionStrategy.Delegate<?> delegate = redefinitionStrategy.make(transformation); for (Class<?> type : instrumentation.getAllLoadedClasses()) { JavaModule module = JavaModule.ofType(type); try { delegate.consider(ignoredTypeMatcher, Listener.NoOp.INSTANCE, descriptionStrategy.apply(TypeDescription.ForLoadedType.getName(type), type, poolStrategy.typePool(locationStrategy.classFileLocator(type.getClassLoader(), module), type.getClassLoader())), type, type, module, !instrumentation.isModifiableClass(type)); } catch (Throwable throwable) { try { if (descriptionStrategy.isLoadedFirst() && fallbackStrategy.isFallback(type, throwable)) { delegate.consider(ignoredTypeMatcher, Listener.NoOp.INSTANCE, descriptionStrategy.apply(TypeDescription.ForLoadedType.getName(type), NO_LOADED_TYPE, poolStrategy.typePool(locationStrategy.classFileLocator(type.getClassLoader(), module), type.getClassLoader())), type, NO_LOADED_TYPE, module); } else { failures.put(type, throwable); } } catch (Throwable fallback) { failures.put(type, fallback); } } } delegate.apply(instrumentation, locationStrategy, Listener.NoOp.INSTANCE, RedefinitionStrategy.BatchAllocator.ForTotal.INSTANCE, new RedefinitionStrategy.Listener.Compound(new FailureCollectingListener(failures), redefinitionListener), RedefinitionStrategy.FailureHandler.Default.SUPPRESSING); return Reset.WithErrors.ofPotentiallyErroneous(failures); } else { return Reset.Simple.INACTIVE; } } /* does not implement hashCode and equals in order to align with identity treatment of the JVM */ @Override public String toString() { return "AgentBuilder.Default." + getClass().getSimpleName() + "{" + "byteBuddy=" + byteBuddy + ", listener=" + listener + ", poolStrategy=" + poolStrategy + ", typeStrategy=" + typeStrategy + ", locationStrategy=" + locationStrategy + ", initializationStrategy=" + initializationStrategy + ", nativeMethodStrategy=" + nativeMethodStrategy + ", bootstrapInjectionStrategy=" + bootstrapInjectionStrategy + ", descriptionStrategy=" + descriptionStrategy + ", fallbackStrategy=" + fallbackStrategy + ", ignoredTypeMatcher=" + ignoredTypeMatcher + ", transformation=" + transformation + ", accessControlContext=" + accessControlContext + '}'; } /** * A factory for creating a {@link ClassFileTransformer} for the current VM. */ protected interface Factory { /** * Creates a new class file transformer for the current VM. * * @param byteBuddy The Byte Buddy instance to be used. * @param listener The listener to notify on transformations. 
* @param poolStrategy The type locator to use. * @param typeStrategy The definition handler to use. * @param locationStrategy The location strategy to use. * @param nativeMethodStrategy The native method strategy to apply. * @param initializationStrategy The initialization strategy to use for transformed types. * @param bootstrapInjectionStrategy The injection strategy for injecting classes into the bootstrap class loader. * @param descriptionStrategy The description strategy for resolving type descriptions for types. * @param fallbackStrategy The fallback strategy to use. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @param transformation The transformation object for handling type transformations. * @return A class file transformer for the current VM that supports the API of the current VM. */ ResettableClassFileTransformer make(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation); /** * A factory for a class file transformer on a JVM that supports the {@code java.lang.reflect.Module} API to override * the newly added method of the {@link ClassFileTransformer} to capture an instrumented class's module. */ class ForJava9CapableVm implements Factory { /** * A constructor for creating a {@link ClassFileTransformer} that overrides the newly added method for extracting * the {@code java.lang.reflect.Module} of an instrumented class. */ private final Constructor<? extends ResettableClassFileTransformer> executingTransformer; /** * Creates a class file transformer factory for a Java 9 capable VM. * * @param executingTransformer A constructor for creating a {@link ClassFileTransformer} that overrides the newly added * method for extracting the {@code java.lang.reflect.Module} of an instrumented class. */ protected ForJava9CapableVm(Constructor<? 
extends ResettableClassFileTransformer> executingTransformer) { this.executingTransformer = executingTransformer; } @Override public ResettableClassFileTransformer make(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { try { return executingTransformer.newInstance(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, bootstrapInjectionStrategy, descriptionStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } catch (IllegalAccessException exception) { throw new IllegalStateException("Cannot access " + executingTransformer, exception); } catch (InstantiationException exception) { throw new IllegalStateException("Cannot instantiate " + executingTransformer.getDeclaringClass(), exception); } catch (InvocationTargetException exception) { throw new IllegalStateException("Cannot invoke " + executingTransformer, exception.getCause()); } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForJava9CapableVm that = (ForJava9CapableVm) object; return executingTransformer.equals(that.executingTransformer); } @Override public int hashCode() { return executingTransformer.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.Factory.ForJava9CapableVm{" + "executingTransformer=" + executingTransformer + '}'; } } /** * A factory for a {@link ClassFileTransformer} on a VM that does not support the {@code java.lang.reflect.Module} API. */ enum ForLegacyVm implements Factory { /** * The singleton instance. */ INSTANCE; @Override public ResettableClassFileTransformer make(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { return new ExecutingTransformer(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, bootstrapInjectionStrategy, descriptionStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.Factory.ForLegacyVm." + name(); } } } /** * An action to create an implementation of {@link ExecutingTransformer} that support Java 9 modules. */ protected enum FactoryCreationOption implements PrivilegedAction<Factory> { /** * The singleton instance. 
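* <p>
* As an illustrative sketch only (the generated class exists only at runtime): on a Java 9 capable VM, this
* action subclasses {@link ExecutingTransformer} such that the module-aware {@code transform} overload delegates
* to the {@code protected} overload that accepts the raw module as an {@link Object}, roughly equivalent to
* <pre>{@code
* public byte[] transform(Module module, ClassLoader classLoader, String internalTypeName,
*                         Class<?> classBeingRedefined, ProtectionDomain protectionDomain,
*                         byte[] binaryRepresentation) {
*     return super.transform((Object) module, classLoader, internalTypeName,
*             classBeingRedefined, protectionDomain, binaryRepresentation);
* }
* }</pre>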
*/ INSTANCE; @Override @SuppressFBWarnings(value = "REC_CATCH_EXCEPTION", justification = "Exception should not be rethrown but trigger a fallback") public Factory run() { try { return new Factory.ForJava9CapableVm(new ByteBuddy() .subclass(ExecutingTransformer.class) .name(ExecutingTransformer.class.getName() + "$ByteBuddy$ModuleSupport") .method(named("transform").and(takesArgument(0, JavaType.MODULE.load()))) .intercept(MethodCall.invoke(ExecutingTransformer.class.getDeclaredMethod("transform", Object.class, ClassLoader.class, String.class, Class.class, ProtectionDomain.class, byte[].class)).onSuper().withAllArguments()) .make() .load(ExecutingTransformer.class.getClassLoader(), ClassLoadingStrategy.Default.WRAPPER_PERSISTENT.with(ExecutingTransformer.class.getProtectionDomain())) .getLoaded() .getDeclaredConstructor(ByteBuddy.class, Listener.class, PoolStrategy.class, TypeStrategy.class, LocationStrategy.class, NativeMethodStrategy.class, InitializationStrategy.class, BootstrapInjectionStrategy.class, DescriptionStrategy.class, FallbackStrategy.class, RawMatcher.class, Transformation.class)); } catch (Exception ignored) { return Factory.ForLegacyVm.INSTANCE; } } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.InheritanceAction." + name(); } } /** * A privileged action for transforming a class on a JVM prior to Java 9. */ protected class LegacyVmDispatcher implements PrivilegedAction<byte[]> { /** * The type's class loader or {@code null} if the bootstrap class loader is represented. */ private final ClassLoader classLoader; /** * The type's internal name or {@code null} if no such name exists. */ private final String internalTypeName; /** * The class being redefined or {@code null} if no such class exists. */ private final Class<?> classBeingRedefined; /** * The type's protection domain. */ private final ProtectionDomain protectionDomain; /** * The type's binary representation. */ private final byte[] binaryRepresentation; /** * Creates a new type transformation dispatcher. * * @param classLoader The type's class loader or {@code null} if the bootstrap class loader is represented. * @param internalTypeName The type's internal name or {@code null} if no such name exists. * @param classBeingRedefined The class being redefined or {@code null} if no such class exists. * @param protectionDomain The type's protection domain. * @param binaryRepresentation The type's binary representation. */ protected LegacyVmDispatcher(ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { this.classLoader = classLoader; this.internalTypeName = internalTypeName; this.classBeingRedefined = classBeingRedefined; this.protectionDomain = protectionDomain; this.binaryRepresentation = binaryRepresentation; } @Override public byte[] run() { return transform(JavaModule.UNSUPPORTED, classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation); } /** * Returns the outer instance. * * @return The outer instance. */ private ExecutingTransformer getOuter() { return ExecutingTransformer.this; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; LegacyVmDispatcher that = (LegacyVmDispatcher) object; return (classLoader != null ? classLoader.equals(that.classLoader) : that.classLoader == null) && (internalTypeName != null ? 
internalTypeName.equals(that.internalTypeName) : that.internalTypeName == null) && (classBeingRedefined != null ? classBeingRedefined.equals(that.classBeingRedefined) : that.classBeingRedefined == null) && protectionDomain.equals(that.protectionDomain) && ExecutingTransformer.this.equals(that.getOuter()) && Arrays.equals(binaryRepresentation, that.binaryRepresentation); } @Override public int hashCode() { int result = classLoader != null ? classLoader.hashCode() : 0; result = 31 * result + (internalTypeName != null ? internalTypeName.hashCode() : 0); result = 31 * result + (classBeingRedefined != null ? classBeingRedefined.hashCode() : 0); result = 31 * result + protectionDomain.hashCode(); result = 31 * result + ExecutingTransformer.this.hashCode(); result = 31 * result + Arrays.hashCode(binaryRepresentation); return result; } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.LegacyVmDispatcher{" + "outer=" + ExecutingTransformer.this + ", classLoader=" + classLoader + ", internalTypeName='" + internalTypeName + '\'' + ", classBeingRedefined=" + classBeingRedefined + ", protectionDomain=" + protectionDomain + ", binaryRepresentation=<" + binaryRepresentation.length + " bytes>" + '}'; } } /** * A privileged action for transforming a class on a JVM that supports modules. */ protected class Java9CapableVmDispatcher implements PrivilegedAction<byte[]> { /** * The type's {@code java.lang.reflect.Module}. */ private final Object rawModule; /** * The type's class loader or {@code null} if the type is loaded by the bootstrap loader. */ private final ClassLoader classLoader; /** * The type's internal name or {@code null} if no such name exists. */ private final String internalTypeName; /** * The class being redefined or {@code null} if no such class exists. */ private final Class<?> classBeingRedefined; /** * The type's protection domain. */ private final ProtectionDomain protectionDomain; /** * The type's binary representation. */ private final byte[] binaryRepresentation; /** * Creates a new legacy dispatcher. * * @param rawModule The type's {@code java.lang.reflect.Module}. * @param classLoader The type's class loader or {@code null} if the type is loaded by the bootstrap loader. * @param internalTypeName The type's internal name or {@code null} if no such name exists. * @param classBeingRedefined The class being redefined or {@code null} if no such class exists. * @param protectionDomain The type's protection domain. * @param binaryRepresentation The type's binary representation. */ protected Java9CapableVmDispatcher(Object rawModule, ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { this.rawModule = rawModule; this.classLoader = classLoader; this.internalTypeName = internalTypeName; this.classBeingRedefined = classBeingRedefined; this.protectionDomain = protectionDomain; this.binaryRepresentation = binaryRepresentation; } @Override public byte[] run() { return transform(JavaModule.of(rawModule), classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation); } /** * Returns the outer instance. * * @return The outer instance. 
*/ private ExecutingTransformer getOuter() { return ExecutingTransformer.this; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Java9CapableVmDispatcher that = (Java9CapableVmDispatcher) object; return rawModule.equals(that.rawModule) && (classLoader != null ? classLoader.equals(that.classLoader) : that.classLoader == null) && (internalTypeName != null ? internalTypeName.equals(that.internalTypeName) : that.internalTypeName == null) && (classBeingRedefined != null ? classBeingRedefined.equals(that.classBeingRedefined) : that.classBeingRedefined == null) && protectionDomain.equals(that.protectionDomain) && ExecutingTransformer.this.equals(that.getOuter()) && Arrays.equals(binaryRepresentation, that.binaryRepresentation); } @Override public int hashCode() { int result = rawModule.hashCode(); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); result = 31 * result + (internalTypeName != null ? internalTypeName.hashCode() : 0); result = 31 * result + (classBeingRedefined != null ? classBeingRedefined.hashCode() : 0); result = 31 * result + protectionDomain.hashCode(); result = 31 * result + ExecutingTransformer.this.hashCode(); result = 31 * result + Arrays.hashCode(binaryRepresentation); return result; } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.Java9CapableVmDispatcher{" + "outer=" + ExecutingTransformer.this + ", rawModule=" + rawModule + ", classLoader=" + classLoader + ", internalTypeName='" + internalTypeName + '\'' + ", classBeingRedefined=" + classBeingRedefined + ", protectionDomain=" + protectionDomain + ", binaryRepresentation=<" + binaryRepresentation.length + " bytes>" + '}'; } } /** * A listener that adds all discovered errors to a map. */ protected static class FailureCollectingListener extends RedefinitionStrategy.Listener.Adapter { /** * A mapping of failures by the class that causes this failure. */ private final Map<Class<?>, Throwable> failures; /** * Creates a new failure collecting listener. * * @param failures A mapping of failures by the class that causes this failure. */ protected FailureCollectingListener(Map<Class<?>, Throwable> failures) { this.failures = failures; } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { for (Class<?> type : batch) { failures.put(type, throwable); } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; FailureCollectingListener that = (FailureCollectingListener) object; return failures.equals(that.failures); } @Override public int hashCode() { return failures.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.FailureCollectingListener{" + "failures=" + failures + '}'; } } } /** * An abstract implementation of an agent builder that delegates all invocation to another instance. * * @param <T> The type that is produced by chaining a matcher. */ protected abstract class Delegator<T extends Matchable<T>> extends Matchable.AbstractBase<T> implements AgentBuilder { /** * Materializes the currently described {@link net.bytebuddy.agent.builder.AgentBuilder}. * * @return An agent builder that represents the currently described entry of this instance. 
*/ protected abstract AgentBuilder materialize(); @Override public AgentBuilder with(ByteBuddy byteBuddy) { return materialize().with(byteBuddy); } @Override public AgentBuilder with(Listener listener) { return materialize().with(listener); } @Override public AgentBuilder with(TypeStrategy typeStrategy) { return materialize().with(typeStrategy); } @Override public AgentBuilder with(PoolStrategy poolStrategy) { return materialize().with(poolStrategy); } @Override public AgentBuilder with(LocationStrategy locationStrategy) { return materialize().with(locationStrategy); } @Override public AgentBuilder with(InitializationStrategy initializationStrategy) { return materialize().with(initializationStrategy); } @Override public Redefining with(RedefinitionStrategy redefinitionStrategy) { return materialize().with(redefinitionStrategy); } @Override public AgentBuilder with(LambdaInstrumentationStrategy lambdaInstrumentationStrategy) { return materialize().with(lambdaInstrumentationStrategy); } @Override public AgentBuilder with(DescriptionStrategy descriptionStrategy) { return materialize().with(descriptionStrategy); } @Override public AgentBuilder with(InstallationStrategy installationStrategy) { return materialize().with(installationStrategy); } @Override public AgentBuilder with(FallbackStrategy fallbackStrategy) { return materialize().with(fallbackStrategy); } @Override public AgentBuilder enableBootstrapInjection(Instrumentation instrumentation, File folder) { return materialize().enableBootstrapInjection(instrumentation, folder); } @Override public AgentBuilder disableBootstrapInjection() { return materialize().disableBootstrapInjection(); } @Override public AgentBuilder enableNativeMethodPrefix(String prefix) { return materialize().enableNativeMethodPrefix(prefix); } @Override public AgentBuilder disableNativeMethodPrefix() { return materialize().disableNativeMethodPrefix(); } @Override public AgentBuilder disableClassFormatChanges() { return materialize().disableClassFormatChanges(); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Class<?>... type) { return materialize().assureReadEdgeTo(instrumentation, type); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, JavaModule... module) { return materialize().assureReadEdgeTo(instrumentation, module); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return materialize().assureReadEdgeTo(instrumentation, modules); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Class<?>... type) { return materialize().assureReadEdgeFromAndTo(instrumentation, type); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, JavaModule... module) { return materialize().assureReadEdgeFromAndTo(instrumentation, module); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return materialize().assureReadEdgeFromAndTo(instrumentation, modules); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher) { return materialize().type(typeMatcher); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return materialize().type(typeMatcher, classLoaderMatcher); } @Override public Identified.Narrowable type(ElementMatcher<? 
super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return materialize().type(typeMatcher, classLoaderMatcher, moduleMatcher); } @Override public Identified.Narrowable type(RawMatcher matcher) { return materialize().type(matcher); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> ignoredTypes) { return materialize().ignore(ignoredTypes); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> ignoredTypes, ElementMatcher<? super ClassLoader> ignoredClassLoaders) { return materialize().ignore(ignoredTypes, ignoredClassLoaders); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return materialize().ignore(typeMatcher, classLoaderMatcher, moduleMatcher); } @Override public Ignored ignore(RawMatcher rawMatcher) { return materialize().ignore(rawMatcher); } @Override public ResettableClassFileTransformer makeRaw() { return materialize().makeRaw(); } @Override public ResettableClassFileTransformer installOn(Instrumentation instrumentation) { return materialize().installOn(instrumentation); } @Override public ResettableClassFileTransformer installOnByteBuddyAgent() { return materialize().installOnByteBuddyAgent(); } } /** * A delegator transformer for further precising what types to ignore. */ protected class Ignoring extends Delegator<Ignored> implements Ignored { /** * A matcher for identifying types that should not be instrumented. */ private final RawMatcher rawMatcher; /** * Creates a new agent builder for further specifying what types to ignore. * * @param rawMatcher A matcher for identifying types that should not be instrumented. */ protected Ignoring(RawMatcher rawMatcher) { this.rawMatcher = rawMatcher; } @Override protected AgentBuilder materialize() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, rawMatcher, transformation); } @Override public Ignored and(RawMatcher rawMatcher) { return new Ignoring(new RawMatcher.Conjunction(this.rawMatcher, rawMatcher)); } @Override public Ignored or(RawMatcher rawMatcher) { return new Ignoring(new RawMatcher.Disjunction(this.rawMatcher, rawMatcher)); } /** * Returns the outer instance. * * @return The outer instance. */ private Default getOuter() { return Default.this; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && rawMatcher.equals(((Ignoring) other).rawMatcher) && Default.this.equals(((Ignoring) other).getOuter()); } @Override public int hashCode() { int result = rawMatcher.hashCode(); result = 31 * result + Default.this.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Ignoring{" + "rawMatcher=" + rawMatcher + ", agentBuilder=" + Default.this + '}'; } } /** * A helper class that describes a {@link net.bytebuddy.agent.builder.AgentBuilder.Default} after supplying * a {@link net.bytebuddy.agent.builder.AgentBuilder.RawMatcher} such that one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s can be supplied. 
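     * For instance, a registration along the following lines ends up being represented by instances of this
     * helper class; the matcher and the {@code firstTransformer}/{@code secondTransformer} references are
     * illustrative assumptions only:
     * <pre>{@code
     * agentBuilder.type(ElementMatchers.nameEndsWith("Service"))
     *     .transform(firstTransformer)
     *     .transform(secondTransformer)
     *     .asDecorator();
     * }</pre>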
*/ protected class Transforming extends Delegator<Identified.Narrowable> implements Identified.Extendable, Identified.Narrowable { /** * The supplied raw matcher. */ private final RawMatcher rawMatcher; /** * The supplied transformer. */ private final Transformer transformer; /** * {@code true} if this transformer serves as a decorator. */ private final boolean decorator; /** * Creates a new matched default agent builder. * * @param rawMatcher The supplied raw matcher. * @param transformer The supplied transformer. * @param decorator {@code true} if this transformer serves as a decorator. */ protected Transforming(RawMatcher rawMatcher, Transformer transformer, boolean decorator) { this.rawMatcher = rawMatcher; this.transformer = transformer; this.decorator = decorator; } @Override protected AgentBuilder materialize() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, new Transformation.Compound(new Transformation.Simple(rawMatcher, transformer, decorator), transformation)); } @Override public Identified.Extendable transform(Transformer transformer) { return new Transforming(rawMatcher, new Transformer.Compound(this.transformer, transformer), decorator); } @Override public AgentBuilder asDecorator() { return new Transforming(rawMatcher, transformer, true); } @Override public Narrowable and(RawMatcher rawMatcher) { return new Transforming(new RawMatcher.Conjunction(this.rawMatcher, rawMatcher), transformer, decorator); } @Override public Narrowable or(RawMatcher rawMatcher) { return new Transforming(new RawMatcher.Disjunction(this.rawMatcher, rawMatcher), transformer, decorator); } /** * Returns the outer instance. * * @return The outer instance. */ private Default getOuter() { return Default.this; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && decorator == ((Transforming) other).decorator && rawMatcher.equals(((Transforming) other).rawMatcher) && transformer.equals(((Transforming) other).transformer) && Default.this.equals(((Transforming) other).getOuter()); } @Override public int hashCode() { int result = rawMatcher.hashCode(); result = 31 * result + (decorator ? 1 : 0); result = 31 * result + transformer.hashCode(); result = 31 * result + Default.this.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Transforming{" + "rawMatcher=" + rawMatcher + ", transformer=" + transformer + ", decorator=" + decorator + ", agentBuilder=" + Default.this + '}'; } } } }
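// Illustrative usage sketch (not part of the AgentBuilder sources above): the fluent API defined in
// this file is typically driven from a Java agent's premain method. The "com.example." name space,
// the ExampleAgent class and the supplied transformer are assumptions; the transformer is passed in
// by the caller because the exact parameter list of AgentBuilder.Transformer#transform differs
// between Byte Buddy versions.
import java.lang.instrument.Instrumentation;

import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.agent.builder.ResettableClassFileTransformer;
import net.bytebuddy.matcher.ElementMatchers;

public class ExampleAgent {

    // Skips Byte Buddy's own types, narrows matching to application classes and installs the
    // resulting resettable class file transformer on the supplied instrumentation instance.
    public static ResettableClassFileTransformer install(Instrumentation instrumentation,
                                                         AgentBuilder.Transformer transformer) {
        return new AgentBuilder.Default()
                .ignore(ElementMatchers.nameStartsWith("net.bytebuddy."))
                .type(ElementMatchers.nameStartsWith("com.example."))
                .transform(transformer)
                .installOn(instrumentation);
    }
}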
byte-buddy-dep/src/main/java/net/bytebuddy/agent/builder/AgentBuilder.java
package net.bytebuddy.agent.builder; import edu.umd.cs.findbugs.annotations.SuppressFBWarnings; import net.bytebuddy.ByteBuddy; import net.bytebuddy.ClassFileVersion; import net.bytebuddy.asm.AsmVisitorWrapper; import net.bytebuddy.build.EntryPoint; import net.bytebuddy.build.Plugin; import net.bytebuddy.description.field.FieldDescription; import net.bytebuddy.description.method.MethodDescription; import net.bytebuddy.description.method.ParameterDescription; import net.bytebuddy.description.modifier.*; import net.bytebuddy.description.type.TypeDescription; import net.bytebuddy.dynamic.ClassFileLocator; import net.bytebuddy.dynamic.DynamicType; import net.bytebuddy.dynamic.NexusAccessor; import net.bytebuddy.dynamic.TypeResolutionStrategy; import net.bytebuddy.dynamic.loading.ClassInjector; import net.bytebuddy.dynamic.loading.ClassLoadingStrategy; import net.bytebuddy.dynamic.loading.ClassReloadingStrategy; import net.bytebuddy.dynamic.scaffold.InstrumentedType; import net.bytebuddy.dynamic.scaffold.inline.MethodNameTransformer; import net.bytebuddy.dynamic.scaffold.subclass.ConstructorStrategy; import net.bytebuddy.implementation.ExceptionMethod; import net.bytebuddy.implementation.Implementation; import net.bytebuddy.implementation.LoadedTypeInitializer; import net.bytebuddy.implementation.MethodCall; import net.bytebuddy.implementation.auxiliary.AuxiliaryType; import net.bytebuddy.implementation.bytecode.ByteCodeAppender; import net.bytebuddy.implementation.bytecode.Duplication; import net.bytebuddy.implementation.bytecode.StackManipulation; import net.bytebuddy.implementation.bytecode.TypeCreation; import net.bytebuddy.implementation.bytecode.assign.Assigner; import net.bytebuddy.implementation.bytecode.assign.TypeCasting; import net.bytebuddy.implementation.bytecode.collection.ArrayFactory; import net.bytebuddy.implementation.bytecode.constant.ClassConstant; import net.bytebuddy.implementation.bytecode.constant.IntegerConstant; import net.bytebuddy.implementation.bytecode.constant.TextConstant; import net.bytebuddy.implementation.bytecode.member.FieldAccess; import net.bytebuddy.implementation.bytecode.member.MethodInvocation; import net.bytebuddy.implementation.bytecode.member.MethodReturn; import net.bytebuddy.implementation.bytecode.member.MethodVariableAccess; import net.bytebuddy.matcher.ElementMatcher; import net.bytebuddy.matcher.LatentMatcher; import net.bytebuddy.pool.TypePool; import net.bytebuddy.utility.JavaConstant; import net.bytebuddy.utility.JavaModule; import net.bytebuddy.utility.JavaType; import org.objectweb.asm.Label; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; import java.io.*; import java.lang.instrument.ClassDefinition; import java.lang.instrument.ClassFileTransformer; import java.lang.instrument.Instrumentation; import java.lang.instrument.UnmodifiableClassException; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.security.AccessControlContext; import java.security.AccessController; import java.security.PrivilegedAction; import java.security.ProtectionDomain; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import static net.bytebuddy.matcher.ElementMatchers.*; /** * <p> * An agent builder provides a convenience API for defining a * <a 
href="http://docs.oracle.com/javase/6/docs/api/java/lang/instrument/package-summary.html">Java agent</a>. By default, * this transformation is applied by rebasing the type if not specified otherwise by setting a * {@link TypeStrategy}. * </p> * <p> * When defining several {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s, the agent builder always * applies the transformers that were supplied with the last applicable matcher. Therefore, more general transformers * should be defined first. * </p> * <p> * <b>Note</b>: Any transformation is performed using the {@link AccessControlContext} of an agent's creator. * </p> */ public interface AgentBuilder { /** * Defines the given {@link net.bytebuddy.ByteBuddy} instance to be used by the created agent. * * @param byteBuddy The Byte Buddy instance to be used. * @return A new instance of this agent builder which makes use of the given {@code byteBuddy} instance. */ AgentBuilder with(ByteBuddy byteBuddy); /** * Defines the given {@link net.bytebuddy.agent.builder.AgentBuilder.Listener} to be notified by the created agent. * The given listener is notified after any other listener that is already registered. If a listener is registered * twice, it is also notified twice. * * @param listener The listener to be notified. * @return A new instance of this agent builder which creates an agent that informs the given listener about * events. */ AgentBuilder with(Listener listener); /** * Defines the use of the given type locator for locating a {@link TypeDescription} for an instrumented type. * * @param poolStrategy The type locator to use. * @return A new instance of this agent builder which uses the given type locator for looking up class files. */ AgentBuilder with(PoolStrategy poolStrategy); /** * Defines the use of the given location strategy for locating binary data to given class names. * * @param locationStrategy The location strategy to use. * @return A new instance of this agent builder which uses the given location strategy for looking up class files. */ AgentBuilder with(LocationStrategy locationStrategy); /** * Defines how types should be transformed, e.g. if they should be rebased or redefined by the created agent. * * @param typeStrategy The type strategy to use. * @return A new instance of this agent builder which uses the given type strategy. */ AgentBuilder with(TypeStrategy typeStrategy); /** * Defines a given initialization strategy to be applied to generated types. An initialization strategy is responsible * for setting up a type after it was loaded. This initialization must be performed after the transformation because * a Java agent is only invoked before loading a type. By default, the initialization logic is added to a class's type * initializer which queries a global object for any objects that are to be injected into the generated type. * * @param initializationStrategy The initialization strategy to use. * @return A new instance of this agent builder that applies the given initialization strategy. */ AgentBuilder with(InitializationStrategy initializationStrategy); /** * <p> * Specifies a strategy for modifying types that were already loaded prior to the installation of this transformer. * </p> * <p> * <b>Important</b>: Most JVMs do not support changes of a class's structure after a class was already * loaded. Therefore, it is typically required that this class file transformer was built while enabling * {@link AgentBuilder#disableClassFormatChanges()}. 
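 * A retransformation-capable setup therefore typically resembles the following sketch, where the
 * {@code transformer} and {@code instrumentation} references as well as the matched name space are
 * assumed to be supplied by the surrounding agent:
 * <pre>{@code
 * new AgentBuilder.Default()
 *     .disableClassFormatChanges()
 *     .with(RedefinitionStrategy.RETRANSFORMATION)
 *     .type(ElementMatchers.nameStartsWith("com.example."))
 *     .transform(transformer)
 *     .installOn(instrumentation);
 * }</pre>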
* </p> * * @param redefinitionStrategy The redefinition strategy to apply. * @return A new instance of this agent builder that applies the given redefinition strategy. */ Redefining with(RedefinitionStrategy redefinitionStrategy); /** * <p> * Enables or disables management of the JVM's {@code LambdaMetafactory} which is responsible for creating classes that * implement lambda expressions. Without this feature enabled, classes that are represented by lambda expressions are * not instrumented by the JVM such that Java agents have no effect on them when a lambda expression's class is loaded * for the first time. * </p> * <p> * When activating this feature, Byte Buddy instruments the {@code LambdaMetafactory} and takes over the responsibility * of creating classes that represent lambda expressions. In doing so, Byte Buddy has the opportunity to apply the built * class file transformer. If the current VM does not support lambda expressions, activating this feature has no effect. * </p> * <p> * <b>Important</b>: If this feature is active, it is important to release the built class file transformer when * deactivating it. Normally, it is sufficient to call {@link Instrumentation#removeTransformer(ClassFileTransformer)}. * When this feature is enabled, it is however also required to invoke * {@link LambdaInstrumentationStrategy#release(ClassFileTransformer, Instrumentation)}. Otherwise, the executing VMs class * loader retains a reference to the class file transformer what can cause a memory leak. * </p> * * @param lambdaInstrumentationStrategy {@code true} if this feature should be enabled. * @return A new instance of this agent builder where this feature is explicitly enabled or disabled. */ AgentBuilder with(LambdaInstrumentationStrategy lambdaInstrumentationStrategy); /** * Specifies a strategy to be used for resolving {@link TypeDescription} for any type handled by the created transformer. * * @param descriptionStrategy The description strategy to use. * @return A new instance of this agent builder that applies the given description strategy. */ AgentBuilder with(DescriptionStrategy descriptionStrategy); /** * Specifies an installation strategy that this agent builder applies upon installing an agent. * * @param installationStrategy The installation strategy to be used. * @return A new agent builder that applies the supplied installation strategy. */ AgentBuilder with(InstallationStrategy installationStrategy); /** * Specifies a fallback strategy to that this agent builder applies upon installing an agent and during class file transformation. * * @param fallbackStrategy The fallback strategy to be used. * @return A new agent builder that applies the supplied fallback strategy. */ AgentBuilder with(FallbackStrategy fallbackStrategy); /** * Enables class injection of auxiliary classes into the bootstrap class loader. * * @param instrumentation The instrumentation instance that is used for appending jar files to the * bootstrap class path. * @param folder The folder in which jar files of the injected classes are to be stored. * @return An agent builder with bootstrap class loader class injection enabled. */ AgentBuilder enableBootstrapInjection(Instrumentation instrumentation, File folder); /** * Enables the use of the given native method prefix for instrumented methods. Note that this prefix is also * applied when preserving non-native methods. The use of this prefix is also registered when installing the * final agent with an {@link java.lang.instrument.Instrumentation}. 
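 * For example (the chosen prefix and the {@code agentBuilder} instance are arbitrary illustrations):
 * <pre>{@code
 * agentBuilder.enableNativeMethodPrefix("$$original$$");
 * }</pre>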
* * @param prefix The prefix to be used. * @return A new instance of this agent builder which uses the given native method prefix. */ AgentBuilder enableNativeMethodPrefix(String prefix); /** * Disables the use of a native method prefix for instrumented methods. * * @return A new instance of this agent builder which does not use a native method prefix. */ AgentBuilder disableNativeMethodPrefix(); /** * Disables injection of auxiliary classes into the bootstrap class path. * * @return A new instance of this agent builder which does not apply bootstrap class loader injection. */ AgentBuilder disableBootstrapInjection(); /** * <p> * Disables all implicit changes on a class file that Byte Buddy would apply for certain instrumentations. When * using this option, it is no longer possible to rebase a method, i.e. intercepted methods are fully replaced. Furthermore, * it is no longer possible to implicitly apply loaded type initializers for explicitly initializing the generated type. * </p> * <p> * This is equivalent to setting {@link InitializationStrategy.NoOp} and {@link TypeStrategy.Default#REDEFINE_DECLARED_ONLY} * as well as configuring the underlying {@link ByteBuddy} instance to use a {@link net.bytebuddy.implementation.Implementation.Context.Disabled}. * </p> * * @return A new instance of this agent builder that does not apply any implicit changes to the received class file. */ AgentBuilder disableClassFormatChanges(); /** * Assures that all modules of the supplied types are read by the module of any instrumented type. If the current VM does not support * the Java module system, calling this method has no effect and this instance is returned. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param type The types for which to assure their module-visibility from any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Class<?>... type); /** * Assures that all supplied modules are read by the module of any instrumented type. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param module The modules for which to assure their module-visibility from any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, JavaModule... module); /** * Assures that all supplied modules are read by the module of any instrumented type. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param modules The modules for which to assure their module-visibility from any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules); /** * Assures that all modules of the supplied types are read by the module of any instrumented type and vice versa. * If the current VM does not support the Java module system, calling this method has no effect and this instance is returned. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. 
* @param type The types for which to assure their module-visibility from and to any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Class<?>... type); /** * Assures that all supplied modules are read by the module of any instrumented type and vice versa. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param module The modules for which to assure their module-visibility from and to any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, JavaModule... module); /** * Assures that all supplied modules are read by the module of any instrumented type and vice versa. * * @param instrumentation The instrumentation instance that is used for adding a module read-dependency. * @param modules The modules for which to assure their module-visibility from and to any instrumented class. * @return A new instance of this agent builder that assures the supplied types module visibility. * @see Listener.ModuleReadEdgeCompleting */ AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * This behavior can be changed by {@link Identified.Extendable#asDecorator()} where subsequent type matchers are also applied. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it is * also recommended, to exclude class loaders such as for example the bootstrap class loader by using * {@link AgentBuilder#type(ElementMatcher, ElementMatcher)} instead. * </p> * * @param typeMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied on the type being loaded that * decides if the entailed {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should * be applied for that type. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when the given {@code typeMatcher} * indicates a match. */ Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. 
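 * A class loader aware matching might, for example, be registered as in the following sketch, where the
 * {@code applicationClassLoader} reference, the {@code transformer} and the matched name space are assumptions:
 * <pre>{@code
 * agentBuilder.type(ElementMatchers.nameStartsWith("com.example."), ElementMatchers.is(applicationClassLoader))
 *     .transform(transformer);
 * }</pre>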
* </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * This behavior can be changed by {@link Identified.Extendable#asDecorator()} where subsequent type matchers are also applied. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it * is also recommended, to exclude class loaders such as for example the bootstrap class loader. * </p> * * @param typeMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied on the type being * loaded that decides if the entailed * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should be applied for * that type. * @param classLoaderMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied to the * {@link java.lang.ClassLoader} that is loading the type being loaded. This matcher * is always applied first where the type matcher is not applied in case that this * matcher does not indicate a match. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when both the given * {@code typeMatcher} and {@code classLoaderMatcher} indicate a match. */ Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * This behavior can be changed by {@link Identified.Extendable#asDecorator()} where subsequent type matchers are also applied. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it * is also recommended, to exclude class loaders such as for example the bootstrap class loader. * </p> * * @param typeMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied on the type being * loaded that decides if the entailed * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should be applied for * that type. * @param classLoaderMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied to the * {@link java.lang.ClassLoader} that is loading the type being loaded. This matcher * is always applied second where the type matcher is not applied in case that this * matcher does not indicate a match. * @param moduleMatcher An {@link net.bytebuddy.matcher.ElementMatcher} that is applied to the {@link JavaModule} * of the type being loaded. 
This matcher is always applied first where the class loader and * type matchers are not applied in case that this matcher does not indicate a match. On a JVM * that does not support the Java modules system, this matcher is not applied. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when both the given * {@code typeMatcher} and {@code classLoaderMatcher} indicate a match. */ Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * <p> * Matches a type being loaded in order to apply the supplied {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s before loading this type. * If several matchers positively match a type only the latest registered matcher is considered for transformation. * </p> * <p> * If this matcher is chained with additional subsequent matchers, this matcher is always executed first whereas the following matchers are * executed in the order of their execution. If any matcher indicates that a type is to be matched, none of the following matchers is still queried. * </p> * <p> * <b>Note</b>: When applying a matcher, regard the performance implications by {@link AgentBuilder#ignore(ElementMatcher)}. The former * matcher is applied first such that it makes sense to ignore name spaces that are irrelevant to instrumentation. If possible, it * is also recommended, to exclude class loaders such as for example the bootstrap class loader. * </p> * * @param matcher A matcher that decides if the entailed {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should be * applied for a type that is being loaded. * @return A definable that represents this agent builder which allows for the definition of one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s to be applied when the given {@code matcher} * indicates a match. */ Identified.Narrowable type(RawMatcher matcher); /** * <p> * Excludes any type that is matched by the provided matcher from instrumentation and considers types by all {@link ClassLoader}s. * By default, Byte Buddy does not instrument synthetic types or types that are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. 
It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param typeMatcher A matcher that identifies types that should not be instrumented. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher); /** * <p> * Excludes any type that is matched by the provided matcher and is loaded by a class loader matching the second matcher. * By default, Byte Buddy does not instrument synthetic types, types within a {@code net.bytebuddy.*} package or types that * are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param typeMatcher A matcher that identifies types that should not be instrumented. * @param classLoaderMatcher A matcher that identifies a class loader that identifies classes that should not be instrumented. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * <p> * Excludes any type that is matched by the provided matcher and is loaded by a class loader matching the second matcher. * By default, Byte Buddy does not instrument synthetic types, types within a {@code net.bytebuddy.*} package or types that * are loaded by the bootstrap class loader. 
* </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param typeMatcher A matcher that identifies types that should not be instrumented. * @param classLoaderMatcher A matcher that identifies a class loader that identifies classes that should not be instrumented. * @param moduleMatcher A matcher that identifies a module that identifies classes that should not be instrumented. On a JVM * that does not support the Java modules system, this matcher is not applied. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * <p> * Excludes any type that is matched by the raw matcher provided to this method. By default, Byte Buddy does not * instrument synthetic types, types within a {@code net.bytebuddy.*} package or types that are loaded by the bootstrap class loader. * </p> * <p> * When ignoring a type, any subsequently chained matcher is applied after this matcher in the order of their registration. Also, if * any matcher indicates that a type is to be ignored, none of the following chained matchers is executed. * </p> * <p> * <b>Note</b>: For performance reasons, it is recommended to always include a matcher that excludes as many namespaces * as possible. Byte Buddy can determine a type's name without parsing its class file and can therefore discard such * types with minimal overhead. When a different property of a type - such as for example its modifiers or its annotations * is accessed - Byte Buddy parses the class file lazily in order to allow for such a matching. Therefore, any exclusion * of a name should always be done as a first step and even if it does not influence the selection of what types are * matched. 
Without changing this property, the class file of every type is being parsed! * </p> * <p> * <b>Warning</b>: If a type is loaded during the instrumentation of the same type, this causes the original call site that loads the type * to remain unbound, causing a {@link LinkageError}. It is therefore important to not instrument types that may be loaded during the application * of a {@link Transformer}. For this reason, it is not recommended to instrument classes of the bootstrap class loader that Byte Buddy might * require for instrumenting a class or to instrument any of Byte Buddy's classes. If such instrumentation is desired, it is important to * assert for each class that they are not loaded during instrumentation. * </p> * * @param rawMatcher A raw matcher that identifies types that should not be instrumented. * @return A new instance of this agent builder that ignores all types that are matched by the provided matcher. * All previous matchers for ignored types are discarded. */ Ignored ignore(RawMatcher rawMatcher); /** * Creates a {@link java.lang.instrument.ClassFileTransformer} that implements the configuration of this * agent builder. * * @return A class file transformer that implements the configuration of this agent builder. */ ResettableClassFileTransformer makeRaw(); /** * <p> * Creates and installs a {@link java.lang.instrument.ClassFileTransformer} that implements the configuration of * this agent builder with a given {@link java.lang.instrument.Instrumentation}. If retransformation is enabled, * the installation also causes all loaded types to be retransformed. * </p> * <p> * If installing the created class file transformer causes an exception to be thrown, the consequences of this * exception are determined by the {@link InstallationStrategy} of this builder. * </p> * * @param instrumentation The instrumentation on which this agent builder's configuration is to be installed. * @return The installed class file transformer. */ ResettableClassFileTransformer installOn(Instrumentation instrumentation); /** * Creates and installs a {@link java.lang.instrument.ClassFileTransformer} that implements the configuration of * this agent builder with the Byte Buddy-agent which must be installed prior to calling this method. * * @return The installed class file transformer. * @see AgentBuilder#installOn(Instrumentation) */ ResettableClassFileTransformer installOnByteBuddyAgent(); /** * An abstraction for extending a matcher. * * @param <T> The type that is produced by chaining a matcher. */ interface Matchable<T extends Matchable<T>> { /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. When matching a * type, class loaders are not considered. * * @param typeMatcher A matcher for the type being matched. * @return A chained matcher. */ T and(ElementMatcher<? super TypeDescription> typeMatcher); /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @return A chained matcher. */ T and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. 
* @param moduleMatcher A matcher for the type's module. On a JVM that does not support modules, the Java module is represented by {@code null}. * @return A chained matcher. */ T and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * Defines a matching that is positive if both the previous matcher and the supplied matcher are matched. * * @param rawMatcher A raw matcher for the type being matched. * @return A chained matcher. */ T and(RawMatcher rawMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. When matching a * type, the class loader is not considered. * * @param typeMatcher A matcher for the type being matched. * @return A chained matcher. */ T or(ElementMatcher<? super TypeDescription> typeMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @return A chained matcher. */ T or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. * * @param typeMatcher A matcher for the type being matched. * @param classLoaderMatcher A matcher for the type's class loader. * @param moduleMatcher A matcher for the type's module. On a JVM that does not support modules, the Java module is represented by {@code null}. * @return A chained matcher. */ T or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher); /** * Defines a matching that is positive if the previous matcher or the supplied matcher are matched. * * @param rawMatcher A raw matcher for the type being matched. * @return A chained matcher. */ T or(RawMatcher rawMatcher); /** * An abstract base implementation of a matchable. * * @param <S> The type that is produced by chaining a matcher. */ abstract class AbstractBase<S extends Matchable<S>> implements Matchable<S> { @Override public S and(ElementMatcher<? super TypeDescription> typeMatcher) { return and(typeMatcher, any()); } @Override public S and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return and(typeMatcher, classLoaderMatcher, any()); } @Override public S and(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return and(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, moduleMatcher)); } @Override public S or(ElementMatcher<? super TypeDescription> typeMatcher) { return or(typeMatcher, any()); } @Override public S or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return or(typeMatcher, classLoaderMatcher, any()); } @Override public S or(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return or(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, moduleMatcher)); } } } /** * Allows to further specify ignored types. 
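 * For example, additional exclusions can be chained onto an initial {@code ignore} registration
 * (a sketch only; the name spaces, the {@code agentBuilder} instance and the {@code transformer} are assumptions):
 * <pre>{@code
 * agentBuilder.ignore(ElementMatchers.nameStartsWith("net.bytebuddy."))
 *     .or(ElementMatchers.nameStartsWith("com.example.generated."))
 *     .type(ElementMatchers.nameStartsWith("com.example."))
 *     .transform(transformer);
 * }</pre>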
*/ interface Ignored extends Matchable<Ignored>, AgentBuilder { /* this is merely a unionizing interface that does not declare methods */ } /** * Describes an {@link net.bytebuddy.agent.builder.AgentBuilder} which was handed a matcher for identifying * types to be instrumented in order to supply one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s. */ interface Identified { /** * Applies the given transformer for the already supplied matcher. * * @param transformer The transformer to apply. * @return A new instance of this agent builder with the transformer being applied when the previously supplied matcher * identified a type for instrumentation which also allows for the registration of subsequent transformers. */ Extendable transform(Transformer transformer); /** * Allows to specify a type matcher for a type to instrument. */ interface Narrowable extends Matchable<Narrowable>, Identified { /* this is merely a unionizing interface that does not declare methods */ } /** * This interface is used to allow for optionally providing several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer} to be applied when a matcher identifies a type * to be instrumented. Any subsequent transformers are applied in the order they are registered. */ interface Extendable extends AgentBuilder, Identified { /** * <p> * Applies the specified transformation as a decorative transformation. For a decorative transformation, the supplied * transformer is prepended to any previous transformation that also matches the instrumented type, i.e. both transformations * are supplied. This procedure is repeated until a transformer is reached that matches the instrumented type but is not * defined as decorating after which no further transformations are considered. If all matching transformations are declared * as decorating, all matching transformers are applied. * </p> * <p> * <b>Note</b>: A decorating transformer is applied <b>after</b> previously registered transformers. * </p> * * @return A new instance of this agent builder with the specified transformation being applied as a decorator. */ AgentBuilder asDecorator(); } } /** * An agent builder that allows the configuration of how to apply a {@link RedefinitionStrategy}. Such a configuration * is only applied if the redefinition strategy is alive. */ interface Redefining extends AgentBuilder { /** * A batch allocator is responsible for dividing a redefinition of existing types into several chunks. This allows * to narrow down errors to the redefinition of specific types or to apply a {@link RedefinitionStrategy.Listener} * action between chunks. * * @param redefinitionBatchAllocator The batch allocator to use. * @return A new instance of this agent builder which makes use of the specified batch allocator. */ Redefining with(RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator); /** * A failure handler is responsible for reacting to failed type redefinitions. * * @param redefinitionFailureHandler The failure handler to apply. * @return A new instance of this agent builder which makes use of the specified failure handler. */ Redefining with(RedefinitionStrategy.FailureHandler redefinitionFailureHandler); /** * <p> * A redefinition listener is invoked before each batch of type redefinitions and on every error as well as * after the redefinition was completed. A redefinition listener can be used for debugging or logging purposes * and to apply actions between each batch, e.g.
to pause or wait in order to avoid rendering the current VM * non-responsive if a lot of classes are redefined. * </p> * <p> * Adding several listeners does not replace previous listeners but applies them in the registration order. * </p> * * @param redefinitionListener The listener to register. * @return A new instance of this agent builder which notifies the specified listener upon type redefinitions. */ Redefining with(RedefinitionStrategy.Listener redefinitionListener); } /** * A matcher that allows to determine if a {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer} * should be applied during the execution of a {@link java.lang.instrument.ClassFileTransformer} that was * generated by an {@link net.bytebuddy.agent.builder.AgentBuilder}. */ interface RawMatcher { /** * Decides if the given {@code typeDescription} should be instrumented with the entailed * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s. * * @param typeDescription A description of the type to be instrumented. * @param classLoader The class loader of the instrumented type. Might be {@code null} if this class * loader represents the bootstrap class loader. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param classBeingRedefined The class being redefined which is only not {@code null} if a retransformation * is applied. * @param protectionDomain The protection domain of the type being transformed. * @return {@code true} if the entailed {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s should * be applied for the given {@code typeDescription}. */ boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain); /** * A conjunction of two raw matchers. */ class Conjunction implements RawMatcher { /** * The left matcher which is applied first. */ private final RawMatcher left; /** * The right matcher which is applied second. */ private final RawMatcher right; /** * Creates a new conjunction of two raw matchers. * * @param left The left matcher which is applied first. * @param right The right matcher which is applied second. */ protected Conjunction(RawMatcher left, RawMatcher right) { this.left = left; this.right = right; } @Override public boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain) { return left.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain) && right.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Conjunction that = (Conjunction) object; return left.equals(that.left) && right.equals(that.right); } @Override public int hashCode() { int result = left.hashCode(); result = 31 * result + right.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.RawMatcher.Conjunction{" + "left=" + left + ", right=" + right + '}'; } } /** * A disjunction of two raw matchers. */ class Disjunction implements RawMatcher { /** * The left matcher which is applied first. */ private final RawMatcher left; /** * The right matcher which is applied second. */ private final RawMatcher right; /** * Creates a new disjunction of two raw matchers. * * @param left The left matcher which is applied first. 
* @param right The right matcher which is applied second. */ protected Disjunction(RawMatcher left, RawMatcher right) { this.left = left; this.right = right; } @Override public boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain) { return left.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain) || right.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Disjunction that = (Disjunction) object; return left.equals(that.left) && right.equals(that.right); } @Override public int hashCode() { int result = left.hashCode(); result = 31 * result + right.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.RawMatcher.Disjunction{" + "left=" + left + ", right=" + right + '}'; } } /** * A raw matcher implementation that checks a {@link TypeDescription} * and its {@link java.lang.ClassLoader} against two suitable matchers in order to determine if the matched * type should be instrumented. */ class ForElementMatchers implements RawMatcher { /** * The type matcher to apply to a {@link TypeDescription}. */ private final ElementMatcher<? super TypeDescription> typeMatcher; /** * The class loader matcher to apply to a {@link java.lang.ClassLoader}. */ private final ElementMatcher<? super ClassLoader> classLoaderMatcher; /** * A module matcher to apply to a {@code java.lang.reflect.Module}. */ private final ElementMatcher<? super JavaModule> moduleMatcher; /** * Creates a new {@link net.bytebuddy.agent.builder.AgentBuilder.RawMatcher} that only matches the * supplied {@link TypeDescription} and its {@link java.lang.ClassLoader} against two matchers in order * to decide if an instrumentation should be conducted. * * @param typeMatcher The type matcher to apply to a {@link TypeDescription}. * @param classLoaderMatcher The class loader matcher to apply to a {@link java.lang.ClassLoader}. * @param moduleMatcher A module matcher to apply to a {@code java.lang.reflect.Module}. */ public ForElementMatchers(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<?
super JavaModule> moduleMatcher) { this.typeMatcher = typeMatcher; this.classLoaderMatcher = classLoaderMatcher; this.moduleMatcher = moduleMatcher; } @Override public boolean matches(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain) { return moduleMatcher.matches(module) && classLoaderMatcher.matches(classLoader) && typeMatcher.matches(typeDescription); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && classLoaderMatcher.equals(((ForElementMatchers) other).classLoaderMatcher) && moduleMatcher.equals(((ForElementMatchers) other).moduleMatcher) && typeMatcher.equals(((ForElementMatchers) other).typeMatcher); } @Override public int hashCode() { int result = typeMatcher.hashCode(); result = 31 * result + classLoaderMatcher.hashCode(); result = 31 * result + moduleMatcher.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.RawMatcher.ForElementMatchers{" + "typeMatcher=" + typeMatcher + ", classLoaderMatcher=" + classLoaderMatcher + ", moduleMatcher=" + moduleMatcher + '}'; } } } /** * A listener that is informed about events that occur during an instrumentation process. */ interface Listener { /** * Invoked right before a successful transformation is applied. * * @param typeDescription The type that is being transformed. * @param classLoader The class loader which is loading this type. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param dynamicType The dynamic type that was created. */ void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType); /** * Invoked when a type is not transformed but ignored. * * @param typeDescription The type being ignored for transformation. * @param classLoader The class loader which is loading this type. * @param module The ignored type's module or {@code null} if the current VM does not support modules. */ void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module); /** * Invoked when an error has occurred during transformation. * * @param typeName The type name of the instrumented type. * @param classLoader The class loader which is loading this type. * @param module The instrumented type's module or {@code null} if the current VM does not support modules. * @param throwable The occurred error. */ void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable); /** * Invoked after a class was attempted to be loaded, independently of its treatment. * * @param typeName The binary name of the instrumented type. * @param classLoader The class loader which is loading this type. * @param module The instrumented type's module or {@code null} if the current VM does not support modules. */ void onComplete(String typeName, ClassLoader classLoader, JavaModule module); /** * A no-op implementation of a {@link net.bytebuddy.agent.builder.AgentBuilder.Listener}. */ enum NoOp implements Listener { /** * The singleton instance. 
*/ INSTANCE; @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { /* do nothing */ } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { /* do nothing */ } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { /* do nothing */ } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.Listener.NoOp." + name(); } } /** * An adapter for a listener where all methods are implemented as non-operational. */ abstract class Adapter implements Listener { @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { /* do nothing */ } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { /* do nothing */ } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { /* do nothing */ } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { /* do nothing */ } } /** * A listener that writes events to a {@link PrintStream}. This listener prints a line per event, including the event type and * the name of the type in question. */ class StreamWriting implements Listener { /** * The prefix that is prepended to all written messages. */ protected static final String PREFIX = "[Byte Buddy]"; /** * The print stream written to. */ private final PrintStream printStream; /** * Creates a new stream writing listener. * * @param printStream The print stream written to. */ public StreamWriting(PrintStream printStream) { this.printStream = printStream; } /** * Creates a new stream writing listener that writes to {@link System#out}. * * @return A listener writing events to the standard output stream. */ public static Listener toSystemOut() { return new StreamWriting(System.out); } /** * Creates a new stream writing listener that writes to {@link System#err}. * * @return A listener writing events to the standard error stream.
*/ public static Listener toSystemError() { return new StreamWriting(System.err); } @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { printStream.println(PREFIX + " TRANSFORM " + typeDescription.getName() + "[" + classLoader + ", " + module + "]"); } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { printStream.println(PREFIX + " IGNORE " + typeDescription.getName() + "[" + classLoader + ", " + module + "]"); } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { synchronized (printStream) { printStream.println(PREFIX + " ERROR " + typeName + "[" + classLoader + ", " + module + "]"); throwable.printStackTrace(printStream); } } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { printStream.println(PREFIX + " COMPLETE " + typeName + "[" + classLoader + ", " + module + "]"); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && printStream.equals(((StreamWriting) other).printStream); } @Override public int hashCode() { return printStream.hashCode(); } @Override public String toString() { return "AgentBuilder.Listener.StreamWriting{" + "printStream=" + printStream + '}'; } } /** * A listener that adds read-edges to any module of an instrumented class upon its transformation. */ class ModuleReadEdgeCompleting extends Listener.Adapter { /** * The instrumentation instance used for adding read edges. */ private final Instrumentation instrumentation; /** * {@code true} if the listener should also add a read-edge from the supplied modules to the instrumented type's module. */ private final boolean addTargetEdge; /** * The modules to add as a read edge to any transformed class's module. */ private final Set<? extends JavaModule> modules; /** * Creates a new module read-edge completing listener. * * @param instrumentation The instrumentation instance used for adding read edges. * @param addTargetEdge {@code true} if the listener should also add a read-edge from the supplied modules * to the instrumented type's module. * @param modules The modules to add as a read edge to any transformed class's module. */ public ModuleReadEdgeCompleting(Instrumentation instrumentation, boolean addTargetEdge, Set<? extends JavaModule> modules) { this.instrumentation = instrumentation; this.addTargetEdge = addTargetEdge; this.modules = modules; } /** * Resolves a listener that adds module edges from and to the instrumented type's module. * * @param instrumentation The instrumentation instance used for adding read edges. * @param addTargetEdge {@code true} if the listener should also add a read-edge from the supplied * modules to the instrumented type's module. * @param type The types for which to extract the modules. * @return An appropriate listener. */ protected static Listener of(Instrumentation instrumentation, boolean addTargetEdge, Class<?>... type) { Set<JavaModule> modules = new HashSet<JavaModule>(); for (Class<?> aType : type) { JavaModule module = JavaModule.ofType(aType); if (module.isNamed()) { modules.add(module); } } return modules.isEmpty() ? 
Listener.NoOp.INSTANCE : new Listener.ModuleReadEdgeCompleting(instrumentation, addTargetEdge, modules); } @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { if (module != null && module.isNamed()) { for (JavaModule target : modules) { if (!module.canRead(target)) { module.addReads(instrumentation, target); } if (addTargetEdge && !target.canRead(module)) { target.addReads(instrumentation, module); } } } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ModuleReadEdgeCompleting that = (ModuleReadEdgeCompleting) object; return instrumentation.equals(that.instrumentation) && addTargetEdge == that.addTargetEdge && modules.equals(that.modules); } @Override public int hashCode() { int result = instrumentation.hashCode(); result = 31 * result + modules.hashCode(); result = 31 * result + (addTargetEdge ? 1 : 0); return result; } @Override public String toString() { return "AgentBuilder.Listener.ModuleReadEdgeCompleting{" + "instrumentation=" + instrumentation + ", addTargetEdge=" + addTargetEdge + ", modules=" + modules + '}'; } } /** * A compound listener that allows to group several listeners in one instance. */ class Compound implements Listener { /** * The listeners that are represented by this compound listener in their application order. */ private final List<? extends Listener> listeners; /** * Creates a new compound listener. * * @param listener The listeners to apply in their application order. */ public Compound(Listener... listener) { this(Arrays.asList(listener)); } /** * Creates a new compound listener. * * @param listeners The listeners to apply in their application order. */ public Compound(List<? extends Listener> listeners) { this.listeners = listeners; } @Override public void onTransformation(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, DynamicType dynamicType) { for (Listener listener : listeners) { listener.onTransformation(typeDescription, classLoader, module, dynamicType); } } @Override public void onIgnored(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { for (Listener listener : listeners) { listener.onIgnored(typeDescription, classLoader, module); } } @Override public void onError(String typeName, ClassLoader classLoader, JavaModule module, Throwable throwable) { for (Listener listener : listeners) { listener.onError(typeName, classLoader, module, throwable); } } @Override public void onComplete(String typeName, ClassLoader classLoader, JavaModule module) { for (Listener listener : listeners) { listener.onComplete(typeName, classLoader, module); } } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && listeners.equals(((Compound) other).listeners); } @Override public int hashCode() { return listeners.hashCode(); } @Override public String toString() { return "AgentBuilder.Listener.Compound{" + "listeners=" + listeners + '}'; } } } /** * A type strategy is responsible for creating a type builder for a type that is being instrumented. */ interface TypeStrategy { /** * Creates a type builder for a given type. * * @param typeDescription The type being instrumented. * @param byteBuddy The Byte Buddy configuration. * @param classFileLocator The class file locator to use. * @param methodNameTransformer The method name transformer to use. 
* @return A type builder for the given arguments. */ DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer); /** * Default implementations of type strategies. */ enum Default implements TypeStrategy { /** * A definition handler that performs a rebasing for all types. */ REBASE { @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return byteBuddy.rebase(typeDescription, classFileLocator, methodNameTransformer); } }, /** * <p> * A definition handler that performs a redefinition for all types. * </p> * <p> * Note that the default agent builder is configured to apply a self initialization where a static class initializer * is added to the redefined class. This can be disabled by for example using a {@link InitializationStrategy.Minimal} or * {@link InitializationStrategy.NoOp}. Also, consider the constraints implied by {@link ByteBuddy#redefine(TypeDescription, ClassFileLocator)}. * </p> * <p> * For prohibiting any changes on a class file, use {@link AgentBuilder#disableClassFormatChanges()} * </p> */ REDEFINE { @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return byteBuddy.redefine(typeDescription, classFileLocator); } }, /** * <p> * A definition handler that performs a redefinition for all types and ignores all methods that were not declared by the instrumented type. * </p> * <p> * Note that the default agent builder is configured to apply a self initialization where a static class initializer * is added to the redefined class. This can be disabled by for example using a {@link InitializationStrategy.Minimal} or * {@link InitializationStrategy.NoOp}. Also, consider the constraints implied by {@link ByteBuddy#redefine(TypeDescription, ClassFileLocator)}. * </p> * <p> * For prohibiting any changes on a class file, use {@link AgentBuilder#disableClassFormatChanges()} * </p> */ REDEFINE_DECLARED_ONLY { @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return byteBuddy.redefine(typeDescription, classFileLocator).ignoreAlso(LatentMatcher.ForSelfDeclaredMethod.NOT_DECLARED); } }; @Override public String toString() { return "AgentBuilder.TypeStrategy.Default." + name(); } } /** * A type strategy that applies a build {@link EntryPoint}. */ class ForBuildEntryPoint implements TypeStrategy { /** * The entry point to apply. */ private final EntryPoint entryPoint; /** * Creates a new type strategy for an entry point. * * @param entryPoint The entry point to apply. 
*/ public ForBuildEntryPoint(EntryPoint entryPoint) { this.entryPoint = entryPoint; } @Override public DynamicType.Builder<?> builder(TypeDescription typeDescription, ByteBuddy byteBuddy, ClassFileLocator classFileLocator, MethodNameTransformer methodNameTransformer) { return entryPoint.transform(typeDescription, byteBuddy, classFileLocator, methodNameTransformer); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForBuildEntryPoint that = (ForBuildEntryPoint) object; return entryPoint.equals(that.entryPoint); } @Override public int hashCode() { return entryPoint.hashCode(); } @Override public String toString() { return "AgentBuilder.TypeStrategy.ForBuildEntryPoint{" + "entryPoint=" + entryPoint + '}'; } } } /** * A transformer allows to apply modifications to a {@link net.bytebuddy.dynamic.DynamicType}. Such a modification * is then applied to any instrumented type that was matched by the preceding matcher. */ interface Transformer { /** * Allows for a transformation of a {@link net.bytebuddy.dynamic.DynamicType.Builder}. * * @param builder The dynamic builder to transform. * @param typeDescription The description of the type currently being instrumented. * @param classLoader The class loader of the instrumented class. Might be {@code null} to * represent the bootstrap class loader. * @return A transformed version of the supplied {@code builder}. */ DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader); /** * A no-op implementation of a {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer} that does * not modify the supplied dynamic type. */ enum NoOp implements Transformer { /** * The singleton instance. */ INSTANCE; @Override public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) { return builder; } @Override public String toString() { return "AgentBuilder.Transformer.NoOp." + name(); } } /** * A transformer that applies a build {@link Plugin}. */ class ForBuildPlugin implements Transformer { /** * The plugin to apply. */ private final Plugin plugin; /** * Creates a new transformer for a build {@link Plugin}. * * @param plugin The plugin to apply. */ public ForBuildPlugin(Plugin plugin) { this.plugin = plugin; } @Override public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) { return plugin.apply(builder, typeDescription); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForBuildPlugin that = (ForBuildPlugin) object; return plugin.equals(that.plugin); } @Override public int hashCode() { return plugin.hashCode(); } @Override public String toString() { return "AgentBuilder.Transformer.ForBuildPlugin{" + "plugin=" + plugin + '}'; } } /** * A compound transformer that allows to group several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s as a single transformer. */ class Compound implements Transformer { /** * The transformers to apply in their application order. */ private final Transformer[] transformer; /** * Creates a new compound transformer. * * @param transformer The transformers to apply in their application order. */ public Compound(Transformer... 
transformer) { this.transformer = transformer; } @Override public DynamicType.Builder<?> transform(DynamicType.Builder<?> builder, TypeDescription typeDescription, ClassLoader classLoader) { for (Transformer transformer : this.transformer) { builder = transformer.transform(builder, typeDescription, classLoader); } return builder; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && Arrays.equals(transformer, ((Compound) other).transformer); } @Override public int hashCode() { return Arrays.hashCode(transformer); } @Override public String toString() { return "AgentBuilder.Transformer.Compound{" + "transformer=" + Arrays.toString(transformer) + '}'; } } } /** * A type locator allows to specify how {@link TypeDescription}s are resolved by an {@link net.bytebuddy.agent.builder.AgentBuilder}. */ interface PoolStrategy { /** * Creates a type pool for a given class file locator. * * @param classFileLocator The class file locator to use. * @param classLoader The class loader for which the class file locator was created. * @return A type pool for the supplied class file locator. */ TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader); /** * <p> * A default type locator that resolves types only if any property that is not the type's name is requested. * </p> * <p> * The returned type pool uses a {@link net.bytebuddy.pool.TypePool.CacheProvider.Simple} and the * {@link ClassFileLocator} that is provided by the builder's {@link LocationStrategy}. * </p> */ enum Default implements PoolStrategy { /** * A type locator that parses the code segment of each method for extracting information about parameter * names even if they are not explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#EXTENDED */ EXTENDED(TypePool.Default.ReaderMode.EXTENDED), /** * A type locator that skips the code segment of each method and does therefore not extract information * about parameter names. Parameter names are still included if they are explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#FAST */ FAST(TypePool.Default.ReaderMode.FAST); /** * The reader mode to apply by this type locator. */ private final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator. * * @param readerMode The reader mode to apply by this type locator. */ Default(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return new TypePool.Default.WithLazyResolution(TypePool.CacheProvider.Simple.withObjectType(), classFileLocator, readerMode); } @Override public String toString() { return "AgentBuilder.PoolStrategy.Default." + name(); } } /** * <p> * A type locator that resolves all type descriptions eagerly. * </p> * <p> * The returned type pool uses a {@link net.bytebuddy.pool.TypePool.CacheProvider.Simple} and the * {@link ClassFileLocator} that is provided by the builder's {@link LocationStrategy}. * </p> */ enum Eager implements PoolStrategy { /** * A type locator that parses the code segment of each method for extracting information about parameter * names even if they are not explicitly included in a class file. 
* * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#EXTENDED */ EXTENDED(TypePool.Default.ReaderMode.EXTENDED), /** * A type locator that skips the code segment of each method and does therefore not extract information * about parameter names. Parameter names are still included if they are explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#FAST */ FAST(TypePool.Default.ReaderMode.FAST); /** * The reader mode to apply by this type locator. */ private final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator. * * @param readerMode The reader mode to apply by this type locator. */ Eager(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return new TypePool.Default(TypePool.CacheProvider.Simple.withObjectType(), classFileLocator, readerMode); } @Override public String toString() { return "AgentBuilder.PoolStrategy.Eager." + name(); } } /** * <p> * A type locator that attempts loading a type if it cannot be located by the underlying lazy type pool. * </p> * <p> * The returned type pool uses a {@link net.bytebuddy.pool.TypePool.CacheProvider.Simple} and the * {@link ClassFileLocator} that is provided by the builder's {@link LocationStrategy}. Any types * are loaded via the instrumented type's {@link ClassLoader}. * </p> */ enum ClassLoading implements PoolStrategy { /** * A type locator that parses the code segment of each method for extracting information about parameter * names even if they are not explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#EXTENDED */ EXTENDED(TypePool.Default.ReaderMode.EXTENDED), /** * A type locator that skips the code segment of each method and does therefore not extract information * about parameter names. Parameter names are still included if they are explicitly included in a class file. * * @see net.bytebuddy.pool.TypePool.Default.ReaderMode#FAST */ FAST(TypePool.Default.ReaderMode.FAST); /** * The reader mode to apply by this type locator. */ private final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator. * * @param readerMode The reader mode to apply by this type locator. */ ClassLoading(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return TypePool.ClassLoading.of(classLoader, new TypePool.Default.WithLazyResolution(TypePool.CacheProvider.Simple.withObjectType(), classFileLocator, readerMode)); } @Override public String toString() { return "AgentBuilder.PoolStrategy.ClassLoading." + name(); } } /** * <p> * A type locator that uses type pools but allows for the configuration of a custom cache provider by class loader. Note that a * {@link TypePool} can grow in size and that a static reference is kept to this pool by Byte Buddy's registration of a * {@link ClassFileTransformer} what can cause a memory leak if the supplied caches are not cleared on a regular basis. Also note * that a cache provider can be accessed concurrently by multiple {@link ClassLoader}s. * </p> * <p> * All types that are returned by the locator's type pool are resolved lazily. * </p> */ abstract class WithTypePoolCache implements PoolStrategy { /** * The reader mode to use for parsing a class file. 
*/ protected final TypePool.Default.ReaderMode readerMode; /** * Creates a new type locator that creates {@link TypePool}s but provides a custom {@link net.bytebuddy.pool.TypePool.CacheProvider}. * * @param readerMode The reader mode to use for parsing a class file. */ protected WithTypePoolCache(TypePool.Default.ReaderMode readerMode) { this.readerMode = readerMode; } @Override public TypePool typePool(ClassFileLocator classFileLocator, ClassLoader classLoader) { return new TypePool.Default.WithLazyResolution(locate(classLoader), classFileLocator, readerMode); } /** * Locates a cache provider for a given class loader. * * @param classLoader The class loader for which to locate a cache. This class loader might be {@code null} to represent the bootstrap loader. * @return The cache provider to use. */ protected abstract TypePool.CacheProvider locate(ClassLoader classLoader); @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; WithTypePoolCache that = (WithTypePoolCache) object; return readerMode == that.readerMode; } @Override public int hashCode() { return readerMode.hashCode(); } /** * An implementation of a type locator {@link WithTypePoolCache} (note the documentation of the linked class) that is based on a * {@link ConcurrentMap}. It is the responsibility of the type locator's user to prevent the type locator from leaking memory. */ public static class Simple extends WithTypePoolCache { /** * The concurrent map that is used for storing a cache provider per class loader. */ private final ConcurrentMap<? super ClassLoader, TypePool.CacheProvider> cacheProviders; /** * Creates a new type locator that caches a cache provider per class loader in a concurrent map. The type * locator uses a fast {@link net.bytebuddy.pool.TypePool.Default.ReaderMode}. * * @param cacheProviders The concurrent map that is used for storing a cache provider per class loader. */ public Simple(ConcurrentMap<? super ClassLoader, TypePool.CacheProvider> cacheProviders) { this(TypePool.Default.ReaderMode.FAST, cacheProviders); } /** * Creates a new type locator that caches a cache provider per class loader in a concurrent map. * * @param readerMode The reader mode to use for parsing a class file. * @param cacheProviders The concurrent map that is used for storing a cache provider per class loader. */ public Simple(TypePool.Default.ReaderMode readerMode, ConcurrentMap<? super ClassLoader, TypePool.CacheProvider> cacheProviders) { super(readerMode); this.cacheProviders = cacheProviders; } @Override protected TypePool.CacheProvider locate(ClassLoader classLoader) { classLoader = classLoader == null ? getBootstrapMarkerLoader() : classLoader; TypePool.CacheProvider cacheProvider = cacheProviders.get(classLoader); while (cacheProvider == null) { cacheProvider = TypePool.CacheProvider.Simple.withObjectType(); TypePool.CacheProvider previous = cacheProviders.putIfAbsent(classLoader, cacheProvider); if (previous != null) { cacheProvider = previous; } } return cacheProvider; } /** * <p> * Returns the class loader to serve as a cache key if a cache provider for the bootstrap class loader is requested. * This class loader is represented by {@code null} in the JVM which is an invalid value for many {@link ConcurrentMap} * implementations.
* </p> * <p> * By default, {@link ClassLoader#getSystemClassLoader()} is used as such a key as any resource location for the * bootstrap class loader is performed via the system class loader within Byte Buddy as {@code null} cannot be queried * for resources via method calls such that this does not make a difference. * </p> * * @return A class loader to represent the bootstrap class loader. */ protected ClassLoader getBootstrapMarkerLoader() { return ClassLoader.getSystemClassLoader(); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; if (!super.equals(object)) return false; Simple simple = (Simple) object; return cacheProviders.equals(simple.cacheProviders); } @Override public int hashCode() { int result = super.hashCode(); result = 31 * result + cacheProviders.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.PoolStrategy.WithTypePoolCache.Simple{" + "cacheProviders=" + cacheProviders + '}'; } } } } /** * An initialization strategy which determines the handling of {@link net.bytebuddy.implementation.LoadedTypeInitializer}s * and the loading of auxiliary types. The agent builder does not reuse the {@link TypeResolutionStrategy} as Javaagents cannot access * a loaded class after a transformation such that different initialization strategies become meaningful. */ interface InitializationStrategy { /** * Creates a new dispatcher for injecting this initialization strategy during a transformation process. * * @return The dispatcher to be used. */ Dispatcher dispatcher(); /** * A dispatcher for changing a class file to adapt a self-initialization strategy. */ interface Dispatcher { /** * Transforms the instrumented type to implement an appropriate initialization strategy. * * @param builder The builder which should implement the initialization strategy. * @return The given {@code builder} with the initialization strategy applied. */ DynamicType.Builder<?> apply(DynamicType.Builder<?> builder); /** * Registers a dynamic type for initialization and/or begins the initialization process. * * @param dynamicType The dynamic type that is created. * @param classLoader The class loader of the dynamic type. * @param injectorFactory The injector factory */ void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory); /** * A factory for creating a {@link ClassInjector} only if it is required. */ interface InjectorFactory { /** * Resolves the class injector for this factory. * * @return The class injector for this factory. */ ClassInjector resolve(); } } /** * A non-initializing initialization strategy. */ enum NoOp implements InitializationStrategy, Dispatcher { /** * The singleton instance. */ INSTANCE; @Override public Dispatcher dispatcher() { return this; } @Override public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder) { return builder; } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.InitializationStrategy.NoOp." + name(); } } /** * An initialization strategy that adds a code block to an instrumented type's type initializer which * then calls a specific class that is responsible for the explicit initialization. 
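* <p>
* A minimal registration sketch (illustrative only: it assumes the {@code with(InitializationStrategy)} registration
* method of this builder, and {@code SomeAnnotation}, {@code someTransformer} and {@code instrumentation} are placeholders):
* </p>
* <pre>{@code
* new AgentBuilder.Default()
*     .with(AgentBuilder.InitializationStrategy.SelfInjection.SPLIT)
*     .type(ElementMatchers.isAnnotatedWith(SomeAnnotation.class))
*     .transform(someTransformer)
*     .installOn(instrumentation);
* }</pre>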
*/ @SuppressFBWarnings(value = "DMI_RANDOM_USED_ONLY_ONCE", justification = "Avoiding synchronization without security concerns") enum SelfInjection implements InitializationStrategy { /** * A form of self-injection where auxiliary types that are annotated by * {@link net.bytebuddy.implementation.auxiliary.AuxiliaryType.SignatureRelevant} of the instrumented type are loaded lazily and * any other auxiliary type is loaded eagerly. */ SPLIT { @Override public InitializationStrategy.Dispatcher dispatcher() { return new SelfInjection.Dispatcher.Split(new Random().nextInt()); } }, /** * A form of self-injection where any auxiliary type is loaded lazily. */ LAZY { @Override public InitializationStrategy.Dispatcher dispatcher() { return new SelfInjection.Dispatcher.Lazy(new Random().nextInt()); } }, /** * A form of self-injection where any auxiliary type is loaded eagerly. */ EAGER { @Override public InitializationStrategy.Dispatcher dispatcher() { return new SelfInjection.Dispatcher.Eager(new Random().nextInt()); } }; @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection." + name(); } /** * A dispatcher for a self-initialization strategy. */ protected abstract static class Dispatcher implements InitializationStrategy.Dispatcher { /** * A random identification for the applied self-initialization. */ protected final int identification; /** * Creates a new dispatcher. * * @param identification A random identification for the applied self-initialization. */ protected Dispatcher(int identification) { this.identification = identification; } @Override public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder) { return builder.initializer(new NexusAccessor.InitializationAppender(identification)); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && identification == ((Dispatcher) other).identification; } @Override public int hashCode() { return identification; } /** * A dispatcher for the {@link net.bytebuddy.agent.builder.AgentBuilder.InitializationStrategy.SelfInjection#SPLIT} strategy. */ protected static class Split extends Dispatcher { /** * Creates a new split dispatcher. * * @param identification A random identification for the applied self-initialization. */ protected Split(int identification) { super(identification); } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); LoadedTypeInitializer loadedTypeInitializer; if (!auxiliaryTypes.isEmpty()) { TypeDescription instrumentedType = dynamicType.getTypeDescription(); ClassInjector classInjector = injectorFactory.resolve(); Map<TypeDescription, byte[]> independentTypes = new LinkedHashMap<TypeDescription, byte[]>(auxiliaryTypes); Map<TypeDescription, byte[]> dependentTypes = new LinkedHashMap<TypeDescription, byte[]>(auxiliaryTypes); for (TypeDescription auxiliaryType : auxiliaryTypes.keySet()) { (auxiliaryType.getDeclaredAnnotations().isAnnotationPresent(AuxiliaryType.SignatureRelevant.class) ? 
dependentTypes : independentTypes).remove(auxiliaryType); } Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers = dynamicType.getLoadedTypeInitializers(); if (!independentTypes.isEmpty()) { for (Map.Entry<TypeDescription, Class<?>> entry : classInjector.inject(independentTypes).entrySet()) { loadedTypeInitializers.get(entry.getKey()).onLoad(entry.getValue()); } } Map<TypeDescription, LoadedTypeInitializer> lazyInitializers = new HashMap<TypeDescription, LoadedTypeInitializer>(loadedTypeInitializers); loadedTypeInitializers.keySet().removeAll(independentTypes.keySet()); loadedTypeInitializer = lazyInitializers.size() > 1 // there exist auxiliary types that need lazy loading ? new InjectingInitializer(instrumentedType, dependentTypes, lazyInitializers, classInjector) : lazyInitializers.get(instrumentedType); } else { loadedTypeInitializer = dynamicType.getLoadedTypeInitializers().get(dynamicType.getTypeDescription()); } NexusAccessor.INSTANCE.register(dynamicType.getTypeDescription().getName(), classLoader, identification, loadedTypeInitializer); } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.Split{identification=" + identification + "}"; } } /** * A dispatcher for the {@link net.bytebuddy.agent.builder.AgentBuilder.InitializationStrategy.SelfInjection#LAZY} strategy. */ protected static class Lazy extends Dispatcher { /** * Creates a new lazy dispatcher. * * @param identification A random identification for the applied self-initialization. */ protected Lazy(int identification) { super(identification); } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); LoadedTypeInitializer loadedTypeInitializer = auxiliaryTypes.isEmpty() ? dynamicType.getLoadedTypeInitializers().get(dynamicType.getTypeDescription()) : new InjectingInitializer(dynamicType.getTypeDescription(), auxiliaryTypes, dynamicType.getLoadedTypeInitializers(), injectorFactory.resolve()); NexusAccessor.INSTANCE.register(dynamicType.getTypeDescription().getName(), classLoader, identification, loadedTypeInitializer); } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.Lazy{identification=" + identification + "}"; } } /** * A dispatcher for the {@link net.bytebuddy.agent.builder.AgentBuilder.InitializationStrategy.SelfInjection#EAGER} strategy. */ protected static class Eager extends Dispatcher { /** * Creates a new eager dispatcher. * * @param identification A random identification for the applied self-initialization. 
*/ protected Eager(int identification) { super(identification); } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers = dynamicType.getLoadedTypeInitializers(); if (!auxiliaryTypes.isEmpty()) { for (Map.Entry<TypeDescription, Class<?>> entry : injectorFactory.resolve().inject(auxiliaryTypes).entrySet()) { loadedTypeInitializers.get(entry.getKey()).onLoad(entry.getValue()); } } LoadedTypeInitializer loadedTypeInitializer = loadedTypeInitializers.get(dynamicType.getTypeDescription()); NexusAccessor.INSTANCE.register(dynamicType.getTypeDescription().getName(), classLoader, identification, loadedTypeInitializer); } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.Eager{identification=" + identification + "}"; } } /** * A type initializer that injects all auxiliary types of the instrumented type. */ protected static class InjectingInitializer implements LoadedTypeInitializer { /** * The instrumented type. */ private final TypeDescription instrumentedType; /** * The auxiliary types mapped to their class file representation. */ private final Map<TypeDescription, byte[]> rawAuxiliaryTypes; /** * The instrumented types and auxiliary types mapped to their loaded type initializers. */ private final Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers; /** * The class injector to use. */ private final ClassInjector classInjector; /** * Creates a new injection initializer. * * @param instrumentedType The instrumented type. * @param rawAuxiliaryTypes The auxiliary types mapped to their class file representation. * @param loadedTypeInitializers The instrumented types and auxiliary types mapped to their loaded type initializers. * @param classInjector The class injector to use.
*/ protected InjectingInitializer(TypeDescription instrumentedType, Map<TypeDescription, byte[]> rawAuxiliaryTypes, Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers, ClassInjector classInjector) { this.instrumentedType = instrumentedType; this.rawAuxiliaryTypes = rawAuxiliaryTypes; this.loadedTypeInitializers = loadedTypeInitializers; this.classInjector = classInjector; } @Override public void onLoad(Class<?> type) { for (Map.Entry<TypeDescription, Class<?>> auxiliary : classInjector.inject(rawAuxiliaryTypes).entrySet()) { loadedTypeInitializers.get(auxiliary.getKey()).onLoad(auxiliary.getValue()); } loadedTypeInitializers.get(instrumentedType).onLoad(type); } @Override public boolean isAlive() { return true; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; InjectingInitializer that = (InjectingInitializer) o; return classInjector.equals(that.classInjector) && instrumentedType.equals(that.instrumentedType) && rawAuxiliaryTypes.equals(that.rawAuxiliaryTypes) && loadedTypeInitializers.equals(that.loadedTypeInitializers); } @Override public int hashCode() { int result = instrumentedType.hashCode(); result = 31 * result + rawAuxiliaryTypes.hashCode(); result = 31 * result + loadedTypeInitializers.hashCode(); result = 31 * result + classInjector.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.InitializationStrategy.SelfInjection.Dispatcher.InjectingInitializer{" + "instrumentedType=" + instrumentedType + ", rawAuxiliaryTypes=" + rawAuxiliaryTypes + ", loadedTypeInitializers=" + loadedTypeInitializers + ", classInjector=" + classInjector + '}'; } } } } /** * An initialization strategy that loads auxiliary types before loading the instrumented type. This strategy skips all types * that are a subtype of the instrumented type which would cause a premature loading of the instrumented type and abort * the instrumentation process. */ enum Minimal implements InitializationStrategy, Dispatcher { /** * The singleton instance. */ INSTANCE; @Override public Dispatcher dispatcher() { return this; } @Override public DynamicType.Builder<?> apply(DynamicType.Builder<?> builder) { return builder; } @Override public void register(DynamicType dynamicType, ClassLoader classLoader, InjectorFactory injectorFactory) { Map<TypeDescription, byte[]> auxiliaryTypes = dynamicType.getAuxiliaryTypes(); Map<TypeDescription, byte[]> independentTypes = new LinkedHashMap<TypeDescription, byte[]>(auxiliaryTypes); for (TypeDescription auxiliaryType : auxiliaryTypes.keySet()) { if (!auxiliaryType.getDeclaredAnnotations().isAnnotationPresent(AuxiliaryType.SignatureRelevant.class)) { independentTypes.remove(auxiliaryType); } } if (!independentTypes.isEmpty()) { ClassInjector classInjector = injectorFactory.resolve(); Map<TypeDescription, LoadedTypeInitializer> loadedTypeInitializers = dynamicType.getLoadedTypeInitializers(); for (Map.Entry<TypeDescription, Class<?>> entry : classInjector.inject(independentTypes).entrySet()) { loadedTypeInitializers.get(entry.getKey()).onLoad(entry.getValue()); } } } @Override public String toString() { return "AgentBuilder.InitializationStrategy.Minimal." + name(); } } } /** * A description strategy is responsible for resolving a {@link TypeDescription} when transforming or retransforming/-defining a type. */ interface DescriptionStrategy { /** * Describes the given type. * * @param typeName The binary name of the type to describe. 
* @param type The type that is being redefined, if a redefinition is applied or {@code null} if no redefined type is available. * @param typePool The type pool to use for locating a type if required. * @return An appropriate type description. */ TypeDescription apply(String typeName, Class<?> type, TypePool typePool); /** * Indicates if this description strategy makes use of loaded type information and yields a different type description if no loaded type is available. * * @return {@code true} if this description strategy prefers loaded type information when describing a type and only uses a type pool * if loaded type information is not available. */ boolean isLoadedFirst(); /** * Default implementations of a {@link DescriptionStrategy}. */ enum Default implements DescriptionStrategy { /** * A description strategy that represents a type as a {@link net.bytebuddy.description.type.TypeDescription.ForLoadedType} if a * retransformation or redefinition is applied on a type. Using a loaded type typically results in better performance as no * I/O is required for resolving type descriptions. However, any interaction with the type is carried out via the Java reflection * API. Using the reflection API triggers eager loading of any type that is part of a method or field signature. If any of these * types are missing from the class path, this eager loading will cause a {@link NoClassDefFoundError}. Some Java code declares * optional dependencies to other classes which are only realized if the optional dependency is present. Such code relies on the * Java reflection API not being used for types using optional dependencies. * * @see FallbackStrategy.Simple#ENABLED * @see FallbackStrategy.ByThrowableType#ofOptionalTypes() */ HYBRID(true) { @Override public TypeDescription apply(String typeName, Class<?> type, TypePool typePool) { return type == null ? typePool.describe(typeName).resolve() : new TypeDescription.ForLoadedType(type); } }, /** * <p> * A description strategy that always describes Java types using a {@link TypePool}. This requires that any type - even if it is already * loaded and a {@link Class} instance is available - is processed as a non-loaded type description. Doing so can cause overhead as processing * loaded types is supported very efficiently by a JVM. * </p> * <p> * Avoiding the usage of loaded types can improve robustness as this approach does not rely on the Java reflection API which triggers eager * validation of this loaded type which can fail an application if optional types are used by any type's field or method signatures. Also, it * is possible to guarantee debugging meta data to be available also for retransformed or redefined types if a {@link TypeStrategy} specifies * the extraction of such meta data. * </p> */ POOL_ONLY(false) { @Override public TypeDescription apply(String typeName, Class<?> type, TypePool typePool) { return typePool.describe(typeName).resolve(); } }, /** * <p> * A description strategy that always describes Java types using a {@link TypePool} unless a type cannot be resolved by a pool and a loaded * {@link Class} instance is available. Doing so can cause overhead as processing loaded types is supported very efficiently by a JVM. * </p> * <p> * Avoiding the usage of loaded types can improve robustness as this approach does not rely on the Java reflection API which triggers eager * validation of this loaded type which can fail an application if optional types are used by any type's field or method signatures.
Also, it * is possible to guarantee debugging meta data to be available also for retransformed or redefined types if a {@link TypeStrategy} specifies * the extraction of such meta data. * </p> */ POOL_FIRST(false) { @Override public TypeDescription apply(String typeName, Class<?> type, TypePool typePool) { TypePool.Resolution resolution = typePool.describe(typeName); return resolution.isResolved() || type == null ? resolution.resolve() : new TypeDescription.ForLoadedType(type); } }; /** * Indicates if loaded type information is preferred over using a type pool for describing a type. */ private final boolean loadedFirst; /** * Creates a new default description strategy. * * @param loadedFirst {@code true} if loaded type information is preferred over using a type pool for describing a type. */ Default(boolean loadedFirst) { this.loadedFirst = loadedFirst; } @Override public boolean isLoadedFirst() { return loadedFirst; } @Override public String toString() { return "AgentBuilder.DescriptionStrategy.Default." + name(); } } } /** * An installation strategy determines the reaction to a raised exception after the registration of a {@link ClassFileTransformer}. */ interface InstallationStrategy { /** * Handles an error that occurred after registering a class file transformer during installation. * * @param instrumentation The instrumentation onto which the class file transformer was registered. * @param classFileTransformer The class file transformer that was registered. * @param throwable The error that occurred. * @return The class file transformer to return when an error occurred. */ ResettableClassFileTransformer onError(Instrumentation instrumentation, ResettableClassFileTransformer classFileTransformer, Throwable throwable); /** * Default implementations of installation strategies. */ enum Default implements InstallationStrategy { /** * <p> * An installation strategy that unregisters the transformer and propagates the exception. Using this strategy does not guarantee * that the registered transformer was not applied to any class, nor does it attempt to revert previous transformations. It only * guarantees that the class file transformer is unregistered and no longer applies after this method returns. * </p> * <p> * <b>Note</b>: This installation strategy does not undo any applied class redefinitions, if such were applied. * </p> */ ESCALATING { @Override public ResettableClassFileTransformer onError(Instrumentation instrumentation, ResettableClassFileTransformer classFileTransformer, Throwable throwable) { instrumentation.removeTransformer(classFileTransformer); throw new IllegalStateException("Could not install class file transformer", throwable); } }, /** * An installation strategy that retains the class file transformer and suppresses the error. */ SUPPRESSING { @Override public ResettableClassFileTransformer onError(Instrumentation instrumentation, ResettableClassFileTransformer classFileTransformer, Throwable throwable) { return classFileTransformer; } }; @Override public String toString() { return "AgentBuilder.InstallationStrategy.Default." + name(); } } } /** * A strategy for creating a {@link ClassFileLocator} when instrumenting a type. */ interface LocationStrategy { /** * Creates a class file locator for a given class loader and module combination. * * @param classLoader The class loader that is loading an instrumented type. Might be {@code null} to represent the bootstrap class loader.
* @param module The type's module or {@code null} if Java modules are not supported on the current VM. * @return The class file locator to use. */ ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module); /** * A location strategy that never locates any byte code. */ enum NoOp implements LocationStrategy { /** * The singleton instance. */ INSTANCE; @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return ClassFileLocator.NoOp.INSTANCE; } @Override public String toString() { return "AgentBuilder.LocationStrategy.NoOp." + name(); } } /** * A location strategy that locates class files by querying an instrumented type's {@link ClassLoader}. */ enum ForClassLoader implements LocationStrategy { /** * A location strategy that keeps a strong reference to the class loader the created class file locator represents. */ STRONG { @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return ClassFileLocator.ForClassLoader.of(classLoader); } }, /** * A location strategy that keeps a weak reference to the class loader the created class file locator represents. * As a consequence, any returned class file locator stops working once the represented class loader is garbage collected. */ WEAK { @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return ClassFileLocator.ForClassLoader.WeaklyReferenced.of(classLoader); } }; /** * Adds additional location strategies as fallbacks to this location strategy. * * @param classFileLocator The class file locators to query if this location strategy cannot locate a class file. * @return A compound location strategy that first applies this location strategy and then queries the supplied class file locators. */ public LocationStrategy withFallbackTo(ClassFileLocator... classFileLocator) { return withFallbackTo(Arrays.asList(classFileLocator)); } /** * Adds additional location strategies as fallbacks to this location strategy. * * @param classFileLocators The class file locators to query if this location strategy cannot locate a class file. * @return A compound location strategy that first applies this location strategy and then queries the supplied class file locators. */ public LocationStrategy withFallbackTo(Collection<? extends ClassFileLocator> classFileLocators) { List<LocationStrategy> locationStrategies = new ArrayList<LocationStrategy>(classFileLocators.size()); for (ClassFileLocator classFileLocator : classFileLocators) { locationStrategies.add(new Simple(classFileLocator)); } return withFallbackTo(locationStrategies); } /** * Adds additional location strategies as fallbacks to this location strategy. * * @param locationStrategy The fallback location strategies to use. * @return A compound location strategy that first applies this location strategy and then the supplied fallback location strategies * in the supplied order. */ public LocationStrategy withFallbackTo(LocationStrategy... locationStrategy) { return withFallbackTo(Arrays.asList(locationStrategy)); } /** * Adds additional location strategies as fallbacks to this location strategy. * * @param locationStrategies The fallback location strategies to use. * @return A compound location strategy that first applies this location strategy and then the supplied fallback location strategies * in the supplied order. */ public LocationStrategy withFallbackTo(List<? 
extends LocationStrategy> locationStrategies) { List<LocationStrategy> allLocationStrategies = new ArrayList<LocationStrategy>(locationStrategies.size() + 1); allLocationStrategies.add(this); allLocationStrategies.addAll(locationStrategies); return new Compound(allLocationStrategies); } @Override public String toString() { return "AgentBuilder.LocationStrategy.ForClassLoader." + name(); } } /** * A simple location strategy that queries a given class file locator. */ class Simple implements LocationStrategy { /** * The class file locator to query. */ private final ClassFileLocator classFileLocator; /** * A simple location strategy that queries a given class file locator. * * @param classFileLocator The class file locator to query. */ public Simple(ClassFileLocator classFileLocator) { this.classFileLocator = classFileLocator; } @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { return classFileLocator; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Simple simple = (Simple) object; return classFileLocator.equals(simple.classFileLocator); } @Override public int hashCode() { return classFileLocator.hashCode(); } @Override public String toString() { return "AgentBuilder.LocationStrategy.Simple{" + "classFileLocator=" + classFileLocator + '}'; } } /** * A compound location strategy that applies a list of location strategies. */ class Compound implements LocationStrategy { /** * The location strategies in their application order. */ private final List<? extends LocationStrategy> locationStrategies; /** * Creates a new compound location strategy. * * @param locationStrategy The location strategies in their application order. */ public Compound(LocationStrategy... locationStrategy) { this(Arrays.asList(locationStrategy)); } /** * Creates a new compound location strategy. * * @param locationStrategies The location strategies in their application order. */ public Compound(List<? extends LocationStrategy> locationStrategies) { this.locationStrategies = locationStrategies; } @Override public ClassFileLocator classFileLocator(ClassLoader classLoader, JavaModule module) { List<ClassFileLocator> classFileLocators = new ArrayList<ClassFileLocator>(locationStrategies.size()); for (LocationStrategy locationStrategy : locationStrategies) { classFileLocators.add(locationStrategy.classFileLocator(classLoader, module)); } return new ClassFileLocator.Compound(classFileLocators); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Compound compound = (Compound) object; return locationStrategies.equals(compound.locationStrategies); } @Override public int hashCode() { return locationStrategies.hashCode(); } @Override public String toString() { return "AgentBuilder.LocationStrategy.Compound{" + "locationStrategies=" + locationStrategies + '}'; } } } /** * A fallback strategy allows to reattempt a transformation or a consideration for redefinition/retransformation in case an exception * occurs. Doing so, it is possible to use a {@link TypePool} rather than using a loaded type description backed by a {@link Class}. * Loaded types can raise exceptions and errors if a {@link ClassLoader} cannot resolve all types that this class references. Using * a type pool, such errors can be avoided as type descriptions can be resolved lazily, avoiding such errors. 
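 * <p>
 * A minimal sketch of creating such a strategy, assuming the resulting instance is subsequently registered with an agent builder:
 * </p>
 * <pre>{@code
 * // Reattempt with a type pool description when optional types are missing, i.e. on a LinkageError or TypeNotPresentException.
 * FallbackStrategy fallbackStrategy = FallbackStrategy.ByThrowableType.ofOptionalTypes();
 * }</pre>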
*/ interface FallbackStrategy { /** * Returns {@code true} if the supplied type and throwable combination should result in a reattempt where the * loaded type is not used for querying information. * * @param type The loaded type that was queried during the transformation attempt. * @param throwable The error or exception that was caused during the transformation. * @return {@code true} if the supplied type and throwable combination should result in a reattempt where the loaded type is not used for querying information. */ boolean isFallback(Class<?> type, Throwable throwable); /** * A simple fallback strategy that either always reattempts a transformation or never does so. */ enum Simple implements FallbackStrategy { /** * An enabled fallback strategy that always attempts a new trial. */ ENABLED(true), /** * A disabled fallback strategy that never attempts a new trial. */ DISABLED(false); /** * {@code true} if this fallback strategy is enabled. */ private final boolean enabled; /** * Creates a new default fallback strategy. * * @param enabled {@code true} if this fallback strategy is enabled. */ Simple(boolean enabled) { this.enabled = enabled; } @Override public boolean isFallback(Class<?> type, Throwable throwable) { return enabled; } @Override public String toString() { return "AgentBuilder.FallbackStrategy.Simple." + name(); } } /** * A fallback strategy that discriminates by the type of the {@link Throwable} that triggered a request. */ class ByThrowableType implements FallbackStrategy { /** * A set of throwable types that should trigger a fallback attempt. */ private final Set<? extends Class<? extends Throwable>> types; /** * Creates a new throwable type-discriminating fallback strategy. * * @param type The throwable types that should trigger a fallback. */ @SuppressWarnings("unchecked") // In absence of @SafeVarargs for Java 6 public ByThrowableType(Class<? extends Throwable>... type) { this(new HashSet<Class<? extends Throwable>>(Arrays.asList(type))); } /** * Creates a new throwable type-discriminating fallback strategy. * * @param types The throwable types that should trigger a fallback. */ public ByThrowableType(Set<? extends Class<? extends Throwable>> types) { this.types = types; } /** * Creates a fallback strategy that attempts a fallback if an error indicating a type error is the reason for requesting a reattempt. * * @return A fallback strategy that triggers a reattempt if a {@link LinkageError} or a {@link TypeNotPresentException} is raised. */ @SuppressWarnings("unchecked") // In absence of @SafeVarargs for Java 6 public static FallbackStrategy ofOptionalTypes() { return new ByThrowableType(LinkageError.class, TypeNotPresentException.class); } @Override public boolean isFallback(Class<?> type, Throwable throwable) { for (Class<? extends Throwable> aType : types) { if (aType.isInstance(throwable)) { return true; } } return false; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ByThrowableType byType = (ByThrowableType) object; return types.equals(byType.types); } @Override public int hashCode() { return types.hashCode(); } @Override public String toString() { return "AgentBuilder.FallbackStrategy.ByThrowableType{" + "types=" + types + '}'; } } } /** * <p> * A redefinition strategy regulates how already loaded classes are modified by a built agent. * </p> * <p> * <b>Important</b>: Most JVMs do not support changes of a class's structure after a class was already * loaded.
Therefore, it is typically required that this class file transformer was built while enabling * {@link AgentBuilder#disableClassFormatChanges()}. * </p> */ enum RedefinitionStrategy { /** * Disables redefinition such that already loaded classes are not affected by the agent. */ DISABLED { @Override protected boolean isRetransforming(Instrumentation instrumentation) { return false; } @Override protected Delegate<?> make(Default.Transformation transformation) { throw new IllegalStateException("A disabled redefinition strategy cannot create a collector"); } }, /** * <p> * Applies a <b>redefinition</b> to all classes that are already loaded and that would have been transformed if * the built agent was registered before they were loaded. The created {@link ClassFileTransformer} is <b>not</b> * registered for applying retransformations. * </p> * <p> * Using this strategy, a redefinition is applied as a single transformation request. This means that a single illegal * redefinition of a class causes the entire redefinition attempt to fail. * </p> * <p> * <b>Note</b>: When applying a redefinition, it is normally required to use a {@link TypeStrategy} that applies * a redefinition instead of rebasing classes such as {@link TypeStrategy.Default#REDEFINE}. Also, consider * the constrains given by this type strategy. * </p> */ REDEFINITION { @Override protected boolean isRetransforming(Instrumentation instrumentation) { if (!instrumentation.isRedefineClassesSupported()) { throw new IllegalArgumentException("Cannot redefine classes: " + instrumentation); } return false; } @Override protected Delegate<?> make(Default.Transformation transformation) { return new Delegate.ForRedefinition(transformation); } }, /** * <p> * Applies a <b>retransformation</b> to all classes that are already loaded and that would have been transformed if * the built agent was registered before they were loaded. The created {@link ClassFileTransformer} is registered * for applying retransformations. * </p> * <p> * Using this strategy, a retransformation is applied as a single transformation request. This means that a single illegal * retransformation of a class causes the entire retransformation attempt to fail. * </p> * <p> * <b>Note</b>: When applying a redefinition, it is normally required to use a {@link TypeStrategy} that applies * a redefinition instead of rebasing classes such as {@link TypeStrategy.Default#REDEFINE}. Also, consider * the constrains given by this type strategy. * </p> */ RETRANSFORMATION { @Override protected boolean isRetransforming(Instrumentation instrumentation) { if (!instrumentation.isRetransformClassesSupported()) { throw new IllegalArgumentException("Cannot retransform classes: " + instrumentation); } return true; } @Override protected Delegate<?> make(Default.Transformation transformation) { return new Delegate.ForRetransformation(transformation); } }; /** * Indicates if this strategy requires a class file transformer to be registered with a hint to apply the * transformer for retransformation. * * @param instrumentation The instrumentation instance used. * @return {@code true} if a class file transformer must be registered with a hint for retransformation. */ protected abstract boolean isRetransforming(Instrumentation instrumentation); /** * Indicates that this redefinition strategy applies a modification of already loaded classes. * * @return {@code true} if this redefinition strategy applies a modification of already loaded classes. 
*/ protected boolean isEnabled() { return this != DISABLED; } /** * Creates a collector instance that is responsible for collecting loaded classes for potential retransformation. * * @param transformation The transformation that is registered for the agent. * @return A new collector for collecting already loaded classes for transformation. */ protected abstract Delegate<?> make(Default.Transformation transformation); @Override public String toString() { return "AgentBuilder.RedefinitionStrategy." + name(); } /** * A batch allocator which is responsible for applying a redefinition in a batches. A class redefinition or * retransformation can be a time-consuming operation rendering a JVM non-responsive. In combination with a * a {@link RedefinitionStrategy.Listener}, it is also possible to apply pauses between batches to distribute * the load of a retransformation over time. */ public interface BatchAllocator { /** * Splits a list of types to be retransformed into seperate batches. * * @param types A list of types which should be retransformed. * @return An iterable of retransformations within a batch. */ Iterable<? extends List<Class<?>>> batch(List<Class<?>> types); /** * A batch allocator that includes all types in a single batch. */ enum ForTotal implements BatchAllocator { /** * The singleton instance. */ INSTANCE; @Override public Iterable<? extends List<Class<?>>> batch(List<Class<?>> types) { return Collections.singleton(types); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.BatchAllocator.ForTotal." + name(); } } /** * A batch allocator that creates chunks with a fixed size as batch jobs. */ class ForFixedSize implements BatchAllocator { /** * The size of each chunk. */ private final int size; /** * Creates a new batch allocator that creates fixed-sized chunks. * * @param size The size of each chunk. */ protected ForFixedSize(int size) { this.size = size; } /** * Creates a new batch allocator that creates chunks of a fixed size. * * @param size The size of each chunk or {@code 0} if the batch should be included in a single chunk. * @return An appropriate batch allocator. */ public static BatchAllocator ofSize(int size) { if (size > 0) { return new ForFixedSize(size); } else if (size == 0) { return ForTotal.INSTANCE; } else { throw new IllegalArgumentException("Cannot define a batch with a negative size: " + size); } } @Override public Iterable<? extends List<Class<?>>> batch(List<Class<?>> types) { List<List<Class<?>>> batches = new ArrayList<List<Class<?>>>(); for (int index = 0; index < types.size(); index += size) { batches.add(new ArrayList<Class<?>>(types.subList(index, Math.min(types.size(), index + size)))); } return batches; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForFixedSize that = (ForFixedSize) object; return size == that.size; } @Override public int hashCode() { return size; } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.BatchAllocator.ForFixedSize{" + "size=" + size + '}'; } } } /** * A failure handler to apply during a retransformation. */ public interface FailureHandler { /** * Invoked when a batch of a retransformation failed. * * @param types The types included in the batch. * @param throwable The throwable indicating the failure. * @return {@code true} if the batch failure should be considered as handled. 
*/ boolean onBatchFailure(List<Class<?>> types, Throwable throwable); /** * Invoked after all batches were completed. * * @param failures A map of all failures that were not considered as handled. */ void onFailure(Map<List<Class<?>>, Throwable> failures); /** * Default implementations of {@link FailureHandler}s. */ enum Default implements FailureHandler { /** * A fail fast failure handler fails a redefinition on the first failed batch. */ FAIL_FAST { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { throw new IllegalStateException("Could not transform " + types, throwable); } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { throw new IllegalStateException("Unexpected recovery from batch failure"); } }, /** * A fail last failure handler fails a redefinition after all batches were run if at least one batch failed. */ FAIL_LAST { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { return false; } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { throw new IllegalStateException("Could not transform " + failures); } }, /** * A suppressing failure handler ignores any failed batches. */ IGNORING { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { return false; } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } }, /** * A suppressing failure handler ignores any failed batches and does not expose them to the any listeners. */ SUPPRESSING { @Override public boolean onBatchFailure(List<Class<?>> types, Throwable throwable) { return true; } @Override public void onFailure(Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } }; @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.FailureHandler.Default." + name(); } } } /** * A listener to be applied during a redefinition. */ public interface Listener { /** * Invoked before applying a batch. * * @param index A running index of the batch starting at {@code 0}. * @param batch The types included in this batch. * @param types All types included in the retransformation. */ void onBatch(int index, List<Class<?>> batch, List<Class<?>> types); /** * Invoked upon an error during a batch. This method is not invoked if the failure handler handled this error. * * @param index A running index of the batch starting at {@code 0}. * @param batch The types included in this batch. * @param throwable The throwable that caused this invocation. * @param types All types included in the retransformation. */ void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types); /** * Invoked upon completion of all batches. * * @param index A total amount of batches that were executed. * @param types All types included in the retransformation. * @param failures A mapping of batch types to their unhandled failures. */ void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures); /** * A non-operational listener. */ enum NoOp implements Listener { /** * The singleton instance. 
*/ INSTANCE; @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { /* do nothing */ } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { /* do nothing */ } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.NoOp." + name(); } } /** * A listener that invokes {@link Thread#yield()} prior to every batch but the first batch. */ enum Yielding implements Listener { /** * The singleton instance. */ INSTANCE; @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { if (index > 0) { Thread.yield(); } } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { /* do nothing */ } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.Yielding." + name(); } } /** * A listener adapter that offers non-operational implementations of all listener methods. */ abstract class Adapter implements Listener { @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { /* do nothing */ } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { /* do nothing */ } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { /* do nothing */ } } /** * A listener that invokes {@link Thread#sleep(long)} prior to every batch but the first batch. */ class Pausing extends Adapter { /** * The time to sleep in milliseconds between every two batches. */ private final long value; /** * Creates a new pausing listener. * * @param value The time to sleep in milliseconds between every two batches. */ protected Pausing(long value) { this.value = value; } /** * Creates a listener that pauses for the specified amount of time. If the specified value is {@code 0}, a * non-operational listener is returned. * * @param value The amount of time to pause between redefinition batches. * @param timeUnit The time unit of {@code value}. * @return An appropriate listener. */ public static Listener of(long value, TimeUnit timeUnit) { if (value > 0L) { return new Pausing(timeUnit.toMillis(value)); } else if (value == 0L) { return NoOp.INSTANCE; } else { throw new IllegalArgumentException("Cannot sleep for a non-positive amount of time: " + value); } } @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { if (index > 0) { try { Thread.sleep(value); } catch (InterruptedException exception) { throw new RuntimeException("Sleep was interrupted", exception); } } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Pausing pausing = (Pausing) object; return value == pausing.value; } @Override public int hashCode() { return (int) (value ^ (value >>> 32)); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.Pausing{" + "value=" + value + '}'; } } /** * A listener that writes events to a {@link PrintStream}. */ class StreamWriting implements Listener { /** * The print stream to write any events to. 
*/ private final PrintStream printStream; /** * Creates a new stream writing listener. * * @param printStream The print stream to write any events to. */ public StreamWriting(PrintStream printStream) { this.printStream = printStream; } /** * Writes the stream result to {@link System#out}. * * @return An appropriate listener. */ public static Listener toSystemOut() { return new StreamWriting(System.out); } /** * Writes the stream result to {@link System#err}. * * @return An appropriate listener. */ public static Listener toSystemError() { return new StreamWriting(System.err); } @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { printStream.println(AgentBuilder.Listener.StreamWriting.PREFIX + " RETRANSFORM BATCH #" + index + " (" + batch.size() + " types)"); } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { synchronized (printStream) { printStream.println(AgentBuilder.Listener.StreamWriting.PREFIX + " RETRANSFORM ERROR #" + index + " (" + batch.size() + " types)"); throwable.printStackTrace(printStream); } } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { printStream.println(AgentBuilder.Listener.StreamWriting.PREFIX + " RETRANSFORM COMPLETE " + index + " batches (" + failures.size() + " errors)"); } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; StreamWriting streamWriting = (StreamWriting) object; return printStream.equals(streamWriting.printStream); } @Override public int hashCode() { return printStream.hashCode(); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.StreamWriting{" + "printStream=" + printStream + '}'; } } /** * A compound listener that delegates events to several listeners. */ class Compound implements Listener { /** * The listeners to invoke. */ private final List<? extends Listener> listeners; /** * Creates a new compound listener. * * @param listener The listeners to invoke. */ protected Compound(Listener... listener) { this(Arrays.asList(listener)); } /** * Creates a new compound listener. * * @param listeners The listeners to invoke. */ protected Compound(List<? extends Listener> listeners) { this.listeners = listeners; } @Override public void onBatch(int index, List<Class<?>> batch, List<Class<?>> types) { for (Listener listener : listeners) { listener.onBatch(index, batch, types); } } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { for (Listener listener : listeners) { listener.onError(index, batch, throwable, types); } } @Override public void onComplete(int index, List<Class<?>> types, Map<List<Class<?>>, Throwable> failures) { for (Listener listener : listeners) { listener.onComplete(index, types, failures); } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Compound compound = (Compound) object; return listeners.equals(compound.listeners); } @Override public int hashCode() { return listeners.hashCode(); } @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Listener.Compound{" + "listeners=" + listeners + '}'; } } } /** * A collector is responsible for collecting classes that are to be considered for modification. 
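 * Subclasses translate each collected class into the primitive that is handed to the instrumentation API, either a
 * {@link java.lang.instrument.ClassDefinition} for a redefinition or the loaded {@link Class} itself for a retransformation;
 * candidate types are gathered via {@code consider} and processed in batches by {@code apply}.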
* * @param <T> The type of element that is supplied to the instrumentation API. */ protected abstract static class Delegate<T> { /** * The transformation of the built agent. */ protected final Default.Transformation transformation; /** * A list of already collected redefinitions. */ protected final List<Class<?>> types; /** * Creates a new delegate. * * @param transformation The transformation of the built agent. */ protected Delegate(Default.Transformation transformation) { this.transformation = transformation; types = new ArrayList<Class<?>>(); } /** * Considers the supplied type for redefinition. * * @param ignoredTypeMatcher The ignored type matcher. * @param listener The listener to notify. * @param typeDescription The type's description. * @param type The type being redefined. * @param classBeingRedefined The type being redefined or {@code null} if it should be considered unavailable. * @param module The redefined type's module or {@code null} if the current VM does not support the module system. */ protected void consider(RawMatcher ignoredTypeMatcher, AgentBuilder.Listener listener, TypeDescription typeDescription, Class<?> type, Class<?> classBeingRedefined, JavaModule module) { consider(ignoredTypeMatcher, listener, typeDescription, type, classBeingRedefined, module, false); } /** * Considers the supplied type for redefinition. * * @param ignoredTypeMatcher The ignored type matcher. * @param listener The listener to notify. * @param typeDescription The type's description. * @param type The type being redefined. * @param classBeingRedefined The type being redefined or {@code null} if it should be considered unavailable. * @param module The redefined type's module or {@code null} if the current VM does not support the module system. * @param unmodifiable {@code true} if the type should be seen as unmodifiable. */ protected void consider(RawMatcher ignoredTypeMatcher, AgentBuilder.Listener listener, TypeDescription typeDescription, Class<?> type, Class<?> classBeingRedefined, JavaModule module, boolean unmodifiable) { if (unmodifiable || !(transformation.isAlive(typeDescription, type.getClassLoader(), JavaModule.ofType(type), classBeingRedefined, type.getProtectionDomain(), ignoredTypeMatcher) && types.add(type))) { try { try { listener.onIgnored(typeDescription, type.getClassLoader(), module); } finally { listener.onComplete(typeDescription.getName(), type.getClassLoader(), module); } } catch (Throwable ignored) { // Ignore exceptions that are thrown by listeners to mimic the behavior of a transformation. } } } /** * Applies the current retransformation process. * * @param instrumentation The instrumentation instance to apply the redefinition upon. * @param locationStrategy The location strategy to use. * @param listener The listener to notify. * @param redefinitionBatchAllocator The redefinition batch allocator to use. * @param redefinitionListener The redefinition listener to use. * @param redefinitionFailureHandler The redefinition failure handler to use. 
*/ protected void apply(Instrumentation instrumentation, LocationStrategy locationStrategy, AgentBuilder.Listener listener, BatchAllocator redefinitionBatchAllocator, Listener redefinitionListener, FailureHandler redefinitionFailureHandler) { int index = 0; Map<List<Class<?>>, Throwable> failures = new HashMap<List<Class<?>>, Throwable>(); for (List<Class<?>> batch : redefinitionBatchAllocator.batch(types)) { List<T> transformations = new ArrayList<T>(batch.size()); for (Class<?> type : batch) { try { transformations.add(transform(type, locationStrategy)); } catch (Throwable throwable) { JavaModule module = JavaModule.ofType(type); try { listener.onError(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module, throwable); } finally { listener.onComplete(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module); } } } redefinitionListener.onBatch(index, batch, types); if (!transformations.isEmpty()) { try { doApply(transformations, instrumentation); } catch (Throwable throwable) { if (!redefinitionFailureHandler.onBatchFailure(batch, throwable)) { failures.put(batch, throwable); redefinitionListener.onError(index, batch, throwable, types); } } finally { index++; } } } redefinitionListener.onComplete(index, types, failures); if (!failures.isEmpty()) { redefinitionFailureHandler.onFailure(failures); } } /** * Turns a type into a transformation-ready primitive of the current redefinition process. * * @param type The type to transform. * @param locationStrategy The location strategy to use. * @return A primitive of the current redefinition process. * @throws IOException If an I/O error occured. */ protected abstract T transform(Class<?> type, LocationStrategy locationStrategy) throws IOException; /** * Applies a type redefinition. * * @param transformations The transformations to apply. * @param instrumentation The instrumentation instance to apply the redefinition on. * @throws UnmodifiableClassException If a class was not modifiable. * @throws ClassNotFoundException If a class was not found. */ protected abstract void doApply(List<T> transformations, Instrumentation instrumentation) throws UnmodifiableClassException, ClassNotFoundException; @Override public String toString() { return "AgentBuilder.RedefinitionStrategy.Delegate." + getClass().getSimpleName() + "{" + "transformation=" + transformation + ", types=" + types + '}'; } /** * A delegate that applies a <b>redefinition</b> of already loaded classes. */ protected static class ForRedefinition extends Delegate<ClassDefinition> { /** * Creates a new delegate for a redefinition. * * @param transformation The transformation of the built agent. */ protected ForRedefinition(Default.Transformation transformation) { super(transformation); } @Override protected ClassDefinition transform(Class<?> type, LocationStrategy locationStrategy) throws IOException { return new ClassDefinition(type, locationStrategy.classFileLocator(type.getClassLoader(), JavaModule.ofType(type)) .locate(TypeDescription.ForLoadedType.getName(type)) .resolve()); } @Override protected void doApply(List<ClassDefinition> transformations, Instrumentation instrumentation) throws UnmodifiableClassException, ClassNotFoundException { instrumentation.redefineClasses(transformations.toArray(new ClassDefinition[transformations.size()])); } } /** * A delegate that applies a <b>retransformation</b> of already loaded classes. */ protected static class ForRetransformation extends Delegate<Class<?>> { /** * Creates a new delegate for a retransformation. 
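 * Unlike a redefinition, a retransformation hands the loaded classes to the instrumentation API directly such that no
 * class file needs to be located via a {@link ClassFileLocator}.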
* * @param transformation The transformation to apply. */ protected ForRetransformation(Default.Transformation transformation) { super(transformation); } @Override protected Class<?> transform(Class<?> type, LocationStrategy locationStrategy) { return type; } @Override protected void doApply(List<Class<?>> transformations, Instrumentation instrumentation) throws UnmodifiableClassException { instrumentation.retransformClasses(transformations.toArray(new Class<?>[transformations.size()])); } } } } /** * Implements the instrumentation of the {@code LambdaMetafactory} if this feature is enabled. */ enum LambdaInstrumentationStrategy { /** * A strategy that enables instrumentation of the {@code LambdaMetafactory} if such a factory exists on the current VM. * Classes representing lambda expressions that are created by Byte Buddy are fully compatible to those created by * the JVM and can be serialized or deserialized to one another. The classes do however show a few differences: * <ul> * <li>Byte Buddy's classes are public with a public executing transformer. Doing so, it is not necessary to instantiate a * non-capturing lambda expression by reflection. This is done because Byte Buddy is not necessarily capable * of using reflection due to an active security manager.</li> * <li>Byte Buddy's classes are not marked as synthetic as an agent builder does not instrument synthetic classes * by default.</li> * </ul> */ ENABLED { @Override protected void apply(ByteBuddy byteBuddy, Instrumentation instrumentation, ClassFileTransformer classFileTransformer) { if (LambdaFactory.register(classFileTransformer, new LambdaInstanceFactory(byteBuddy), LambdaInjector.INSTANCE)) { Class<?> lambdaMetaFactory; try { lambdaMetaFactory = Class.forName("java.lang.invoke.LambdaMetafactory"); } catch (ClassNotFoundException ignored) { return; } byteBuddy.with(Implementation.Context.Disabled.Factory.INSTANCE) .redefine(lambdaMetaFactory) .visit(new AsmVisitorWrapper.ForDeclaredMethods() .method(named("metafactory"), MetaFactoryRedirection.INSTANCE) .method(named("altMetafactory"), AlternativeMetaFactoryRedirection.INSTANCE)) .make() .load(lambdaMetaFactory.getClassLoader(), ClassReloadingStrategy.of(instrumentation)); } } }, /** * A strategy that does not instrument the {@code LambdaMetafactory}. */ DISABLED { @Override protected void apply(ByteBuddy byteBuddy, Instrumentation instrumentation, ClassFileTransformer classFileTransformer) { /* do nothing */ } }; /** * Indicates that an original implementation can be ignored when redefining a method. */ protected static final MethodVisitor IGNORE_ORIGINAL = null; /** * Releases the supplied class file transformer when it was built with {@link AgentBuilder#with(LambdaInstrumentationStrategy)} enabled. * Subsequently, the class file transformer is no longer applied when a class that represents a lambda expression is created. * * @param classFileTransformer The class file transformer to release. * @param instrumentation The instrumentation instance that is used to potentially rollback the instrumentation of the {@code LambdaMetafactory}. 
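 * <p>
 * A minimal usage sketch, assuming {@code classFileTransformer} and {@code instrumentation} refer to the instances that were
 * used when installing the agent (both names are placeholders for illustration):
 * </p>
 * <pre>{@code
 * LambdaInstrumentationStrategy.release(classFileTransformer, instrumentation);
 * }</pre>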
*/ public static void release(ClassFileTransformer classFileTransformer, Instrumentation instrumentation) { if (LambdaFactory.release(classFileTransformer)) { try { ClassReloadingStrategy.of(instrumentation).reset(Class.forName("java.lang.invoke.LambdaMetafactory")); } catch (Exception exception) { throw new IllegalStateException("Could not release lambda transformer", exception); } } } /** * Returns an enabled lambda instrumentation strategy for {@code true}. * * @param enabled If lambda instrumentation should be enabled. * @return {@code true} if the returned strategy should be enabled. */ public static LambdaInstrumentationStrategy of(boolean enabled) { return enabled ? ENABLED : DISABLED; } /** * Applies a transformation to lambda instances if applicable. * * @param byteBuddy The Byte Buddy instance to use. * @param instrumentation The instrumentation instance for applying a redefinition. * @param classFileTransformer The class file transformer to apply. */ protected abstract void apply(ByteBuddy byteBuddy, Instrumentation instrumentation, ClassFileTransformer classFileTransformer); /** * Indicates if this strategy enables instrumentation of the {@code LambdaMetafactory}. * * @return {@code true} if this strategy is enabled. */ public boolean isEnabled() { return this == ENABLED; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy." + name(); } /** * An injector for injecting the lambda class dispatcher to the system class path. */ protected enum LambdaInjector implements Callable<Class<?>> { /** * The singleton instance. */ INSTANCE; @Override public Class<?> call() throws Exception { TypeDescription lambdaFactory = new TypeDescription.ForLoadedType(LambdaFactory.class); return ClassInjector.UsingReflection.ofSystemClassLoader() .inject(Collections.singletonMap(lambdaFactory, ClassFileLocator.ForClassLoader.read(LambdaFactory.class).resolve())) .get(lambdaFactory); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInjector." + name(); } } /** * A factory that creates instances that represent lambda expressions. */ protected static class LambdaInstanceFactory { /** * The name of a factory for a lambda expression. */ private static final String LAMBDA_FACTORY = "get$Lambda"; /** * A prefix for a field that represents a property of a lambda expression. */ private static final String FIELD_PREFIX = "arg$"; /** * The infix to use for naming classes that represent lambda expression. The additional prefix * is necessary because the subsequent counter is not sufficient to keep names unique compared * to the original factory. */ private static final String LAMBDA_TYPE_INFIX = "$$Lambda$ByteBuddy$"; /** * A type-safe constant to express that a class is not already loaded when applying a class file transformer. */ private static final Class<?> NOT_PREVIOUSLY_DEFINED = null; /** * A counter for naming lambda expressions randomly. */ private static final AtomicInteger LAMBDA_NAME_COUNTER = new AtomicInteger(); /** * The Byte Buddy instance to use for creating lambda objects. */ private final ByteBuddy byteBuddy; /** * Creates a new lambda instance factory. * * @param byteBuddy The Byte Buddy instance to use for creating lambda objects. */ protected LambdaInstanceFactory(ByteBuddy byteBuddy) { this.byteBuddy = byteBuddy; } /** * Applies this lambda meta factory. * * @param targetTypeLookup A lookup context representing the creating class of this lambda expression. 
* @param lambdaMethodName The name of the lambda expression's represented method. * @param factoryMethodType The type of the lambda expression's represented method. * @param lambdaMethodType The type of the lambda expression's factory method. * @param targetMethodHandle A handle representing the target of the lambda expression's method. * @param specializedLambdaMethodType A specialization of the type of the lambda expression's represented method. * @param serializable {@code true} if the lambda expression should be serializable. * @param markerInterfaces A list of interfaces for the lambda expression to represent. * @param additionalBridges A list of additional bridge methods to be implemented by the lambda expression. * @param classFileTransformers A collection of class file transformers to apply when creating the class. * @return A binary representation of the transformed class file. */ public byte[] make(Object targetTypeLookup, String lambdaMethodName, Object factoryMethodType, Object lambdaMethodType, Object targetMethodHandle, Object specializedLambdaMethodType, boolean serializable, List<Class<?>> markerInterfaces, List<?> additionalBridges, Collection<? extends ClassFileTransformer> classFileTransformers) { JavaConstant.MethodType factoryMethod = JavaConstant.MethodType.ofLoaded(factoryMethodType); JavaConstant.MethodType lambdaMethod = JavaConstant.MethodType.ofLoaded(lambdaMethodType); JavaConstant.MethodHandle targetMethod = JavaConstant.MethodHandle.ofLoaded(targetMethodHandle, targetTypeLookup); JavaConstant.MethodType specializedLambdaMethod = JavaConstant.MethodType.ofLoaded(specializedLambdaMethodType); Class<?> targetType = JavaConstant.MethodHandle.lookupType(targetTypeLookup); String lambdaClassName = targetType.getName() + LAMBDA_TYPE_INFIX + LAMBDA_NAME_COUNTER.incrementAndGet(); DynamicType.Builder<?> builder = byteBuddy .subclass(factoryMethod.getReturnType(), ConstructorStrategy.Default.NO_CONSTRUCTORS) .modifiers(TypeManifestation.FINAL, Visibility.PUBLIC) .implement(markerInterfaces) .name(lambdaClassName) .defineConstructor(Visibility.PUBLIC) .withParameters(factoryMethod.getParameterTypes()) .intercept(ConstructorImplementation.INSTANCE) .method(named(lambdaMethodName) .and(takesArguments(lambdaMethod.getParameterTypes())) .and(returns(lambdaMethod.getReturnType()))) .intercept(new LambdaMethodImplementation(targetMethod, specializedLambdaMethod)); int index = 0; for (TypeDescription capturedType : factoryMethod.getParameterTypes()) { builder = builder.defineField(FIELD_PREFIX + ++index, capturedType, Visibility.PRIVATE, FieldManifestation.FINAL); } if (!factoryMethod.getParameterTypes().isEmpty()) { builder = builder.defineMethod(LAMBDA_FACTORY, factoryMethod.getReturnType(), Visibility.PRIVATE, Ownership.STATIC) .withParameters(factoryMethod.getParameterTypes()) .intercept(FactoryImplementation.INSTANCE); } if (serializable) { if (!markerInterfaces.contains(Serializable.class)) { builder = builder.implement(Serializable.class); } builder = builder.defineMethod("writeReplace", Object.class, Visibility.PRIVATE) .intercept(new SerializationImplementation(new TypeDescription.ForLoadedType(targetType), factoryMethod.getReturnType(), lambdaMethodName, lambdaMethod, targetMethod, JavaConstant.MethodType.ofLoaded(specializedLambdaMethodType))); } else if (factoryMethod.getReturnType().isAssignableTo(Serializable.class)) { builder = builder.defineMethod("readObject", void.class, Visibility.PRIVATE) .withParameters(ObjectInputStream.class) 
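                    // If the functional interface is serializable but serialization was not requested, readObject and writeObject
                    // are defined to throw a NotSerializableException, rejecting Java serialization of the lambda instance entirely.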
.throwing(NotSerializableException.class) .intercept(ExceptionMethod.throwing(NotSerializableException.class, "Non-serializable lambda")) .defineMethod("writeObject", void.class, Visibility.PRIVATE) .withParameters(ObjectOutputStream.class) .throwing(NotSerializableException.class) .intercept(ExceptionMethod.throwing(NotSerializableException.class, "Non-serializable lambda")); } for (Object additionalBridgeType : additionalBridges) { JavaConstant.MethodType additionalBridge = JavaConstant.MethodType.ofLoaded(additionalBridgeType); builder = builder.defineMethod(lambdaMethodName, additionalBridge.getReturnType(), MethodManifestation.BRIDGE, Visibility.PUBLIC) .withParameters(additionalBridge.getParameterTypes()) .intercept(new BridgeMethodImplementation(lambdaMethodName, lambdaMethod)); } byte[] classFile = builder.make().getBytes(); for (ClassFileTransformer classFileTransformer : classFileTransformers) { try { byte[] transformedClassFile = classFileTransformer.transform(targetType.getClassLoader(), lambdaClassName.replace('.', '/'), NOT_PREVIOUSLY_DEFINED, targetType.getProtectionDomain(), classFile); classFile = transformedClassFile == null ? classFile : transformedClassFile; } catch (Throwable ignored) { /* do nothing */ } } return classFile; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && byteBuddy.equals(((LambdaInstanceFactory) other).byteBuddy); } @Override public int hashCode() { return byteBuddy.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory{" + "byteBuddy=" + byteBuddy + '}'; } /** * Implements a lambda class's executing transformer. */ @SuppressFBWarnings(value = "SE_BAD_FIELD", justification = "An enumeration does not serialize fields") protected enum ConstructorImplementation implements Implementation { /** * The singleton instance. */ INSTANCE; /** * A reference to the {@link Object} class's default executing transformer. */ private final MethodDescription.InDefinedShape objectConstructor; /** * Creates a new executing transformer implementation. */ ConstructorImplementation() { objectConstructor = TypeDescription.OBJECT.getDeclaredMethods().filter(isConstructor()).getOnly(); } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.getInstrumentedType().getDeclaredFields()); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.ConstructorImplementation." + name(); } /** * An appender to implement the executing transformer. */ protected static class Appender implements ByteCodeAppender { /** * The fields that are declared by the instrumented type. */ private final List<FieldDescription.InDefinedShape> declaredFields; /** * Creates a new appender. * * @param declaredFields The fields that are declared by the instrumented type. 
*/ protected Appender(List<FieldDescription.InDefinedShape> declaredFields) { this.declaredFields = declaredFields; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { List<StackManipulation> fieldAssignments = new ArrayList<StackManipulation>(declaredFields.size() * 3); for (ParameterDescription parameterDescription : instrumentedMethod.getParameters()) { fieldAssignments.add(MethodVariableAccess.REFERENCE.loadOffset(0)); fieldAssignments.add(MethodVariableAccess.of(parameterDescription.getType()).loadOffset(parameterDescription.getOffset())); fieldAssignments.add(FieldAccess.forField(declaredFields.get(parameterDescription.getIndex())).putter()); } return new Size(new StackManipulation.Compound( MethodVariableAccess.REFERENCE.loadOffset(0), MethodInvocation.invoke(INSTANCE.objectConstructor), new StackManipulation.Compound(fieldAssignments), MethodReturn.VOID ).apply(methodVisitor, implementationContext).getMaximalSize(), instrumentedMethod.getStackSize()); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && declaredFields.equals(((Appender) other).declaredFields); } @Override public int hashCode() { return declaredFields.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.ConstructorImplementation.Appender{" + "declaredFields=" + declaredFields + '}'; } } } /** * An implementation of a instance factory for a lambda expression's class. */ protected enum FactoryImplementation implements Implementation { /** * The singleton instance. */ INSTANCE; @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.getInstrumentedType()); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.FactoryImplementation." + name(); } /** * An appender for a lambda expression factory. */ protected static class Appender implements ByteCodeAppender { /** * The instrumented type. */ private final TypeDescription instrumentedType; /** * Creates a new appender. * * @param instrumentedType The instrumented type. */ protected Appender(TypeDescription instrumentedType) { this.instrumentedType = instrumentedType; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { return new Size(new StackManipulation.Compound( TypeCreation.of(instrumentedType), Duplication.SINGLE, MethodVariableAccess.allArgumentsOf(instrumentedMethod), MethodInvocation.invoke(instrumentedType.getDeclaredMethods().filter(isConstructor()).getOnly()), MethodReturn.REFERENCE ).apply(methodVisitor, implementationContext).getMaximalSize(), instrumentedMethod.getStackSize()); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && instrumentedType.equals(((Appender) other).instrumentedType); } @Override public int hashCode() { return instrumentedType.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.FactoryImplementation.Appender{" + "instrumentedType=" + instrumentedType + '}'; } } } /** * Implements a lambda expression's functional method. 
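 * The generated implementation loads the captured fields, adapts the intercepted arguments to the specialized lambda
 * method's parameter types and finally invokes the lambda expression's target method.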
*/ protected static class LambdaMethodImplementation implements Implementation { /** * The handle of the target method of the lambda expression. */ private final JavaConstant.MethodHandle targetMethod; /** * The specialized type of the lambda method. */ private final JavaConstant.MethodType specializedLambdaMethod; /** * Creates a implementation of a lambda expression's functional method. * * @param targetMethod The target method of the lambda expression. * @param specializedLambdaMethod The specialized type of the lambda method. */ protected LambdaMethodImplementation(JavaConstant.MethodHandle targetMethod, JavaConstant.MethodType specializedLambdaMethod) { this.targetMethod = targetMethod; this.specializedLambdaMethod = specializedLambdaMethod; } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(targetMethod.getOwnerType() .getDeclaredMethods() .filter(named(targetMethod.getName()) .and(returns(targetMethod.getReturnType())) .and(takesArguments(targetMethod.getParameterTypes()))) .getOnly(), specializedLambdaMethod, implementationTarget.getInstrumentedType().getDeclaredFields()); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; LambdaMethodImplementation that = (LambdaMethodImplementation) other; return targetMethod.equals(that.targetMethod) && specializedLambdaMethod.equals(that.specializedLambdaMethod); } @Override public int hashCode() { int result = targetMethod.hashCode(); result = 31 * result + specializedLambdaMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.LambdaMethodImplementation{" + "targetMethod=" + targetMethod + ", specializedLambdaMethod=" + specializedLambdaMethod + '}'; } /** * An appender for a lambda expression's functional method. */ protected static class Appender implements ByteCodeAppender { /** * The target method of the lambda expression. */ private final MethodDescription targetMethod; /** * The specialized type of the lambda method. */ private final JavaConstant.MethodType specializedLambdaMethod; /** * The instrumented type's declared fields. */ private final List<FieldDescription.InDefinedShape> declaredFields; /** * Creates an appender of a lambda expression's functional method. * * @param targetMethod The target method of the lambda expression. * @param specializedLambdaMethod The specialized type of the lambda method. * @param declaredFields The instrumented type's declared fields. 
*/ protected Appender(MethodDescription targetMethod, JavaConstant.MethodType specializedLambdaMethod, List<FieldDescription.InDefinedShape> declaredFields) { this.targetMethod = targetMethod; this.specializedLambdaMethod = specializedLambdaMethod; this.declaredFields = declaredFields; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { List<StackManipulation> fieldAccess = new ArrayList<StackManipulation>(declaredFields.size() * 2); for (FieldDescription.InDefinedShape fieldDescription : declaredFields) { fieldAccess.add(MethodVariableAccess.REFERENCE.loadOffset(0)); fieldAccess.add(FieldAccess.forField(fieldDescription).getter()); } List<StackManipulation> parameterAccess = new ArrayList<StackManipulation>(instrumentedMethod.getParameters().size() * 2); for (ParameterDescription parameterDescription : instrumentedMethod.getParameters()) { parameterAccess.add(MethodVariableAccess.of(parameterDescription.getType()).loadOffset(parameterDescription.getOffset())); parameterAccess.add(Assigner.DEFAULT.assign(parameterDescription.getType(), specializedLambdaMethod.getParameterTypes().get(parameterDescription.getIndex()).asGenericType(), Assigner.Typing.DYNAMIC)); } return new Size(new StackManipulation.Compound( new StackManipulation.Compound(fieldAccess), new StackManipulation.Compound(parameterAccess), MethodInvocation.invoke(targetMethod), MethodReturn.returning(targetMethod.getReturnType().asErasure()) ).apply(methodVisitor, implementationContext).getMaximalSize(), instrumentedMethod.getStackSize()); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Appender appender = (Appender) other; return targetMethod.equals(appender.targetMethod) && declaredFields.equals(appender.declaredFields) && specializedLambdaMethod.equals(appender.specializedLambdaMethod); } @Override public int hashCode() { int result = targetMethod.hashCode(); result = 31 * result + declaredFields.hashCode(); result = 31 * result + specializedLambdaMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.LambdaMethodImplementation.Appender{" + "targetMethod=" + targetMethod + ", specializedLambdaMethod=" + specializedLambdaMethod + ", declaredFields=" + declaredFields + '}'; } } } /** * Implements the {@code writeReplace} method for serializable lambda expressions. */ protected static class SerializationImplementation implements Implementation { /** * The lambda expression's declaring type. */ private final TypeDescription targetType; /** * The lambda expression's functional type. */ private final TypeDescription lambdaType; /** * The lambda expression's functional method name. */ private final String lambdaMethodName; /** * The method type of the lambda expression's functional method. */ private final JavaConstant.MethodType lambdaMethod; /** * A handle that references the lambda expressions invocation target. */ private final JavaConstant.MethodHandle targetMethod; /** * The specialized method type of the lambda expression's functional method. */ private final JavaConstant.MethodType specializedMethod; /** * Creates a new implementation for a serializable's lambda expression's {@code writeReplace} method. * * @param targetType The lambda expression's declaring type. * @param lambdaType The lambda expression's functional type. 
* @param lambdaMethodName The lambda expression's functional method name. * @param lambdaMethod The method type of the lambda expression's functional method. * @param targetMethod A handle that references the lambda expressions invocation target. * @param specializedMethod The specialized method type of the lambda expression's functional method. */ protected SerializationImplementation(TypeDescription targetType, TypeDescription lambdaType, String lambdaMethodName, JavaConstant.MethodType lambdaMethod, JavaConstant.MethodHandle targetMethod, JavaConstant.MethodType specializedMethod) { this.targetType = targetType; this.lambdaType = lambdaType; this.lambdaMethodName = lambdaMethodName; this.lambdaMethod = lambdaMethod; this.targetMethod = targetMethod; this.specializedMethod = specializedMethod; } @Override public ByteCodeAppender appender(Target implementationTarget) { TypeDescription serializedLambda; try { serializedLambda = new TypeDescription.ForLoadedType(Class.forName("java.lang.invoke.SerializedLambda")); } catch (ClassNotFoundException exception) { throw new IllegalStateException("Cannot find class for lambda serialization", exception); } List<StackManipulation> lambdaArguments = new ArrayList<StackManipulation>(implementationTarget.getInstrumentedType().getDeclaredFields().size()); for (FieldDescription.InDefinedShape fieldDescription : implementationTarget.getInstrumentedType().getDeclaredFields()) { lambdaArguments.add(new StackManipulation.Compound(MethodVariableAccess.REFERENCE.loadOffset(0), FieldAccess.forField(fieldDescription).getter(), Assigner.DEFAULT.assign(fieldDescription.getType(), TypeDescription.Generic.OBJECT, Assigner.Typing.STATIC))); } return new ByteCodeAppender.Simple(new StackManipulation.Compound( TypeCreation.of(serializedLambda), Duplication.SINGLE, ClassConstant.of(targetType), new TextConstant(lambdaType.getInternalName()), new TextConstant(lambdaMethodName), new TextConstant(lambdaMethod.getDescriptor()), IntegerConstant.forValue(targetMethod.getHandleType().getIdentifier()), new TextConstant(targetMethod.getOwnerType().getInternalName()), new TextConstant(targetMethod.getName()), new TextConstant(targetMethod.getDescriptor()), new TextConstant(specializedMethod.getDescriptor()), ArrayFactory.forType(TypeDescription.Generic.OBJECT).withValues(lambdaArguments), MethodInvocation.invoke(serializedLambda.getDeclaredMethods().filter(isConstructor()).getOnly()), MethodReturn.REFERENCE )); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; SerializationImplementation that = (SerializationImplementation) other; return targetType.equals(that.targetType) && lambdaType.equals(that.lambdaType) && lambdaMethodName.equals(that.lambdaMethodName) && lambdaMethod.equals(that.lambdaMethod) && targetMethod.equals(that.targetMethod) && specializedMethod.equals(that.specializedMethod); } @Override public int hashCode() { int result = targetType.hashCode(); result = 31 * result + lambdaType.hashCode(); result = 31 * result + lambdaMethodName.hashCode(); result = 31 * result + lambdaMethod.hashCode(); result = 31 * result + targetMethod.hashCode(); result = 31 * result + specializedMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.SerializationImplementation{" + "targetType=" + 
targetType + ", lambdaType=" + lambdaType + ", lambdaMethodName='" + lambdaMethodName + '\'' + ", lambdaMethod=" + lambdaMethod + ", targetMethod=" + targetMethod + ", specializedMethod=" + specializedMethod + '}'; } } /** * Implements an explicit bridge method for a lambda expression. */ protected static class BridgeMethodImplementation implements Implementation { /** * The name of the lambda expression's functional method. */ private final String lambdaMethodName; /** * The actual type of the lambda expression's functional method. */ private final JavaConstant.MethodType lambdaMethod; /** * Creates a new bridge method implementation for a lambda expression. * * @param lambdaMethodName The name of the lambda expression's functional method. * @param lambdaMethod The actual type of the lambda expression's functional method. */ protected BridgeMethodImplementation(String lambdaMethodName, JavaConstant.MethodType lambdaMethod) { this.lambdaMethodName = lambdaMethodName; this.lambdaMethod = lambdaMethod; } @Override public ByteCodeAppender appender(Target implementationTarget) { return new Appender(implementationTarget.invokeSuper(new MethodDescription.SignatureToken(lambdaMethodName, lambdaMethod.getReturnType(), lambdaMethod.getParameterTypes()))); } @Override public InstrumentedType prepare(InstrumentedType instrumentedType) { return instrumentedType; } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; BridgeMethodImplementation that = (BridgeMethodImplementation) other; return lambdaMethodName.equals(that.lambdaMethodName) && lambdaMethod.equals(that.lambdaMethod); } @Override public int hashCode() { int result = lambdaMethodName.hashCode(); result = 31 * result + lambdaMethod.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.BridgeMethodImplementation{" + "lambdaMethodName='" + lambdaMethodName + '\'' + ", lambdaMethod=" + lambdaMethod + '}'; } /** * An appender for implementing a bridge method for a lambda expression. */ protected static class Appender implements ByteCodeAppender { /** * The invocation of the bridge's target method. */ private final SpecialMethodInvocation bridgeTargetInvocation; /** * Creates a new appender for invoking a lambda expression's bridge method target. * * @param bridgeTargetInvocation The invocation of the bridge's target method. */ protected Appender(SpecialMethodInvocation bridgeTargetInvocation) { this.bridgeTargetInvocation = bridgeTargetInvocation; } @Override public Size apply(MethodVisitor methodVisitor, Context implementationContext, MethodDescription instrumentedMethod) { return new Compound(new Simple( MethodVariableAccess.allArgumentsOf(instrumentedMethod) .asBridgeOf(bridgeTargetInvocation.getMethodDescription()) .prependThisReference(), bridgeTargetInvocation, bridgeTargetInvocation.getMethodDescription().getReturnType().asErasure().isAssignableTo(instrumentedMethod.getReturnType().asErasure()) ? 
StackManipulation.Trivial.INSTANCE : TypeCasting.to(instrumentedMethod.getReceiverType().asErasure()), MethodReturn.returning(instrumentedMethod.getReturnType().asErasure()) )).apply(methodVisitor, implementationContext, instrumentedMethod); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && bridgeTargetInvocation.equals(((Appender) other).bridgeTargetInvocation); } @Override public int hashCode() { return bridgeTargetInvocation.hashCode(); } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.LambdaInstanceFactory.BridgeMethodImplementation.Appender{" + "bridgeTargetInvocation=" + bridgeTargetInvocation + '}'; } } } } /** * Implements the regular lambda meta factory. The implementation represents the following code: * <blockquote><pre> * public static CallSite metafactory(MethodHandles.Lookup caller, * String invokedName, * MethodType invokedType, * MethodType samMethodType, * MethodHandle implMethod, * MethodType instantiatedMethodType) throws Exception { * Unsafe unsafe = Unsafe.getUnsafe(); * {@code Class<?>} lambdaClass = unsafe.defineAnonymousClass(caller.lookupClass(), * (byte[]) ClassLoader.getSystemClassLoader().loadClass("net.bytebuddy.agent.builder.LambdaFactory").getDeclaredMethod("make", * Object.class, * String.class, * Object.class, * Object.class, * Object.class, * Object.class, * boolean.class, * List.class, * List.class).invoke(null, * caller, * invokedName, * invokedType, * samMethodType, * implMethod, * instantiatedMethodType, * false, * Collections.emptyList(), * Collections.emptyList()), * null); * unsafe.ensureClassInitialized(lambdaClass); * return invokedType.parameterCount() == 0 * ? new ConstantCallSite(MethodHandles.constant(invokedType.returnType(), lambdaClass.getDeclaredConstructors()[0].newInstance())) * : new ConstantCallSite(MethodHandles.Lookup.IMPL_LOOKUP.findStatic(lambdaClass, "get$Lambda", invokedType)); * </pre></blockquote> */ protected enum MetaFactoryRedirection implements AsmVisitorWrapper.ForDeclaredMethods.MethodVisitorWrapper { /** * The singleton instance. 
*/ INSTANCE; @Override public MethodVisitor wrap(TypeDescription instrumentedType, MethodDescription.InDefinedShape methodDescription, MethodVisitor methodVisitor, ClassFileVersion classFileVersion, int writerFlags, int readerFlags) { methodVisitor.visitCode(); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "sun/misc/Unsafe", "getUnsafe", "()Lsun/misc/Unsafe;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/ClassLoader", "getSystemClassLoader", "()Ljava/lang/ClassLoader;", false); methodVisitor.visitLdcInsn("net.bytebuddy.agent.builder.LambdaFactory"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/ClassLoader", "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;", false); methodVisitor.visitLdcInsn("make"); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/String;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/Boolean", "TYPE", "Ljava/lang/Class;"); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredMethod", "(Ljava/lang/String;[Ljava/lang/Class;)Ljava/lang/reflect/Method;", false); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitVarInsn(Opcodes.ALOAD, 1); methodVisitor.visitInsn(Opcodes.AASTORE); 
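// Descriptive note (added for readability, not part of the emitted logic): slots 0 and 1 of the reflective
// argument array now hold the caller lookup and the invoked name; the instructions that follow fill slots
// 2 to 8 with the invoked type, the functional interface method type, the implementation handle, the
// instantiated method type, a 'false' serialization flag and two empty lists. In plain Java, the emitted
// sequence corresponds roughly to the reflective call sketched in the Javadoc above:
//
//   Method make = ClassLoader.getSystemClassLoader()
//           .loadClass("net.bytebuddy.agent.builder.LambdaFactory")
//           .getDeclaredMethod("make", Object.class, String.class, Object.class, Object.class,
//                   Object.class, Object.class, boolean.class, List.class, List.class);
//   byte[] classFile = (byte[]) make.invoke(null, caller, invokedName, invokedType, samMethodType,
//           implMethod, instantiatedMethodType, false, Collections.emptyList(), Collections.emptyList());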
methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitVarInsn(Opcodes.ALOAD, 4); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 5); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Collections", "emptyList", "()Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Collections", "emptyList", "()Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Method", "invoke", "(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "[B"); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "defineAnonymousClass", "(Ljava/lang/Class;[B[Ljava/lang/Object;)Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 7); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "ensureClassInitialized", "(Ljava/lang/Class;)V", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "parameterCount", "()I", false); Label conditionalDefault = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFNE, conditionalDefault); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "returnType", "()Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredConstructors", "()[Ljava/lang/reflect/Constructor;", false); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Constructor", "newInstance", "([Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/invoke/MethodHandles", "constant", "(Ljava/lang/Class;Ljava/lang/Object;)Ljava/lang/invoke/MethodHandle;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", 
"(Ljava/lang/invoke/MethodHandle;)V", false); Label conditionalAlternative = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, conditionalAlternative); methodVisitor.visitLabel(conditionalDefault); methodVisitor.visitFrame(Opcodes.F_APPEND, 2, new Object[]{"sun/misc/Unsafe", "java/lang/Class"}, 0, null); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/invoke/MethodHandles$Lookup", "IMPL_LOOKUP", "Ljava/lang/invoke/MethodHandles$Lookup;"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitLdcInsn("get$Lambda"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false); methodVisitor.visitLabel(conditionalAlternative); methodVisitor.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[]{"java/lang/invoke/CallSite"}); methodVisitor.visitInsn(Opcodes.ARETURN); methodVisitor.visitMaxs(8, 8); methodVisitor.visitEnd(); return IGNORE_ORIGINAL; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.MetaFactoryRedirection." + name(); } } /** * Implements the alternative lambda meta factory. The implementation represents the following code: * <blockquote><pre> * public static CallSite altMetafactory(MethodHandles.Lookup caller, * String invokedName, * MethodType invokedType, * Object... args) throws Exception { * int flags = (Integer) args[3]; * int argIndex = 4; * {@code Class<?>[]} markerInterface; * if ((flags {@code &} FLAG_MARKERS) != 0) { * int markerCount = (Integer) args[argIndex++]; * markerInterface = new {@code Class<?>}[markerCount]; * System.arraycopy(args, argIndex, markerInterface, 0, markerCount); * argIndex += markerCount; * } else { * markerInterface = new {@code Class<?>}[0]; * } * MethodType[] additionalBridge; * if ((flags {@code &} FLAG_BRIDGES) != 0) { * int bridgeCount = (Integer) args[argIndex++]; * additionalBridge = new MethodType[bridgeCount]; * System.arraycopy(args, argIndex, additionalBridge, 0, bridgeCount); * // argIndex += bridgeCount; * } else { * additionalBridge = new MethodType[0]; * } * Unsafe unsafe = Unsafe.getUnsafe(); * {@code Class<?>} lambdaClass = unsafe.defineAnonymousClass(caller.lookupClass(), * (byte[]) ClassLoader.getSystemClassLoader().loadClass("net.bytebuddy.agent.builder.LambdaFactory").getDeclaredMethod("make", * Object.class, * String.class, * Object.class, * Object.class, * Object.class, * Object.class, * boolean.class, * List.class, * List.class).invoke(null, * caller, * invokedName, * invokedType, * args[0], * args[1], * args[2], * (flags {@code &} FLAG_SERIALIZABLE) != 0, * Arrays.asList(markerInterface), * Arrays.asList(additionalBridge)), * null); * unsafe.ensureClassInitialized(lambdaClass); * return invokedType.parameterCount() == 0 * ? 
new ConstantCallSite(MethodHandles.constant(invokedType.returnType(), lambdaClass.getDeclaredConstructors()[0].newInstance())) * : new ConstantCallSite(MethodHandles.Lookup.IMPL_LOOKUP.findStatic(lambdaClass, "get$Lambda", invokedType)); * } * </pre></blockquote> */ protected enum AlternativeMetaFactoryRedirection implements AsmVisitorWrapper.ForDeclaredMethods.MethodVisitorWrapper { /** * The singleton instance. */ INSTANCE; @Override public MethodVisitor wrap(TypeDescription instrumentedType, MethodDescription.InDefinedShape methodDescription, MethodVisitor methodVisitor, ClassFileVersion classFileVersion, int writerFlags, int readerFlags) { methodVisitor.visitCode(); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/Integer"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Integer", "intValue", "()I", false); methodVisitor.visitVarInsn(Opcodes.ISTORE, 4); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitVarInsn(Opcodes.ISTORE, 5); methodVisitor.visitVarInsn(Opcodes.ILOAD, 4); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitInsn(Opcodes.IAND); Label markerInterfaceLoop = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFEQ, markerInterfaceLoop); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitIincInsn(5, 1); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/Integer"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Integer", "intValue", "()I", false); methodVisitor.visitVarInsn(Opcodes.ISTORE, 7); methodVisitor.visitVarInsn(Opcodes.ILOAD, 7); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 6); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ILOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitVarInsn(Opcodes.ILOAD, 7); methodVisitor.visitInsn(Opcodes.IADD); methodVisitor.visitVarInsn(Opcodes.ISTORE, 5); Label markerInterfaceExit = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, markerInterfaceExit); methodVisitor.visitLabel(markerInterfaceLoop); methodVisitor.visitFrame(Opcodes.F_APPEND, 2, new Object[]{Opcodes.INTEGER, Opcodes.INTEGER}, 0, null); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 6); methodVisitor.visitLabel(markerInterfaceExit); methodVisitor.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"[Ljava/lang/Class;"}, 0, null); methodVisitor.visitVarInsn(Opcodes.ILOAD, 4); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitInsn(Opcodes.IAND); Label additionalBridgesLoop = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFEQ, additionalBridgesLoop); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitIincInsn(5, 1); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "java/lang/Integer"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Integer", "intValue", 
"()I", false); methodVisitor.visitVarInsn(Opcodes.ISTORE, 8); methodVisitor.visitVarInsn(Opcodes.ILOAD, 8); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/invoke/MethodType"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 7); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitVarInsn(Opcodes.ILOAD, 5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ILOAD, 8); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false); Label additionalBridgesExit = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, additionalBridgesExit); methodVisitor.visitLabel(additionalBridgesLoop); methodVisitor.visitFrame(Opcodes.F_SAME, 0, null, 0, null); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/invoke/MethodType"); methodVisitor.visitVarInsn(Opcodes.ASTORE, 7); methodVisitor.visitLabel(additionalBridgesExit); methodVisitor.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"[Ljava/lang/invoke/MethodType;"}, 0, null); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "sun/misc/Unsafe", "getUnsafe", "()Lsun/misc/Unsafe;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "lookupClass", "()Ljava/lang/Class;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/ClassLoader", "getSystemClassLoader", "()Ljava/lang/ClassLoader;", false); methodVisitor.visitLdcInsn("net.bytebuddy.agent.builder.LambdaFactory"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/ClassLoader", "loadClass", "(Ljava/lang/String;)Ljava/lang/Class;", false); methodVisitor.visitLdcInsn("make"); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Class"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/String;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitLdcInsn(Type.getType("Ljava/lang/Object;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/Boolean", "TYPE", "Ljava/lang/Class;"); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); 
methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitLdcInsn(Type.getType("Ljava/util/List;")); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredMethod", "(Ljava/lang/String;[Ljava/lang/Class;)Ljava/lang/reflect/Method;", false); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 9); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitVarInsn(Opcodes.ALOAD, 0); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitVarInsn(Opcodes.ALOAD, 1); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_3); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_4); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitInsn(Opcodes.ICONST_5); methodVisitor.visitVarInsn(Opcodes.ALOAD, 3); methodVisitor.visitInsn(Opcodes.ICONST_2); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 6); methodVisitor.visitVarInsn(Opcodes.ILOAD, 4); methodVisitor.visitInsn(Opcodes.ICONST_1); methodVisitor.visitInsn(Opcodes.IAND); Label callSiteConditional = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFEQ, callSiteConditional); methodVisitor.visitInsn(Opcodes.ICONST_1); Label callSiteAlternative = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, callSiteAlternative); methodVisitor.visitLabel(callSiteConditional); methodVisitor.visitFrame(Opcodes.F_FULL, 9, new Object[]{"java/lang/invoke/MethodHandles$Lookup", "java/lang/String", "java/lang/invoke/MethodType", "[Ljava/lang/Object;", Opcodes.INTEGER, Opcodes.INTEGER, "[Ljava/lang/Class;", "[Ljava/lang/invoke/MethodType;", "sun/misc/Unsafe"}, 7, new Object[]{"sun/misc/Unsafe", "java/lang/Class", "java/lang/reflect/Method", Opcodes.NULL, "[Ljava/lang/Object;", "[Ljava/lang/Object;", Opcodes.INTEGER}); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitLabel(callSiteAlternative); methodVisitor.visitFrame(Opcodes.F_FULL, 9, new Object[]{"java/lang/invoke/MethodHandles$Lookup", "java/lang/String", "java/lang/invoke/MethodType", "[Ljava/lang/Object;", Opcodes.INTEGER, Opcodes.INTEGER, "[Ljava/lang/Class;", "[Ljava/lang/invoke/MethodType;", "sun/misc/Unsafe"}, 8, new Object[]{"sun/misc/Unsafe", "java/lang/Class", "java/lang/reflect/Method", Opcodes.NULL, "[Ljava/lang/Object;", "[Ljava/lang/Object;", Opcodes.INTEGER, Opcodes.INTEGER}); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/Boolean", "valueOf", "(Z)Ljava/lang/Boolean;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); 
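// Slot 6 of the argument array now carries the serialization flag, derived from the flags value in local
// variable 4 as sketched in the Javadoc above; a minimal plain-Java rendering, assuming the standard
// LambdaMetafactory flag constant:
//
//   boolean serializable = (flags & LambdaMetafactory.FLAG_SERIALIZABLE) != 0; // flags == (Integer) args[3]
//
// The two slots that follow receive the marker interfaces and the additional bridge method types, each
// wrapped as a list via Arrays.asList.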
methodVisitor.visitIntInsn(Opcodes.BIPUSH, 7); methodVisitor.visitVarInsn(Opcodes.ALOAD, 6); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Arrays", "asList", "([Ljava/lang/Object;)Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitIntInsn(Opcodes.BIPUSH, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 7); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/util/Arrays", "asList", "([Ljava/lang/Object;)Ljava/util/List;", false); methodVisitor.visitInsn(Opcodes.AASTORE); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Method", "invoke", "(Ljava/lang/Object;[Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitTypeInsn(Opcodes.CHECKCAST, "[B"); methodVisitor.visitInsn(Opcodes.ACONST_NULL); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "defineAnonymousClass", "(Ljava/lang/Class;[B[Ljava/lang/Object;)Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ASTORE, 9); methodVisitor.visitVarInsn(Opcodes.ALOAD, 8); methodVisitor.visitVarInsn(Opcodes.ALOAD, 9); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "sun/misc/Unsafe", "ensureClassInitialized", "(Ljava/lang/Class;)V", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "parameterCount", "()I", false); Label callSiteJump = new Label(); methodVisitor.visitJumpInsn(Opcodes.IFNE, callSiteJump); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodType", "returnType", "()Ljava/lang/Class;", false); methodVisitor.visitVarInsn(Opcodes.ALOAD, 9); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/Class", "getDeclaredConstructors", "()[Ljava/lang/reflect/Constructor;", false); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitInsn(Opcodes.AALOAD); methodVisitor.visitInsn(Opcodes.ICONST_0); methodVisitor.visitTypeInsn(Opcodes.ANEWARRAY, "java/lang/Object"); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/reflect/Constructor", "newInstance", "([Ljava/lang/Object;)Ljava/lang/Object;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESTATIC, "java/lang/invoke/MethodHandles", "constant", "(Ljava/lang/Class;Ljava/lang/Object;)Ljava/lang/invoke/MethodHandle;", false); methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false); Label callSiteExit = new Label(); methodVisitor.visitJumpInsn(Opcodes.GOTO, callSiteExit); methodVisitor.visitLabel(callSiteJump); methodVisitor.visitFrame(Opcodes.F_APPEND, 1, new Object[]{"java/lang/Class"}, 0, null); methodVisitor.visitTypeInsn(Opcodes.NEW, "java/lang/invoke/ConstantCallSite"); methodVisitor.visitInsn(Opcodes.DUP); methodVisitor.visitFieldInsn(Opcodes.GETSTATIC, "java/lang/invoke/MethodHandles$Lookup", "IMPL_LOOKUP", "Ljava/lang/invoke/MethodHandles$Lookup;"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 9); methodVisitor.visitLdcInsn("get$Lambda"); methodVisitor.visitVarInsn(Opcodes.ALOAD, 2); methodVisitor.visitMethodInsn(Opcodes.INVOKEVIRTUAL, "java/lang/invoke/MethodHandles$Lookup", "findStatic", "(Ljava/lang/Class;Ljava/lang/String;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/MethodHandle;", false); 
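// As in the plain meta factory above, a lambda type without captured arguments is bound to a constant
// method handle returning a single pre-created instance, while a capturing lambda is bound to the static
// "get$Lambda" factory of the generated class; the ConstantCallSite constructor invoked next completes
// the capturing branch.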
methodVisitor.visitMethodInsn(Opcodes.INVOKESPECIAL, "java/lang/invoke/ConstantCallSite", "<init>", "(Ljava/lang/invoke/MethodHandle;)V", false); methodVisitor.visitLabel(callSiteExit); methodVisitor.visitFrame(Opcodes.F_SAME1, 0, null, 1, new Object[]{"java/lang/invoke/CallSite"}); methodVisitor.visitInsn(Opcodes.ARETURN); methodVisitor.visitMaxs(9, 10); methodVisitor.visitEnd(); return IGNORE_ORIGINAL; } @Override public String toString() { return "AgentBuilder.LambdaInstrumentationStrategy.AlternativeMetaFactoryRedirection." + name(); } } } /** * <p> * The default implementation of an {@link net.bytebuddy.agent.builder.AgentBuilder}. * </p> * <p> * By default, Byte Buddy ignores any types loaded by the bootstrap class loader and * any synthetic type. Self-injection and rebasing are enabled. In order to avoid class format changes, set * {@link AgentBuilder#disableClassFormatChanges()}. All types are parsed without their debugging information ({@link PoolStrategy.Default#FAST}). * </p> */ class Default implements AgentBuilder.Redefining { /** * The name of the Byte Buddy {@code net.bytebuddy.agent.Installer} class. */ private static final String INSTALLER_TYPE = "net.bytebuddy.agent.Installer"; /** * The name of the {@code net.bytebuddy.agent.Installer} field containing an installed {@link Instrumentation}. */ private static final String INSTRUMENTATION_FIELD = "instrumentation"; /** * Indicator for access to a static member via reflection to make the code more readable. */ private static final Object STATIC_FIELD = null; /** * The value that is to be returned from a {@link java.lang.instrument.ClassFileTransformer} to indicate * that no class file transformation is to be applied. */ private static final byte[] NO_TRANSFORMATION = null; /** * Indicates that a loaded type should be considered as non-available. */ private static final Class<?> NO_LOADED_TYPE = null; /** * The {@link net.bytebuddy.ByteBuddy} instance to be used. */ private final ByteBuddy byteBuddy; /** * The listener to notify on transformations. */ private final Listener listener; /** * The type locator to use. */ private final PoolStrategy poolStrategy; /** * The definition handler to use. */ private final TypeStrategy typeStrategy; /** * The location strategy to use. */ private final LocationStrategy locationStrategy; /** * The native method strategy to use. */ private final NativeMethodStrategy nativeMethodStrategy; /** * The initialization strategy to use for creating classes. */ private final InitializationStrategy initializationStrategy; /** * The redefinition strategy to apply. */ private final RedefinitionStrategy redefinitionStrategy; /** * The batch allocator for the redefinition strategy to apply. */ private final RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator; /** * The failure handler for the redefinition strategy to apply. */ private final RedefinitionStrategy.FailureHandler redefinitionFailureHandler; /** * The redefinition listener for the redefinition strategy to apply. */ private final RedefinitionStrategy.Listener redefinitionListener; /** * The injection strategy for injecting classes into the bootstrap class loader. */ private final BootstrapInjectionStrategy bootstrapInjectionStrategy; /** * A strategy to determine whether the {@code LambdaMetafactory} should be instrumented to allow for the instrumentation * of classes that represent lambda expressions.
*/ private final LambdaInstrumentationStrategy lambdaInstrumentationStrategy; /** * The description strategy for resolving type descriptions for types. */ private final DescriptionStrategy descriptionStrategy; /** * The installation strategy to use. */ private final InstallationStrategy installationStrategy; /** * The fallback strategy to apply. */ private final FallbackStrategy fallbackStrategy; /** * Identifies types that should not be instrumented. */ private final RawMatcher ignoredTypeMatcher; /** * The transformation object for handling type transformations. */ private final Transformation transformation; /** * Creates a new default agent builder that uses a default {@link net.bytebuddy.ByteBuddy} instance for creating classes. */ public Default() { this(new ByteBuddy()); } /** * Creates a new agent builder with default settings. By default, Byte Buddy ignores any types loaded by the bootstrap class loader, any * type within a {@code net.bytebuddy} package and any synthetic type. Self-injection and rebasing are enabled. In order to avoid class format * changes, set {@link AgentBuilder#disableClassFormatChanges()}. All types are parsed without their debugging information * ({@link PoolStrategy.Default#FAST}). * * @param byteBuddy The Byte Buddy instance to be used. */ public Default(ByteBuddy byteBuddy) { this(byteBuddy, Listener.NoOp.INSTANCE, PoolStrategy.Default.FAST, TypeStrategy.Default.REBASE, LocationStrategy.ForClassLoader.STRONG, NativeMethodStrategy.Disabled.INSTANCE, InitializationStrategy.SelfInjection.SPLIT, RedefinitionStrategy.DISABLED, RedefinitionStrategy.BatchAllocator.ForTotal.INSTANCE, RedefinitionStrategy.FailureHandler.Default.FAIL_FAST, RedefinitionStrategy.Listener.NoOp.INSTANCE, BootstrapInjectionStrategy.Disabled.INSTANCE, LambdaInstrumentationStrategy.DISABLED, DescriptionStrategy.Default.HYBRID, InstallationStrategy.Default.ESCALATING, FallbackStrategy.ByThrowableType.ofOptionalTypes(), new RawMatcher.Disjunction(new RawMatcher.ForElementMatchers(any(), isBootstrapClassLoader(), any()), new RawMatcher.ForElementMatchers(nameStartsWith("net.bytebuddy.").or(nameStartsWith("sun.reflect.")).<TypeDescription>or(isSynthetic()), any(), any())), Transformation.Ignored.INSTANCE); } /** * Creates a new default agent builder. * * @param byteBuddy The Byte Buddy instance to be used. * @param listener The listener to notify on transformations. * @param poolStrategy The type locator to use. * @param typeStrategy The definition handler to use. * @param locationStrategy The location strategy to use. * @param nativeMethodStrategy The native method strategy to apply. * @param initializationStrategy The initialization strategy to use for transformed types. * @param redefinitionStrategy The redefinition strategy to apply. * @param redefinitionBatchAllocator The batch allocator for the redefinition strategy to apply. * @param redefinitionFailureHandler The failure handler for the redefinition strategy to apply. * @param redefinitionListener The redefinition listener for the redefinition strategy to apply. * @param bootstrapInjectionStrategy The injection strategy for injecting classes into the bootstrap class loader. * @param lambdaInstrumentationStrategy A strategy to determine whether the {@code LambdaMetafactory} should be instrumented to allow for the * instrumentation of classes that represent lambda expressions. * @param descriptionStrategy The description strategy for resolving type descriptions for types. * @param installationStrategy The installation strategy to use.
* @param fallbackStrategy The fallback strategy to apply. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @param transformation The transformation object for handling type transformations. */ protected Default(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, RedefinitionStrategy redefinitionStrategy, RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator, RedefinitionStrategy.FailureHandler redefinitionFailureHandler, RedefinitionStrategy.Listener redefinitionListener, BootstrapInjectionStrategy bootstrapInjectionStrategy, LambdaInstrumentationStrategy lambdaInstrumentationStrategy, DescriptionStrategy descriptionStrategy, InstallationStrategy installationStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { this.byteBuddy = byteBuddy; this.poolStrategy = poolStrategy; this.typeStrategy = typeStrategy; this.locationStrategy = locationStrategy; this.listener = listener; this.nativeMethodStrategy = nativeMethodStrategy; this.initializationStrategy = initializationStrategy; this.redefinitionStrategy = redefinitionStrategy; this.redefinitionBatchAllocator = redefinitionBatchAllocator; this.redefinitionFailureHandler = redefinitionFailureHandler; this.redefinitionListener = redefinitionListener; this.bootstrapInjectionStrategy = bootstrapInjectionStrategy; this.lambdaInstrumentationStrategy = lambdaInstrumentationStrategy; this.descriptionStrategy = descriptionStrategy; this.installationStrategy = installationStrategy; this.fallbackStrategy = fallbackStrategy; this.ignoredTypeMatcher = ignoredTypeMatcher; this.transformation = transformation; } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. As {@link EntryPoint}, {@link EntryPoint.Default#REBASE} is implied. * * @param plugin The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(Plugin... plugin) { return of(Arrays.asList(plugin)); } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. As {@link EntryPoint}, {@link EntryPoint.Default#REBASE} is implied. * * @param plugins The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(List<? extends Plugin> plugins) { return of(EntryPoint.Default.REBASE, plugins); } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. * * @param entryPoint The build entry point to use. * @param plugin The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(EntryPoint entryPoint, Plugin... plugin) { return of(entryPoint, Arrays.asList(plugin)); } /** * Creates an {@link AgentBuilder} that realizes the provided build plugins. * * @param entryPoint The build entry point to use. * @param plugins The build plugins to apply as a Java agent. * @return An appropriate agent builder. */ public static AgentBuilder of(EntryPoint entryPoint, List<? 
extends Plugin> plugins) { AgentBuilder agentBuilder = new AgentBuilder.Default(entryPoint.getByteBuddy()).with(new TypeStrategy.ForBuildEntryPoint(entryPoint)); for (Plugin plugin : plugins) { agentBuilder = agentBuilder.type(plugin).transform(new Transformer.ForBuildPlugin(plugin)); } return agentBuilder; } @Override public AgentBuilder with(ByteBuddy byteBuddy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(Listener listener) { return new Default(byteBuddy, new Listener.Compound(this.listener, listener), poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(TypeStrategy typeStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(PoolStrategy poolStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(LocationStrategy locationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder enableNativeMethodPrefix(String prefix) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, NativeMethodStrategy.ForPrefix.of(prefix), initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder disableNativeMethodPrefix() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, NativeMethodStrategy.Disabled.INSTANCE, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, 
transformation); } @Override public Redefining with(RedefinitionStrategy redefinitionStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, RedefinitionStrategy.BatchAllocator.ForTotal.INSTANCE, RedefinitionStrategy.FailureHandler.Default.FAIL_FAST, RedefinitionStrategy.Listener.NoOp.INSTANCE, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public Redefining with(RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public Redefining with(RedefinitionStrategy.FailureHandler redefinitionFailureHandler) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public Redefining with(RedefinitionStrategy.Listener redefinitionListener) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, new RedefinitionStrategy.Listener.Compound(this.redefinitionListener, redefinitionListener), bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(InitializationStrategy initializationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(LambdaInstrumentationStrategy lambdaInstrumentationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(DescriptionStrategy descriptionStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public 
AgentBuilder with(InstallationStrategy installationStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder with(FallbackStrategy fallbackStrategy) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder enableBootstrapInjection(Instrumentation instrumentation, File folder) { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, new BootstrapInjectionStrategy.Enabled(folder, instrumentation), lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder disableBootstrapInjection() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, BootstrapInjectionStrategy.Disabled.INSTANCE, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder disableClassFormatChanges() { return new Default(byteBuddy.with(Implementation.Context.Disabled.Factory.INSTANCE), listener, poolStrategy, TypeStrategy.Default.REDEFINE_DECLARED_ONLY, locationStrategy, NativeMethodStrategy.Disabled.INSTANCE, InitializationStrategy.NoOp.INSTANCE, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Class<?>... type) { return JavaModule.isSupported() ? with(Listener.ModuleReadEdgeCompleting.of(instrumentation, false, type)) : this; } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, JavaModule... module) { return assureReadEdgeTo(instrumentation, Arrays.asList(module)); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return with(new Listener.ModuleReadEdgeCompleting(instrumentation, false, new HashSet<JavaModule>(modules))); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Class<?>... type) { return JavaModule.isSupported() ? with(Listener.ModuleReadEdgeCompleting.of(instrumentation, true, type)) : this; } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, JavaModule... 
module) { return assureReadEdgeFromAndTo(instrumentation, Arrays.asList(module)); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return with(new Listener.ModuleReadEdgeCompleting(instrumentation, true, new HashSet<JavaModule>(modules))); } @Override public Identified.Narrowable type(RawMatcher matcher) { return new Transforming(matcher, Transformer.NoOp.INSTANCE, false); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher) { return type(typeMatcher, any()); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return type(typeMatcher, classLoaderMatcher, any()); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return type(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, not(supportsModules()).or(moduleMatcher))); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher) { return ignore(typeMatcher, any()); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return ignore(typeMatcher, classLoaderMatcher, any()); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return ignore(new RawMatcher.ForElementMatchers(typeMatcher, classLoaderMatcher, not(supportsModules()).or(moduleMatcher))); } @Override public Ignored ignore(RawMatcher rawMatcher) { return new Ignoring(rawMatcher); } @Override public ResettableClassFileTransformer makeRaw() { return ExecutingTransformer.FACTORY.make(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, bootstrapInjectionStrategy, descriptionStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public ResettableClassFileTransformer installOn(Instrumentation instrumentation) { ResettableClassFileTransformer classFileTransformer = makeRaw(); instrumentation.addTransformer(classFileTransformer, redefinitionStrategy.isRetransforming(instrumentation)); try { if (nativeMethodStrategy.isEnabled(instrumentation)) { instrumentation.setNativeMethodPrefix(classFileTransformer, nativeMethodStrategy.getPrefix()); } lambdaInstrumentationStrategy.apply(byteBuddy, instrumentation, classFileTransformer); if (redefinitionStrategy.isEnabled()) { RedefinitionStrategy.Delegate<?> delegate = redefinitionStrategy.make(transformation); for (Class<?> type : instrumentation.getAllLoadedClasses()) { JavaModule module = JavaModule.ofType(type); try { TypePool typePool = poolStrategy.typePool(locationStrategy.classFileLocator(type.getClassLoader(), module), type.getClassLoader()); try { delegate.consider(ignoredTypeMatcher, listener, descriptionStrategy.apply(TypeDescription.ForLoadedType.getName(type), type, typePool), type, type, module, !instrumentation.isModifiableClass(type)); } catch (Throwable throwable) { if (descriptionStrategy.isLoadedFirst() && fallbackStrategy.isFallback(type, throwable)) { delegate.consider(ignoredTypeMatcher, listener, typePool.describe(TypeDescription.ForLoadedType.getName(type)).resolve(), type, NO_LOADED_TYPE, 
module); } else { throw throwable; } } } catch (Throwable throwable) { try { try { listener.onError(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module, throwable); } finally { listener.onComplete(TypeDescription.ForLoadedType.getName(type), type.getClassLoader(), module); } } catch (Throwable ignored) { // Ignore exceptions that are thrown by listeners to mimic the behavior of a transformation. } } } delegate.apply(instrumentation, locationStrategy, listener, redefinitionBatchAllocator, redefinitionListener, redefinitionFailureHandler); } return classFileTransformer; } catch (Throwable throwable) { return installationStrategy.onError(instrumentation, classFileTransformer, throwable); } } @Override public ResettableClassFileTransformer installOnByteBuddyAgent() { try { Instrumentation instrumentation = (Instrumentation) ClassLoader.getSystemClassLoader() .loadClass(INSTALLER_TYPE) .getDeclaredField(INSTRUMENTATION_FIELD) .get(STATIC_FIELD); if (instrumentation == null) { throw new IllegalStateException("The Byte Buddy agent is not installed"); } return installOn(instrumentation); } catch (RuntimeException exception) { throw exception; } catch (Exception exception) { throw new IllegalStateException("The Byte Buddy agent is not installed or not accessible", exception); } } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Default aDefault = (Default) other; return byteBuddy.equals(aDefault.byteBuddy) && listener.equals(aDefault.listener) && poolStrategy.equals(aDefault.poolStrategy) && nativeMethodStrategy.equals(aDefault.nativeMethodStrategy) && typeStrategy.equals(aDefault.typeStrategy) && locationStrategy.equals(aDefault.locationStrategy) && initializationStrategy == aDefault.initializationStrategy && redefinitionStrategy == aDefault.redefinitionStrategy && redefinitionBatchAllocator.equals(aDefault.redefinitionBatchAllocator) && redefinitionFailureHandler.equals(aDefault.redefinitionFailureHandler) && redefinitionListener.equals(aDefault.redefinitionListener) && bootstrapInjectionStrategy.equals(aDefault.bootstrapInjectionStrategy) && lambdaInstrumentationStrategy.equals(aDefault.lambdaInstrumentationStrategy) && descriptionStrategy.equals(aDefault.descriptionStrategy) && installationStrategy.equals(aDefault.installationStrategy) && fallbackStrategy.equals(aDefault.fallbackStrategy) && ignoredTypeMatcher.equals(aDefault.ignoredTypeMatcher) && transformation.equals(aDefault.transformation); } @Override public int hashCode() { int result = byteBuddy.hashCode(); result = 31 * result + listener.hashCode(); result = 31 * result + poolStrategy.hashCode(); result = 31 * result + typeStrategy.hashCode(); result = 31 * result + locationStrategy.hashCode(); result = 31 * result + nativeMethodStrategy.hashCode(); result = 31 * result + initializationStrategy.hashCode(); result = 31 * result + redefinitionStrategy.hashCode(); result = 31 * result + redefinitionBatchAllocator.hashCode(); result = 31 * result + redefinitionFailureHandler.hashCode(); result = 31 * result + redefinitionListener.hashCode(); result = 31 * result + bootstrapInjectionStrategy.hashCode(); result = 31 * result + lambdaInstrumentationStrategy.hashCode(); result = 31 * result + descriptionStrategy.hashCode(); result = 31 * result + installationStrategy.hashCode(); result = 31 * result + fallbackStrategy.hashCode(); result = 31 * result + ignoredTypeMatcher.hashCode(); result = 31 * result + 
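// Illustrative sketch (not part of the original source): using installOnByteBuddyAgent() at
// runtime. Assumes the byte-buddy-agent artifact is on the class path and the VM permits
// self-attachment; ByteBuddyAgent.install() populates the Installer field that the method
// above reads reflectively.
import net.bytebuddy.agent.ByteBuddyAgent;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.asm.Advice;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;
import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith;

public class SelfAttachExample {

    public static void main(String[] args) {
        ByteBuddyAgent.install();
        new AgentBuilder.Default()
                .type(nameStartsWith("com.example."))
                .transform((builder, typeDescription, classLoader) ->
                        builder.visit(Advice.to(TracingAdvice.class).on(isMethod())))
                .installOnByteBuddyAgent();
    }

    public static class TracingAdvice {

        @Advice.OnMethodEnter
        public static void enter(@Advice.Origin String origin) {
            System.out.println(origin);
        }
    }
}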
transformation.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default{" + "byteBuddy=" + byteBuddy + ", listener=" + listener + ", poolStrategy=" + poolStrategy + ", typeStrategy=" + typeStrategy + ", locationStrategy=" + locationStrategy + ", nativeMethodStrategy=" + nativeMethodStrategy + ", initializationStrategy=" + initializationStrategy + ", redefinitionStrategy=" + redefinitionStrategy + ", redefinitionBatchAllocator=" + redefinitionBatchAllocator + ", redefinitionFailureHandler=" + redefinitionFailureHandler + ", redefinitionListener=" + redefinitionListener + ", bootstrapInjectionStrategy=" + bootstrapInjectionStrategy + ", lambdaInstrumentationStrategy=" + lambdaInstrumentationStrategy + ", descriptionStrategy=" + descriptionStrategy + ", installationStrategy=" + installationStrategy + ", fallbackStrategy=" + fallbackStrategy + ", ignoredTypeMatcher=" + ignoredTypeMatcher + ", transformation=" + transformation + '}'; } /** * An injection strategy for injecting classes into the bootstrap class loader. */ protected interface BootstrapInjectionStrategy { /** * Creates an injector for the bootstrap class loader. * * @param protectionDomain The protection domain to be used. * @return A class injector for the bootstrap class loader. */ ClassInjector make(ProtectionDomain protectionDomain); /** * A disabled bootstrap injection strategy. */ enum Disabled implements BootstrapInjectionStrategy { /** * The singleton instance. */ INSTANCE; @Override public ClassInjector make(ProtectionDomain protectionDomain) { throw new IllegalStateException("Injecting classes into the bootstrap class loader was not enabled"); } @Override public String toString() { return "AgentBuilder.Default.BootstrapInjectionStrategy.Disabled." + name(); } } /** * An enabled bootstrap injection strategy. */ class Enabled implements BootstrapInjectionStrategy { /** * The folder in which jar files are to be saved. */ private final File folder; /** * The instrumentation to use for appending jar files. */ private final Instrumentation instrumentation; /** * Creates a new enabled bootstrap class loader injection strategy. * * @param folder The folder in which jar files are to be saved. * @param instrumentation The instrumentation to use for appending jar files. */ public Enabled(File folder, Instrumentation instrumentation) { this.folder = folder; this.instrumentation = instrumentation; } @Override public ClassInjector make(ProtectionDomain protectionDomain) { return ClassInjector.UsingInstrumentation.of(folder, ClassInjector.UsingInstrumentation.Target.BOOTSTRAP, instrumentation); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Enabled enabled = (Enabled) other; return folder.equals(enabled.folder) && instrumentation.equals(enabled.instrumentation); } @Override public int hashCode() { int result = folder.hashCode(); result = 31 * result + instrumentation.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.BootstrapInjectionStrategy.Enabled{" + "folder=" + folder + ", instrumentation=" + instrumentation + '}'; } } } /** * A strategy for determining if a native method name prefix should be used when rebasing methods. */ protected interface NativeMethodStrategy { /** * Determines if this strategy enables name prefixing for native methods. * * @param instrumentation The instrumentation used. 
* @return {@code true} if this strategy indicates that a native method prefix should be used. */ boolean isEnabled(Instrumentation instrumentation); /** * Resolves the method name transformer for this strategy. * * @return A method name transformer for this strategy. */ MethodNameTransformer resolve(); /** * Returns the method prefix if the strategy is enabled. This method must only be called if this strategy enables prefixing. * * @return The method prefix. */ String getPrefix(); /** * A native method strategy that suffixes method names with a random suffix and disables native method rebasement. */ enum Disabled implements NativeMethodStrategy { /** * The singleton instance. */ INSTANCE; @Override public MethodNameTransformer resolve() { return MethodNameTransformer.Suffixing.withRandomSuffix(); } @Override public boolean isEnabled(Instrumentation instrumentation) { return false; } @Override public String getPrefix() { throw new IllegalStateException("A disabled native method strategy does not define a method name prefix"); } @Override public String toString() { return "AgentBuilder.Default.NativeMethodStrategy.Disabled." + name(); } } /** * A native method strategy that prefixes method names with a fixed value for supporting rebasing of native methods. */ class ForPrefix implements NativeMethodStrategy { /** * The method name prefix. */ private final String prefix; /** * Creates a new name prefixing native method strategy. * * @param prefix The method name prefix. */ protected ForPrefix(String prefix) { this.prefix = prefix; } /** * Creates a new native method strategy for prefixing method names. * * @param prefix The method name prefix. * @return An appropriate native method strategy. */ protected static NativeMethodStrategy of(String prefix) { if (prefix.length() == 0) { throw new IllegalArgumentException("A method name prefix must not be the empty string"); } return new ForPrefix(prefix); } @Override public MethodNameTransformer resolve() { return new MethodNameTransformer.Prefixing(prefix); } @Override public boolean isEnabled(Instrumentation instrumentation) { if (!instrumentation.isNativeMethodPrefixSupported()) { throw new IllegalArgumentException("A prefix for native methods is not supported: " + instrumentation); } return true; } @Override public String getPrefix() { return prefix; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && prefix.equals(((ForPrefix) other).prefix); } @Override public int hashCode() { return prefix.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.NativeMethodStrategy.ForPrefix{" + "prefix='" + prefix + '\'' + '}'; } } } /** * A transformation serves as a handler for modifying a class. */ protected interface Transformation { /** * Checks if this transformation is alive. * * @param typeDescription A description of the type that is to be transformed. * @param classLoader The class loader of the type being transformed. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param classBeingRedefined In case of a type redefinition, the loaded type being transformed or {@code null} if that is not the case. * @param protectionDomain The protection domain of the type being transformed. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @return {@code true} if this transformation is alive. 
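// Illustrative sketch (not part of the original source): enabling the prefix-based native
// method strategy described above. The com.example.jni package and the checksum() native
// method are assumptions; the agent manifest must declare Can-Set-Native-Method-Prefix: true,
// otherwise ForPrefix.isEnabled(...) fails when installOn(...) queries the instrumentation.
import java.lang.instrument.Instrumentation;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.implementation.StubMethod;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith;

public class NativePrefixAgent {

    public static void premain(String arguments, Instrumentation instrumentation) {
        new AgentBuilder.Default()
                // The prefix is registered via Instrumentation.setNativeMethodPrefix so that the
                // JVM can still bind the rebased (renamed) native originals.
                .enableNativeMethodPrefix("$$original$$")
                .with(AgentBuilder.TypeStrategy.Default.REBASE)
                .type(nameStartsWith("com.example.jni."))
                .transform((builder, typeDescription, classLoader) ->
                        // Replaces the (assumed) native method; its original implementation stays
                        // reachable under the prefixed name.
                        builder.method(named("checksum")).intercept(StubMethod.INSTANCE))
                .installOn(instrumentation);
    }
}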
*/ boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher); /** * Resolves an attempted transformation to a specific transformation. * * @param typeDescription A description of the type that is to be transformed. * @param classLoader The class loader of the type being transformed. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param classBeingRedefined In case of a type redefinition, the loaded type being transformed or {@code null} if that is not the case. * @param protectionDomain The protection domain of the type being transformed. * @param typePool The type pool to apply during type creation. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @return A resolution for the given type. */ Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher); /** * A resolution to a transformation. */ interface Resolution { /** * Returns the sort of this resolution. * * @return The sort of this resolution. */ Sort getSort(); /** * Resolves this resolution as a decorator of the supplied resolution. * * @param resolution The resolution for which this resolution should serve as a decorator. * @return A resolution where this resolution is applied as a decorator if this resolution is alive. */ Resolution asDecoratorOf(Resolution resolution); /** * Resolves this resolution as a decorator of the supplied resolution. * * @param resolution The resolution for which this resolution should serve as a decorator. * @return A resolution where this resolution is applied as a decorator if this resolution is alive. */ Resolution prepend(Decoratable resolution); /** * Transforms a type or returns {@code null} if a type is not to be transformed. * * @param initializationStrategy The initialization strategy to use. * @param classFileLocator The class file locator to use. * @param typeStrategy The definition handler to use. * @param byteBuddy The Byte Buddy instance to use. * @param methodNameTransformer The method name transformer to be used. * @param bootstrapInjectionStrategy The bootstrap injection strategy to be used. * @param accessControlContext The access control context to be used. * @param listener The listener to be invoked to inform about an applied or non-applied transformation. * @return The class file of the transformed class or {@code null} if no transformation is attempted. */ byte[] apply(InitializationStrategy initializationStrategy, ClassFileLocator classFileLocator, TypeStrategy typeStrategy, ByteBuddy byteBuddy, NativeMethodStrategy methodNameTransformer, BootstrapInjectionStrategy bootstrapInjectionStrategy, AccessControlContext accessControlContext, Listener listener); /** * Describes a specific sort of a {@link Resolution}. */ enum Sort { /** * A terminal resolution. After discovering such a resolution, no further transformers are considered. */ TERMINAL(true), /** * A resolution that can serve as a decorator for another resolution. After discovering such a resolution * further transformations are considered where the represented resolution is prepended if applicable. */ DECORATOR(true), /** * A non-resolved resolution. */ UNDEFINED(false); /** * Indicates if this sort represents an active resolution. 
*/ private final boolean alive; /** * Creates a new resolution sort. * * @param alive Indicates if this sort represents an active resolution. */ Sort(boolean alive) { this.alive = alive; } /** * Returns {@code true} if this resolution is alive. * * @return {@code true} if this resolution is alive. */ protected boolean isAlive() { return alive; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Resolution.Sort." + name(); } } /** * A resolution that can be decorated by a transformer. */ interface Decoratable extends Resolution { /** * Appends the supplied transformer to this resolution. * * @param transformer The transformer to append to the transformer that is represented bz this instance. * @return A new resolution with the supplied transformer appended to this transformer. */ Resolution append(Transformer transformer); } /** * A canonical implementation of a non-resolved resolution. */ class Unresolved implements Resolution { /** * The type that is not transformed. */ private final TypeDescription typeDescription; /** * The unresolved type's class loader. */ private final ClassLoader classLoader; /** * The non-transformed type's module or {@code null} if the current VM does not support modules. */ private final JavaModule module; /** * Creates a new unresolved resolution. * * @param typeDescription The type that is not transformed. * @param classLoader The unresolved type's class loader. * @param module The non-transformed type's module or {@code null} if the current VM does not support modules. */ protected Unresolved(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module) { this.typeDescription = typeDescription; this.classLoader = classLoader; this.module = module; } @Override public Sort getSort() { return Sort.UNDEFINED; } @Override public Resolution asDecoratorOf(Resolution resolution) { return resolution; } @Override public Resolution prepend(Decoratable resolution) { return resolution; } @Override public byte[] apply(InitializationStrategy initializationStrategy, ClassFileLocator classFileLocator, TypeStrategy typeStrategy, ByteBuddy byteBuddy, NativeMethodStrategy methodNameTransformer, BootstrapInjectionStrategy bootstrapInjectionStrategy, AccessControlContext accessControlContext, Listener listener) { listener.onIgnored(typeDescription, classLoader, module); return NO_TRANSFORMATION; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Unresolved that = (Unresolved) object; return typeDescription.equals(that.typeDescription) && (classLoader != null ? classLoader.equals(that.classLoader) : that.classLoader == null) && (module != null ? module.equals(that.module) : that.module == null); } @Override public int hashCode() { int result = typeDescription.hashCode(); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); result = 31 * result + (module != null ? module.hashCode() : 0); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Resolution.Unresolved{" + "typeDescription=" + typeDescription + ", classLoader=" + classLoader + ", module=" + module + '}'; } } } /** * A transformation that does not attempt to transform any type. */ enum Ignored implements Transformation { /** * The singleton instance. 
*/ INSTANCE; @Override public boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher) { return false; } @Override public Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher) { return new Resolution.Unresolved(typeDescription, classLoader, module); } @Override public String toString() { return "AgentBuilder.Default.Transformation.Ignored." + name(); } } /** * A simple, active transformation. */ class Simple implements Transformation { /** * The raw matcher that is represented by this transformation. */ private final RawMatcher rawMatcher; /** * The transformer that is represented by this transformation. */ private final Transformer transformer; /** * {@code true} if this transformer serves as a decorator. */ private final boolean decorator; /** * Creates a new transformation. * * @param rawMatcher The raw matcher that is represented by this transformation. * @param transformer The transformer that is represented by this transformation. * @param decorator {@code true} if this transformer serves as a decorator. */ protected Simple(RawMatcher rawMatcher, Transformer transformer, boolean decorator) { this.rawMatcher = rawMatcher; this.transformer = transformer; this.decorator = decorator; } @Override public boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher) { return !ignoredTypeMatcher.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain) && rawMatcher.matches(typeDescription, classLoader, module, classBeingRedefined, protectionDomain); } @Override public Transformation.Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher) { return isAlive(typeDescription, classLoader, module, classBeingRedefined, protectionDomain, ignoredTypeMatcher) ? new Resolution(typeDescription, classLoader, module, protectionDomain, typePool, transformer, decorator) : new Transformation.Resolution.Unresolved(typeDescription, classLoader, module); } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && decorator == ((Simple) other).decorator && rawMatcher.equals(((Simple) other).rawMatcher) && transformer.equals(((Simple) other).transformer); } @Override public int hashCode() { int result = rawMatcher.hashCode(); result = 31 * result + (decorator ? 1 : 0); result = 31 * result + transformer.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Simple{" + "rawMatcher=" + rawMatcher + ", transformer=" + transformer + ", decorator=" + decorator + '}'; } /** * A resolution that performs a type transformation. */ protected static class Resolution implements Transformation.Resolution.Decoratable { /** * A description of the transformed type. */ private final TypeDescription typeDescription; /** * The class loader of the transformed type. */ private final ClassLoader classLoader; /** * The transformed type's module or {@code null} if the current VM does not support modules. 
*/ private final JavaModule module; /** * The protection domain of the transformed type. */ private final ProtectionDomain protectionDomain; /** * The type pool to apply during type creation. */ private final TypePool typePool; /** * The transformer to be applied. */ private final Transformer transformer; /** * {@code true} if this transformer serves as a decorator. */ private final boolean decorator; /** * Creates a new active transformation. * * @param typeDescription A description of the transformed type. * @param classLoader The class loader of the transformed type. * @param module The transformed type's module or {@code null} if the current VM does not support modules. * @param protectionDomain The protection domain of the transformed type. * @param typePool The type pool to apply during type creation. * @param transformer The transformer to be applied. * @param decorator {@code true} if this transformer serves as a decorator. */ protected Resolution(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, ProtectionDomain protectionDomain, TypePool typePool, Transformer transformer, boolean decorator) { this.typeDescription = typeDescription; this.classLoader = classLoader; this.module = module; this.protectionDomain = protectionDomain; this.typePool = typePool; this.transformer = transformer; this.decorator = decorator; } @Override public Sort getSort() { return decorator ? Sort.DECORATOR : Sort.TERMINAL; } @Override public Transformation.Resolution asDecoratorOf(Transformation.Resolution resolution) { return resolution.prepend(this); } @Override public Transformation.Resolution prepend(Decoratable resolution) { return resolution.append(transformer); } @Override public Transformation.Resolution append(Transformer transformer) { return new Resolution(typeDescription, classLoader, module, protectionDomain, typePool, new Transformer.Compound(this.transformer, transformer), decorator); } @Override public byte[] apply(InitializationStrategy initializationStrategy, ClassFileLocator classFileLocator, TypeStrategy typeStrategy, ByteBuddy byteBuddy, NativeMethodStrategy methodNameTransformer, BootstrapInjectionStrategy bootstrapInjectionStrategy, AccessControlContext accessControlContext, Listener listener) { InitializationStrategy.Dispatcher dispatcher = initializationStrategy.dispatcher(); DynamicType.Unloaded<?> dynamicType = dispatcher.apply(transformer.transform(typeStrategy.builder(typeDescription, byteBuddy, classFileLocator, methodNameTransformer.resolve()), typeDescription, classLoader)).make(TypeResolutionStrategy.Disabled.INSTANCE, typePool); dispatcher.register(dynamicType, classLoader, new BootstrapClassLoaderCapableInjectorFactory(bootstrapInjectionStrategy, classLoader, protectionDomain)); listener.onTransformation(typeDescription, classLoader, module, dynamicType); return dynamicType.getBytes(); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; Resolution that = (Resolution) other; return typeDescription.equals(that.typeDescription) && decorator == that.decorator && !(classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) && !(module != null ? !module.equals(that.module) : that.module != null) && !(protectionDomain != null ? 
!protectionDomain.equals(that.protectionDomain) : that.protectionDomain != null) && typePool.equals(that.typePool) && transformer.equals(that.transformer); } @Override public int hashCode() { int result = typeDescription.hashCode(); result = 31 * result + (decorator ? 1 : 0); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); result = 31 * result + (module != null ? module.hashCode() : 0); result = 31 * result + (protectionDomain != null ? protectionDomain.hashCode() : 0); result = 31 * result + transformer.hashCode(); result = 31 * result + typePool.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Simple.Resolution{" + "typeDescription=" + typeDescription + ", classLoader=" + classLoader + ", module=" + module + ", protectionDomain=" + protectionDomain + ", typePool=" + typePool + ", transformer=" + transformer + ", decorator=" + decorator + '}'; } /** * An injector factory that resolves to a bootstrap class loader injection if this is necessary and enabled. */ protected static class BootstrapClassLoaderCapableInjectorFactory implements InitializationStrategy.Dispatcher.InjectorFactory { /** * The bootstrap injection strategy being used. */ private final BootstrapInjectionStrategy bootstrapInjectionStrategy; /** * The class loader for which to create an injection factory. */ private final ClassLoader classLoader; /** * The protection domain of the created classes. */ private final ProtectionDomain protectionDomain; /** * Creates a new bootstrap class loader capable injector factory. * * @param bootstrapInjectionStrategy The bootstrap injection strategy being used. * @param classLoader The class loader for which to create an injection factory. * @param protectionDomain The protection domain of the created classes. */ protected BootstrapClassLoaderCapableInjectorFactory(BootstrapInjectionStrategy bootstrapInjectionStrategy, ClassLoader classLoader, ProtectionDomain protectionDomain) { this.bootstrapInjectionStrategy = bootstrapInjectionStrategy; this.classLoader = classLoader; this.protectionDomain = protectionDomain; } @Override public ClassInjector resolve() { return classLoader == null ? bootstrapInjectionStrategy.make(protectionDomain) : new ClassInjector.UsingReflection(classLoader, protectionDomain); } @Override public boolean equals(Object other) { if (this == other) return true; if (other == null || getClass() != other.getClass()) return false; BootstrapClassLoaderCapableInjectorFactory that = (BootstrapClassLoaderCapableInjectorFactory) other; return bootstrapInjectionStrategy.equals(that.bootstrapInjectionStrategy) && !(classLoader != null ? !classLoader.equals(that.classLoader) : that.classLoader != null) && !(protectionDomain != null ? !protectionDomain.equals(that.protectionDomain) : that.protectionDomain != null); } @Override public int hashCode() { int result = bootstrapInjectionStrategy.hashCode(); result = 31 * result + (protectionDomain != null ? protectionDomain.hashCode() : 0); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); return result; } @Override public String toString() { return "AgentBuilder.Default.Transformation.Simple.Resolution.BootstrapClassLoaderCapableInjectorFactory{" + "bootstrapInjectionStrategy=" + bootstrapInjectionStrategy + ", classLoader=" + classLoader + ", protectionDomain=" + protectionDomain + '}'; } } } } /** * A compound transformation that applied several transformation in the given order and applies the first active transformation. 
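// Illustrative sketch (not part of the original source) of the decorator flag consumed by
// Transformation.Simple above: a minimal agent assuming this API level exposes asDecorator()
// on the narrowed transformer (the call that registers a transformation whose resolution
// reports Sort.DECORATOR); the com.example names and LoggingAdvice are assumptions.
import java.lang.instrument.Instrumentation;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.asm.Advice;
import static net.bytebuddy.matcher.ElementMatchers.isAnnotatedWith;
import static net.bytebuddy.matcher.ElementMatchers.isMethod;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith;

public class DecoratorAgent {

    public static void premain(String arguments, Instrumentation instrumentation) {
        new AgentBuilder.Default()
                // A decorating transformer: matching continues and this transformer is prepended
                // to the terminal resolution that the compound resolver finds for the same type.
                .type(nameStartsWith("com.example."))
                .transform((builder, typeDescription, classLoader) ->
                        builder.visit(Advice.to(LoggingAdvice.class).on(isMethod())))
                .asDecorator()
                // A terminal transformer: the first Sort.TERMINAL resolution ends the search.
                .type(isAnnotatedWith(named("com.example.Traced")))
                .transform((builder, typeDescription, classLoader) ->
                        builder.visit(Advice.to(LoggingAdvice.class).on(isMethod())))
                .installOn(instrumentation);
    }

    public static class LoggingAdvice {

        @Advice.OnMethodEnter
        public static void enter(@Advice.Origin String origin) {
            System.out.println(origin);
        }
    }
}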
*/ class Compound implements Transformation { /** * The list of transformations to apply in their application order. */ private final List<? extends Transformation> transformations; /** * Creates a new compound transformation. * * @param transformation An array of transformations to apply in their application order. */ protected Compound(Transformation... transformation) { this(Arrays.asList(transformation)); } /** * Creates a new compound transformation. * * @param transformations A list of transformations to apply in their application order. */ protected Compound(List<? extends Transformation> transformations) { this.transformations = transformations; } @Override public boolean isAlive(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, RawMatcher ignoredTypeMatcher) { for (Transformation transformation : transformations) { if (transformation.isAlive(typeDescription, classLoader, module, classBeingRedefined, protectionDomain, ignoredTypeMatcher)) { return true; } } return false; } @Override public Resolution resolve(TypeDescription typeDescription, ClassLoader classLoader, JavaModule module, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, RawMatcher ignoredTypeMatcher) { Resolution current = new Resolution.Unresolved(typeDescription, classLoader, module); for (Transformation transformation : transformations) { Resolution resolution = transformation.resolve(typeDescription, classLoader, module, classBeingRedefined, protectionDomain, typePool, ignoredTypeMatcher); switch (resolution.getSort()) { case TERMINAL: return current.asDecoratorOf(resolution); case DECORATOR: current = current.asDecoratorOf(resolution); break; case UNDEFINED: break; default: throw new IllegalStateException("Unexpected resolution type: " + resolution.getSort()); } } return current; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && transformations.equals(((Compound) other).transformations); } @Override public int hashCode() { return transformations.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.Transformation.Compound{" + "transformations=" + transformations + '}'; } } } /** * A {@link java.lang.instrument.ClassFileTransformer} that implements the enclosing agent builder's * configuration. */ protected static class ExecutingTransformer extends ResettableClassFileTransformer.AbstractBase { /** * A factory for creating a {@link ClassFileTransformer} that supports the features of the current VM. */ protected static final Factory FACTORY = AccessController.doPrivileged(FactoryCreationOption.INSTANCE); /** * The Byte Buddy instance to be used. */ private final ByteBuddy byteBuddy; /** * The type locator to use. */ private final PoolStrategy poolStrategy; /** * The definition handler to use. */ private final TypeStrategy typeStrategy; /** * The listener to notify on transformations. */ private final Listener listener; /** * The native method strategy to apply. */ private final NativeMethodStrategy nativeMethodStrategy; /** * The initialization strategy to use for transformed types. */ private final InitializationStrategy initializationStrategy; /** * The injection strategy for injecting classes into the bootstrap class loader. */ private final BootstrapInjectionStrategy bootstrapInjectionStrategy; /** * The description strategy for resolving type descriptions for types. 
*/ private final DescriptionStrategy descriptionStrategy; /** * The location strategy to use. */ private final LocationStrategy locationStrategy; /** * The fallback strategy to use. */ private final FallbackStrategy fallbackStrategy; /** * Identifies types that should not be instrumented. */ private final RawMatcher ignoredTypeMatcher; /** * The transformation object for handling type transformations. */ private final Transformation transformation; /** * The access control context to use for loading classes. */ private final AccessControlContext accessControlContext; /** * Creates a new class file transformer. * * @param byteBuddy The Byte Buddy instance to be used. * @param listener The listener to notify on transformations. * @param poolStrategy The type locator to use. * @param typeStrategy The definition handler to use. * @param locationStrategy The location strategy to use. * @param nativeMethodStrategy The native method strategy to apply. * @param initializationStrategy The initialization strategy to use for transformed types. * @param bootstrapInjectionStrategy The injection strategy for injecting classes into the bootstrap class loader. * @param descriptionStrategy The description strategy for resolving type descriptions for types. * @param fallbackStrategy The fallback strategy to use. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @param transformation The transformation object for handling type transformations. */ public ExecutingTransformer(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { this.byteBuddy = byteBuddy; this.typeStrategy = typeStrategy; this.poolStrategy = poolStrategy; this.locationStrategy = locationStrategy; this.listener = listener; this.nativeMethodStrategy = nativeMethodStrategy; this.initializationStrategy = initializationStrategy; this.bootstrapInjectionStrategy = bootstrapInjectionStrategy; this.descriptionStrategy = descriptionStrategy; this.fallbackStrategy = fallbackStrategy; this.ignoredTypeMatcher = ignoredTypeMatcher; this.transformation = transformation; accessControlContext = AccessController.getContext(); } @Override public byte[] transform(ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { return AccessController.doPrivileged(new LegacyVmDispatcher(classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation), accessControlContext); } /** * Applies a transformation for a class that was captured by this {@link ClassFileTransformer}. Invoking this method * allows to process module information which is available since Java 9. * * @param rawModule The instrumented class's Java {@code java.lang.reflect.Module}. * @param classLoader The type's class loader or {@code null} if the type is loaded by the bootstrap loader. * @param internalTypeName The internal name of the instrumented class. * @param classBeingRedefined The loaded {@link Class} being redefined or {@code null} if no such class exists. * @param protectionDomain The instrumented type's protection domain. 
* @param binaryRepresentation The class file of the instrumented class in its current state. * @return The transformed class file or an empty byte array if this transformer does not apply an instrumentation. */ protected byte[] transform(Object rawModule, ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { return AccessController.doPrivileged(new Java9CapableVmDispatcher(rawModule, classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation), accessControlContext); } /** * Applies a transformation for a class that was captured by this {@link ClassFileTransformer}. * * @param module The instrumented class's Java module in its wrapped form or {@code null} if the current VM does not support modules. * @param classLoader The instrumented class's class loader. * @param internalTypeName The internal name of the instrumented class. * @param classBeingRedefined The loaded {@link Class} being redefined or {@code null} if no such class exists. * @param protectionDomain The instrumented type's protection domain. * @param binaryRepresentation The class file of the instrumented class in its current state. * @return The transformed class file or an empty byte array if this transformer does not apply an instrumentation. */ private byte[] transform(JavaModule module, ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { if (internalTypeName == null) { return NO_TRANSFORMATION; } String typeName = internalTypeName.replace('/', '.'); try { ClassFileLocator classFileLocator = ClassFileLocator.Simple.of(typeName, binaryRepresentation, locationStrategy.classFileLocator(classLoader, module)); TypePool typePool = poolStrategy.typePool(classFileLocator, classLoader); try { return doTransform(module, classLoader, typeName, classBeingRedefined, protectionDomain, typePool, classFileLocator); } catch (Throwable throwable) { if (classBeingRedefined != null && descriptionStrategy.isLoadedFirst() && fallbackStrategy.isFallback(classBeingRedefined, throwable)) { return doTransform(module, classLoader, typeName, NO_LOADED_TYPE, protectionDomain, typePool, classFileLocator); } else { throw throwable; } } } catch (Throwable throwable) { listener.onError(typeName, classLoader, module, throwable); return NO_TRANSFORMATION; } finally { listener.onComplete(typeName, classLoader, module); } } /** * Applies a transformation for a class that was captured by this {@link ClassFileTransformer}. * * @param module The instrumented class's Java module in its wrapped form or {@code null} if the current VM does not support modules. * @param classLoader The instrumented class's class loader. * @param typeName The binary name of the instrumented class. * @param classBeingRedefined The loaded {@link Class} being redefined or {@code null} if no such class exists. * @param protectionDomain The instrumented type's protection domain. * @param typePool The type pool to use. * @param classFileLocator The class file locator to use. * @return The transformed class file or an empty byte array if this transformer does not apply an instrumentation. 
*/ private byte[] doTransform(JavaModule module, ClassLoader classLoader, String typeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, TypePool typePool, ClassFileLocator classFileLocator) { return transformation.resolve(descriptionStrategy.apply(typeName, classBeingRedefined, typePool), classLoader, module, classBeingRedefined, protectionDomain, typePool, ignoredTypeMatcher).apply(initializationStrategy, classFileLocator, typeStrategy, byteBuddy, nativeMethodStrategy, bootstrapInjectionStrategy, accessControlContext, listener); } @Override public Reset reset(Instrumentation instrumentation, RedefinitionStrategy redefinitionStrategy, RedefinitionStrategy.BatchAllocator redefinitionBatchAllocator, RedefinitionStrategy.Listener redefinitionListener) { if (instrumentation.removeTransformer(this)) { if (!redefinitionStrategy.isEnabled()) { return Reset.Simple.ACTIVE; } redefinitionStrategy.isRetransforming(instrumentation); Map<Class<?>, Throwable> failures = new HashMap<Class<?>, Throwable>(); RedefinitionStrategy.Delegate<?> delegate = redefinitionStrategy.make(transformation); for (Class<?> type : instrumentation.getAllLoadedClasses()) { JavaModule module = JavaModule.ofType(type); try { delegate.consider(ignoredTypeMatcher, Listener.NoOp.INSTANCE, descriptionStrategy.apply(TypeDescription.ForLoadedType.getName(type), type, poolStrategy.typePool(locationStrategy.classFileLocator(type.getClassLoader(), module), type.getClassLoader())), type, type, module, !instrumentation.isModifiableClass(type)); } catch (Throwable throwable) { try { if (descriptionStrategy.isLoadedFirst() && fallbackStrategy.isFallback(type, throwable)) { delegate.consider(ignoredTypeMatcher, Listener.NoOp.INSTANCE, descriptionStrategy.apply(TypeDescription.ForLoadedType.getName(type), NO_LOADED_TYPE, poolStrategy.typePool(locationStrategy.classFileLocator(type.getClassLoader(), module), type.getClassLoader())), type, NO_LOADED_TYPE, module); } else { failures.put(type, throwable); } } catch (Throwable fallback) { failures.put(type, fallback); } } } delegate.apply(instrumentation, locationStrategy, Listener.NoOp.INSTANCE, RedefinitionStrategy.BatchAllocator.ForTotal.INSTANCE, new RedefinitionStrategy.Listener.Compound(new FailureCollectingListener(failures), redefinitionListener), RedefinitionStrategy.FailureHandler.Default.SUPPRESSING); return Reset.WithErrors.ofPotentiallyErroneous(failures); } else { return Reset.Simple.INACTIVE; } } /* does not implement hashCode and equals in order to align with identity treatment of the JVM */ @Override public String toString() { return "AgentBuilder.Default." + getClass().getSimpleName() + "{" + "byteBuddy=" + byteBuddy + ", listener=" + listener + ", poolStrategy=" + poolStrategy + ", typeStrategy=" + typeStrategy + ", locationStrategy=" + locationStrategy + ", initializationStrategy=" + initializationStrategy + ", nativeMethodStrategy=" + nativeMethodStrategy + ", bootstrapInjectionStrategy=" + bootstrapInjectionStrategy + ", descriptionStrategy=" + descriptionStrategy + ", fallbackStrategy=" + fallbackStrategy + ", ignoredTypeMatcher=" + ignoredTypeMatcher + ", transformation=" + transformation + ", accessControlContext=" + accessControlContext + '}'; } /** * A factory for creating a {@link ClassFileTransformer} for the current VM. */ protected interface Factory { /** * Creates a new class file transformer for the current VM. * * @param byteBuddy The Byte Buddy instance to be used. * @param listener The listener to notify on transformations. 
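// Illustrative sketch (not part of the original source): installing a transformer and later
// undoing it via reset(...). Assumes the shorter reset overload inherited from
// ResettableClassFileTransformer.AbstractBase fills in a default batch allocator and listener,
// and that byte-buddy-agent is available for self-attachment.
import java.lang.instrument.Instrumentation;
import net.bytebuddy.agent.ByteBuddyAgent;
import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.agent.builder.ResettableClassFileTransformer;
import static net.bytebuddy.matcher.ElementMatchers.nameStartsWith;

public class ResetExample {

    public static void main(String[] args) {
        Instrumentation instrumentation = ByteBuddyAgent.install();
        ResettableClassFileTransformer transformer = new AgentBuilder.Default()
                .with(AgentBuilder.RedefinitionStrategy.RETRANSFORMATION)
                .disableClassFormatChanges()
                .type(nameStartsWith("com.example."))
                .transform((builder, typeDescription, classLoader) -> builder /* changes omitted */)
                .installOnByteBuddyAgent();
        // ... later: remove the transformer and retransform affected classes back to their
        // original definition; failures are collected rather than rethrown, as shown above.
        transformer.reset(instrumentation, AgentBuilder.RedefinitionStrategy.RETRANSFORMATION);
    }
}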
* @param poolStrategy The type locator to use. * @param typeStrategy The definition handler to use. * @param locationStrategy The location strategy to use. * @param nativeMethodStrategy The native method strategy to apply. * @param initializationStrategy The initialization strategy to use for transformed types. * @param bootstrapInjectionStrategy The injection strategy for injecting classes into the bootstrap class loader. * @param descriptionStrategy The description strategy for resolving type descriptions for types. * @param fallbackStrategy The fallback strategy to use. * @param ignoredTypeMatcher Identifies types that should not be instrumented. * @param transformation The transformation object for handling type transformations. * @return A class file transformer for the current VM that supports the API of the current VM. */ ResettableClassFileTransformer make(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation); /** * A factory for a class file transformer on a JVM that supports the {@code java.lang.reflect.Module} API to override * the newly added method of the {@link ClassFileTransformer} to capture an instrumented class's module. */ class ForJava9CapableVm implements Factory { /** * A constructor for creating a {@link ClassFileTransformer} that overrides the newly added method for extracting * the {@code java.lang.reflect.Module} of an instrumented class. */ private final Constructor<? extends ResettableClassFileTransformer> executingTransformer; /** * Creates a class file transformer factory for a Java 9 capable VM. * * @param executingTransformer A constructor for creating a {@link ClassFileTransformer} that overrides the newly added * method for extracting the {@code java.lang.reflect.Module} of an instrumented class. */ protected ForJava9CapableVm(Constructor<? 
extends ResettableClassFileTransformer> executingTransformer) { this.executingTransformer = executingTransformer; } @Override public ResettableClassFileTransformer make(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { try { return executingTransformer.newInstance(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, bootstrapInjectionStrategy, descriptionStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } catch (IllegalAccessException exception) { throw new IllegalStateException("Cannot access " + executingTransformer, exception); } catch (InstantiationException exception) { throw new IllegalStateException("Cannot instantiate " + executingTransformer.getDeclaringClass(), exception); } catch (InvocationTargetException exception) { throw new IllegalStateException("Cannot invoke " + executingTransformer, exception.getCause()); } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; ForJava9CapableVm that = (ForJava9CapableVm) object; return executingTransformer.equals(that.executingTransformer); } @Override public int hashCode() { return executingTransformer.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.Factory.ForJava9CapableVm{" + "executingTransformer=" + executingTransformer + '}'; } } /** * A factory for a {@link ClassFileTransformer} on a VM that does not support the {@code java.lang.reflect.Module} API. */ enum ForLegacyVm implements Factory { /** * The singleton instance. */ INSTANCE; @Override public ResettableClassFileTransformer make(ByteBuddy byteBuddy, Listener listener, PoolStrategy poolStrategy, TypeStrategy typeStrategy, LocationStrategy locationStrategy, NativeMethodStrategy nativeMethodStrategy, InitializationStrategy initializationStrategy, BootstrapInjectionStrategy bootstrapInjectionStrategy, DescriptionStrategy descriptionStrategy, FallbackStrategy fallbackStrategy, RawMatcher ignoredTypeMatcher, Transformation transformation) { return new ExecutingTransformer(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, bootstrapInjectionStrategy, descriptionStrategy, fallbackStrategy, ignoredTypeMatcher, transformation); } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.Factory.ForLegacyVm." + name(); } } } /** * An action to create an implementation of {@link ExecutingTransformer} that support Java 9 modules. */ protected enum FactoryCreationOption implements PrivilegedAction<Factory> { /** * The singleton instance. 
*/ INSTANCE; @Override @SuppressFBWarnings(value = "REC_CATCH_EXCEPTION", justification = "Exception should not be rethrown but trigger a fallback") public Factory run() { try { return new Factory.ForJava9CapableVm(new ByteBuddy() .subclass(ExecutingTransformer.class) .name(ExecutingTransformer.class.getName() + "$ByteBuddy$ModuleSupport") .method(named("transform").and(takesArgument(0, JavaType.MODULE.load()))) .intercept(MethodCall.invoke(ExecutingTransformer.class.getDeclaredMethod("transform", Object.class, ClassLoader.class, String.class, Class.class, ProtectionDomain.class, byte[].class)).onSuper().withAllArguments()) .make() .load(ExecutingTransformer.class.getClassLoader(), ClassLoadingStrategy.Default.WRAPPER_PERSISTENT.with(ExecutingTransformer.class.getProtectionDomain())) .getLoaded() .getDeclaredConstructor(ByteBuddy.class, Listener.class, PoolStrategy.class, TypeStrategy.class, LocationStrategy.class, NativeMethodStrategy.class, InitializationStrategy.class, BootstrapInjectionStrategy.class, DescriptionStrategy.class, FallbackStrategy.class, RawMatcher.class, Transformation.class)); } catch (Exception ignored) { return Factory.ForLegacyVm.INSTANCE; } } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.InheritanceAction." + name(); } } /** * A privileged action for transforming a class on a JVM prior to Java 9. */ protected class LegacyVmDispatcher implements PrivilegedAction<byte[]> { /** * The type's class loader or {@code null} if the bootstrap class loader is represented. */ private final ClassLoader classLoader; /** * The type's internal name or {@code null} if no such name exists. */ private final String internalTypeName; /** * The class being redefined or {@code null} if no such class exists. */ private final Class<?> classBeingRedefined; /** * The type's protection domain. */ private final ProtectionDomain protectionDomain; /** * The type's binary representation. */ private final byte[] binaryRepresentation; /** * Creates a new type transformation dispatcher. * * @param classLoader The type's class loader or {@code null} if the bootstrap class loader is represented. * @param internalTypeName The type's internal name or {@code null} if no such name exists. * @param classBeingRedefined The class being redefined or {@code null} if no such class exists. * @param protectionDomain The type's protection domain. * @param binaryRepresentation The type's binary representation. */ protected LegacyVmDispatcher(ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { this.classLoader = classLoader; this.internalTypeName = internalTypeName; this.classBeingRedefined = classBeingRedefined; this.protectionDomain = protectionDomain; this.binaryRepresentation = binaryRepresentation; } @Override public byte[] run() { return transform(JavaModule.UNSUPPORTED, classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation); } /** * Returns the outer instance. * * @return The outer instance. */ private ExecutingTransformer getOuter() { return ExecutingTransformer.this; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; LegacyVmDispatcher that = (LegacyVmDispatcher) object; return (classLoader != null ? classLoader.equals(that.classLoader) : that.classLoader == null) && (internalTypeName != null ? 
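// Illustrative sketch (not part of the original source) of the technique used by the factory
// above: Byte Buddy generating a subclass that overrides one method and forwards its arguments
// to a differently declared super method via MethodCall.invoke(...).onSuper().withAllArguments().
// The Handler class is a hypothetical stand-in for ExecutingTransformer and its module-aware
// transform override.
import net.bytebuddy.ByteBuddy;
import net.bytebuddy.dynamic.loading.ClassLoadingStrategy;
import net.bytebuddy.implementation.MethodCall;
import static net.bytebuddy.matcher.ElementMatchers.named;
import static net.bytebuddy.matcher.ElementMatchers.takesArguments;

public class GeneratedOverrideExample {

    public static class Handler {

        public String handle(Object argument) {
            return "handled " + argument;
        }

        public String dispatch(Object argument) {
            return "unrouted";
        }
    }

    public static void main(String[] args) throws Exception {
        Handler handler = new ByteBuddy()
                .subclass(Handler.class)
                // Override dispatch(Object) in the generated subclass and route it to the
                // super class's handle(Object), forwarding all arguments.
                .method(named("dispatch").and(takesArguments(Object.class)))
                .intercept(MethodCall.invoke(Handler.class.getDeclaredMethod("handle", Object.class))
                        .onSuper()
                        .withAllArguments())
                .make()
                .load(GeneratedOverrideExample.class.getClassLoader(), ClassLoadingStrategy.Default.WRAPPER)
                .getLoaded()
                .getDeclaredConstructor()
                .newInstance();
        System.out.println(handler.dispatch("x")); // prints "handled x"
    }
}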
internalTypeName.equals(that.internalTypeName) : that.internalTypeName == null) && (classBeingRedefined != null ? classBeingRedefined.equals(that.classBeingRedefined) : that.classBeingRedefined == null) && protectionDomain.equals(that.protectionDomain) && ExecutingTransformer.this.equals(that.getOuter()) && Arrays.equals(binaryRepresentation, that.binaryRepresentation); } @Override public int hashCode() { int result = classLoader != null ? classLoader.hashCode() : 0; result = 31 * result + (internalTypeName != null ? internalTypeName.hashCode() : 0); result = 31 * result + (classBeingRedefined != null ? classBeingRedefined.hashCode() : 0); result = 31 * result + protectionDomain.hashCode(); result = 31 * result + ExecutingTransformer.this.hashCode(); result = 31 * result + Arrays.hashCode(binaryRepresentation); return result; } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.LegacyVmDispatcher{" + "outer=" + ExecutingTransformer.this + ", classLoader=" + classLoader + ", internalTypeName='" + internalTypeName + '\'' + ", classBeingRedefined=" + classBeingRedefined + ", protectionDomain=" + protectionDomain + ", binaryRepresentation=<" + binaryRepresentation.length + " bytes>" + '}'; } } /** * A privileged action for transforming a class on a JVM that supports modules. */ protected class Java9CapableVmDispatcher implements PrivilegedAction<byte[]> { /** * The type's {@code java.lang.reflect.Module}. */ private final Object rawModule; /** * The type's class loader or {@code null} if the type is loaded by the bootstrap loader. */ private final ClassLoader classLoader; /** * The type's internal name or {@code null} if no such name exists. */ private final String internalTypeName; /** * The class being redefined or {@code null} if no such class exists. */ private final Class<?> classBeingRedefined; /** * The type's protection domain. */ private final ProtectionDomain protectionDomain; /** * The type's binary representation. */ private final byte[] binaryRepresentation; /** * Creates a new legacy dispatcher. * * @param rawModule The type's {@code java.lang.reflect.Module}. * @param classLoader The type's class loader or {@code null} if the type is loaded by the bootstrap loader. * @param internalTypeName The type's internal name or {@code null} if no such name exists. * @param classBeingRedefined The class being redefined or {@code null} if no such class exists. * @param protectionDomain The type's protection domain. * @param binaryRepresentation The type's binary representation. */ protected Java9CapableVmDispatcher(Object rawModule, ClassLoader classLoader, String internalTypeName, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] binaryRepresentation) { this.rawModule = rawModule; this.classLoader = classLoader; this.internalTypeName = internalTypeName; this.classBeingRedefined = classBeingRedefined; this.protectionDomain = protectionDomain; this.binaryRepresentation = binaryRepresentation; } @Override public byte[] run() { return transform(JavaModule.of(rawModule), classLoader, internalTypeName, classBeingRedefined, protectionDomain, binaryRepresentation); } /** * Returns the outer instance. * * @return The outer instance. 
*/ private ExecutingTransformer getOuter() { return ExecutingTransformer.this; } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; Java9CapableVmDispatcher that = (Java9CapableVmDispatcher) object; return rawModule.equals(that.rawModule) && (classLoader != null ? classLoader.equals(that.classLoader) : that.classLoader == null) && (internalTypeName != null ? internalTypeName.equals(that.internalTypeName) : that.internalTypeName == null) && (classBeingRedefined != null ? classBeingRedefined.equals(that.classBeingRedefined) : that.classBeingRedefined == null) && protectionDomain.equals(that.protectionDomain) && ExecutingTransformer.this.equals(that.getOuter()) && Arrays.equals(binaryRepresentation, that.binaryRepresentation); } @Override public int hashCode() { int result = rawModule.hashCode(); result = 31 * result + (classLoader != null ? classLoader.hashCode() : 0); result = 31 * result + (internalTypeName != null ? internalTypeName.hashCode() : 0); result = 31 * result + (classBeingRedefined != null ? classBeingRedefined.hashCode() : 0); result = 31 * result + protectionDomain.hashCode(); result = 31 * result + ExecutingTransformer.this.hashCode(); result = 31 * result + Arrays.hashCode(binaryRepresentation); return result; } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.Java9CapableVmDispatcher{" + "outer=" + ExecutingTransformer.this + ", rawModule=" + rawModule + ", classLoader=" + classLoader + ", internalTypeName='" + internalTypeName + '\'' + ", classBeingRedefined=" + classBeingRedefined + ", protectionDomain=" + protectionDomain + ", binaryRepresentation=<" + binaryRepresentation.length + " bytes>" + '}'; } } /** * A listener that adds all discovered errors to a map. */ protected static class FailureCollectingListener extends RedefinitionStrategy.Listener.Adapter { /** * A mapping of failures by the class that causes this failure. */ private final Map<Class<?>, Throwable> failures; /** * Creates a new failure collecting listener. * * @param failures A mapping of failures by the class that causes this failure. */ protected FailureCollectingListener(Map<Class<?>, Throwable> failures) { this.failures = failures; } @Override public void onError(int index, List<Class<?>> batch, Throwable throwable, List<Class<?>> types) { for (Class<?> type : batch) { failures.put(type, throwable); } } @Override public boolean equals(Object object) { if (this == object) return true; if (object == null || getClass() != object.getClass()) return false; FailureCollectingListener that = (FailureCollectingListener) object; return failures.equals(that.failures); } @Override public int hashCode() { return failures.hashCode(); } @Override public String toString() { return "AgentBuilder.Default.ExecutingTransformer.FailureCollectingListener{" + "failures=" + failures + '}'; } } } /** * An abstract implementation of an agent builder that delegates all invocation to another instance. * * @param <T> The type that is produced by chaining a matcher. */ protected abstract class Delegator<T extends Matchable<T>> extends Matchable.AbstractBase<T> implements AgentBuilder { /** * Materializes the currently described {@link net.bytebuddy.agent.builder.AgentBuilder}. * * @return An agent builder that represents the currently described entry of this instance. 
*/ protected abstract AgentBuilder materialize(); @Override public AgentBuilder with(ByteBuddy byteBuddy) { return materialize().with(byteBuddy); } @Override public AgentBuilder with(Listener listener) { return materialize().with(listener); } @Override public AgentBuilder with(TypeStrategy typeStrategy) { return materialize().with(typeStrategy); } @Override public AgentBuilder with(PoolStrategy poolStrategy) { return materialize().with(poolStrategy); } @Override public AgentBuilder with(LocationStrategy locationStrategy) { return materialize().with(locationStrategy); } @Override public AgentBuilder with(InitializationStrategy initializationStrategy) { return materialize().with(initializationStrategy); } @Override public Redefining with(RedefinitionStrategy redefinitionStrategy) { return materialize().with(redefinitionStrategy); } @Override public AgentBuilder with(LambdaInstrumentationStrategy lambdaInstrumentationStrategy) { return materialize().with(lambdaInstrumentationStrategy); } @Override public AgentBuilder with(DescriptionStrategy descriptionStrategy) { return materialize().with(descriptionStrategy); } @Override public AgentBuilder with(InstallationStrategy installationStrategy) { return materialize().with(installationStrategy); } @Override public AgentBuilder with(FallbackStrategy fallbackStrategy) { return materialize().with(fallbackStrategy); } @Override public AgentBuilder enableBootstrapInjection(Instrumentation instrumentation, File folder) { return materialize().enableBootstrapInjection(instrumentation, folder); } @Override public AgentBuilder disableBootstrapInjection() { return materialize().disableBootstrapInjection(); } @Override public AgentBuilder enableNativeMethodPrefix(String prefix) { return materialize().enableNativeMethodPrefix(prefix); } @Override public AgentBuilder disableNativeMethodPrefix() { return materialize().disableNativeMethodPrefix(); } @Override public AgentBuilder disableClassFormatChanges() { return materialize().disableClassFormatChanges(); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Class<?>... type) { return materialize().assureReadEdgeTo(instrumentation, type); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, JavaModule... module) { return materialize().assureReadEdgeTo(instrumentation, module); } @Override public AgentBuilder assureReadEdgeTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return materialize().assureReadEdgeTo(instrumentation, modules); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Class<?>... type) { return materialize().assureReadEdgeFromAndTo(instrumentation, type); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, JavaModule... module) { return materialize().assureReadEdgeFromAndTo(instrumentation, module); } @Override public AgentBuilder assureReadEdgeFromAndTo(Instrumentation instrumentation, Collection<? extends JavaModule> modules) { return materialize().assureReadEdgeFromAndTo(instrumentation, modules); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher) { return materialize().type(typeMatcher); } @Override public Identified.Narrowable type(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher) { return materialize().type(typeMatcher, classLoaderMatcher); } @Override public Identified.Narrowable type(ElementMatcher<? 
super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return materialize().type(typeMatcher, classLoaderMatcher, moduleMatcher); } @Override public Identified.Narrowable type(RawMatcher matcher) { return materialize().type(matcher); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> ignoredTypes) { return materialize().ignore(ignoredTypes); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> ignoredTypes, ElementMatcher<? super ClassLoader> ignoredClassLoaders) { return materialize().ignore(ignoredTypes, ignoredClassLoaders); } @Override public Ignored ignore(ElementMatcher<? super TypeDescription> typeMatcher, ElementMatcher<? super ClassLoader> classLoaderMatcher, ElementMatcher<? super JavaModule> moduleMatcher) { return materialize().ignore(typeMatcher, classLoaderMatcher, moduleMatcher); } @Override public Ignored ignore(RawMatcher rawMatcher) { return materialize().ignore(rawMatcher); } @Override public ResettableClassFileTransformer makeRaw() { return materialize().makeRaw(); } @Override public ResettableClassFileTransformer installOn(Instrumentation instrumentation) { return materialize().installOn(instrumentation); } @Override public ResettableClassFileTransformer installOnByteBuddyAgent() { return materialize().installOnByteBuddyAgent(); } } /** * A delegator transformer for further precising what types to ignore. */ protected class Ignoring extends Delegator<Ignored> implements Ignored { /** * A matcher for identifying types that should not be instrumented. */ private final RawMatcher rawMatcher; /** * Creates a new agent builder for further specifying what types to ignore. * * @param rawMatcher A matcher for identifying types that should not be instrumented. */ protected Ignoring(RawMatcher rawMatcher) { this.rawMatcher = rawMatcher; } @Override protected AgentBuilder materialize() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, rawMatcher, transformation); } @Override public Ignored and(RawMatcher rawMatcher) { return new Ignoring(new RawMatcher.Conjunction(this.rawMatcher, rawMatcher)); } @Override public Ignored or(RawMatcher rawMatcher) { return new Ignoring(new RawMatcher.Disjunction(this.rawMatcher, rawMatcher)); } /** * Returns the outer instance. * * @return The outer instance. */ private Default getOuter() { return Default.this; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && rawMatcher.equals(((Ignoring) other).rawMatcher) && Default.this.equals(((Ignoring) other).getOuter()); } @Override public int hashCode() { int result = rawMatcher.hashCode(); result = 31 * result + Default.this.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Ignoring{" + "rawMatcher=" + rawMatcher + ", agentBuilder=" + Default.this + '}'; } } /** * A helper class that describes a {@link net.bytebuddy.agent.builder.AgentBuilder.Default} after supplying * a {@link net.bytebuddy.agent.builder.AgentBuilder.RawMatcher} such that one or several * {@link net.bytebuddy.agent.builder.AgentBuilder.Transformer}s can be supplied. 
*/ protected class Transforming extends Delegator<Identified.Narrowable> implements Identified.Extendable, Identified.Narrowable { /** * The supplied raw matcher. */ private final RawMatcher rawMatcher; /** * The supplied transformer. */ private final Transformer transformer; /** * {@code true} if this transformer serves as a decorator. */ private final boolean decorator; /** * Creates a new matched default agent builder. * * @param rawMatcher The supplied raw matcher. * @param transformer The supplied transformer. * @param decorator {@code true} if this transformer serves as a decorator. */ protected Transforming(RawMatcher rawMatcher, Transformer transformer, boolean decorator) { this.rawMatcher = rawMatcher; this.transformer = transformer; this.decorator = decorator; } @Override protected AgentBuilder materialize() { return new Default(byteBuddy, listener, poolStrategy, typeStrategy, locationStrategy, nativeMethodStrategy, initializationStrategy, redefinitionStrategy, redefinitionBatchAllocator, redefinitionFailureHandler, redefinitionListener, bootstrapInjectionStrategy, lambdaInstrumentationStrategy, descriptionStrategy, installationStrategy, fallbackStrategy, ignoredTypeMatcher, new Transformation.Compound(new Transformation.Simple(rawMatcher, transformer, decorator), transformation)); } @Override public Identified.Extendable transform(Transformer transformer) { return new Transforming(rawMatcher, new Transformer.Compound(this.transformer, transformer), decorator); } @Override public AgentBuilder asDecorator() { return new Transforming(rawMatcher, transformer, true); } @Override public Narrowable and(RawMatcher rawMatcher) { return new Transforming(new RawMatcher.Conjunction(this.rawMatcher, rawMatcher), transformer, decorator); } @Override public Narrowable or(RawMatcher rawMatcher) { return new Transforming(new RawMatcher.Disjunction(this.rawMatcher, rawMatcher), transformer, decorator); } /** * Returns the outer instance. * * @return The outer instance. */ private Default getOuter() { return Default.this; } @Override public boolean equals(Object other) { return this == other || !(other == null || getClass() != other.getClass()) && decorator == ((Transforming) other).decorator && rawMatcher.equals(((Transforming) other).rawMatcher) && transformer.equals(((Transforming) other).transformer) && Default.this.equals(((Transforming) other).getOuter()); } @Override public int hashCode() { int result = rawMatcher.hashCode(); result = 31 * result + (decorator ? 1 : 0); result = 31 * result + transformer.hashCode(); result = 31 * result + Default.this.hashCode(); return result; } @Override public String toString() { return "AgentBuilder.Default.Transforming{" + "rawMatcher=" + rawMatcher + ", transformer=" + transformer + ", decorator=" + decorator + ", agentBuilder=" + Default.this + '}'; } } } }
Added javadoc refinement.
byte-buddy-dep/src/main/java/net/bytebuddy/agent/builder/AgentBuilder.java
Added javadoc refinement.
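Illustrative usage sketch for the AgentBuilder API shown in the record above, not taken from that commit: the Ignoring and Transforming delegators are normally driven from a Java agent's premain. The matched package name and the TimingAdvice class are hypothetical, and the four-argument Transformer lambda assumes a Byte Buddy version contemporary with this code (newer releases add a ProtectionDomain parameter).

import java.lang.instrument.Instrumentation;

import net.bytebuddy.agent.builder.AgentBuilder;
import net.bytebuddy.asm.Advice;
import net.bytebuddy.matcher.ElementMatchers;

public class ExampleTimingAgent {

    public static void premain(String arguments, Instrumentation instrumentation) {
        new AgentBuilder.Default()
                // Ignoring: exclude types before any transformer is consulted.
                .ignore(ElementMatchers.nameStartsWith("net.bytebuddy."))
                // Transforming: pair a matcher with a transformer; each step materializes a new immutable builder.
                .type(ElementMatchers.nameStartsWith("com.example.")) // hypothetical application package
                .transform((builder, typeDescription, classLoader, module) ->
                        builder.visit(Advice.to(TimingAdvice.class).on(ElementMatchers.isMethod())))
                .installOn(instrumentation);
    }

    public static class TimingAdvice {

        @Advice.OnMethodEnter
        public static long enter() {
            return System.nanoTime();
        }

        @Advice.OnMethodExit
        public static void exit(@Advice.Enter long start) {
            System.out.println("method took " + (System.nanoTime() - start) + " ns");
        }
    }
}

Each type(...)/transform(...) call returns a fresh builder rather than mutating state, which is why the Delegator subclasses in the record rebuild a Default instance in materialize().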
Java
apache-2.0
7e3a5de0d6dc1606b2a4c5f1ad3f46d5048237a9
0
GELOG/sample-hbase-app
package org.gelog.sys870.hbaseapp;

import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class HBaseApp {

    private static final Logger LOG = LoggerFactory.getLogger(HBaseConfiguration.class);

    public static void main( String[] args ) throws IOException {
        new HBaseApp();
    }

    public HBaseApp() throws IOException {
        Configuration conf;
        Connection conn;
        String zkConnectionString;

        // Scanner reader = new Scanner(System.in); // Reading from System.in
        // System.out.println("Enter a number: ");
        // int n = reader.nextInt();

        System.out.println("Setting up HBase configuration ...");
        conf = configureHBase();
        System.out.println("\t" + getPropertyTraceability(conf, "hbase.zookeeper.quorum") );
        System.out.println("\t" + getPropertyTraceability(conf, "hbase.zookeeper.property.clientPort") );

        // Note: Verify that the client can connect to ZooKeeper (normally not required)
        System.out.println("Connecting manually to ZooKeeper (not required) ...");
        zkConnectionString = conf.get( "hbase.zookeeper.quorum" );
        testingZooKeeper( zkConnectionString );
        //System.exit(1);

        System.out.println("Using HBase client to connect to the ZooKeeper Quorum ...");
        conn = ConnectionFactory.createConnection( conf );
        ScanningMetaTable( conn );
        System.exit(1);
    }

    public void createTable( Connection conn ) throws IOException {
        Admin admin;
        String tableName;
        TableName tableNameH; // TableName used by HBase (bytes ?)
        HTableDescriptor table;
        String family;

        admin = conn.getAdmin();
        tableName = "demo-table";
        tableNameH = TableName.valueOf( tableName );

        if ( admin.tableExists(tableNameH) ) {
            System.out.println("Table already exists. Deleting table " + tableName);
            admin.disableTable( tableNameH );
            admin.deleteTable( tableNameH );
        }

        System.out.println("Creating table " + tableName);
        family = "cf";
        table = new HTableDescriptor( tableNameH );
        table.addFamily( new HColumnDescriptor( family ) );
        admin.createTable( table );
    }

    public Configuration configureHBase() {
        Configuration conf;
        InputStream confStream;

        conf = HBaseConfiguration.create();
        confStream = conf.getConfResourceAsInputStream("hello.xml");
        int available = 0;
        try {
            available = confStream.available();
        } catch (Exception e) {
            //for debug purpose
            System.out.println("configuration files not found locally");
        } finally {
            IOUtils.closeQuietly( confStream );
        }

        if (available == 0 ) {
            conf = new Configuration();
            conf.addResource("core-site.xml");
            conf.addResource("hbase-site.xml");
            conf.addResource("hdfs-site.xml");
        }

        // Add any necessary configuration files (hbase-site.xml, core-site.xml)
        //config.addResource(new Path(System.getenv("HBASE_CONF_DIR"), "hbase-site.xml"));
        //config.addResource(new Path(System.getenv("HADOOP_CONF_DIR"), "core-site.xml"));

        return conf;
    }

    public String getPropertyTraceability( Configuration conf, String key ) {
        String value;
        String[] sources;
        String source;

        value = conf.get( key );
        sources = conf.getPropertySources( key );
        // Only keep the most recent source (last in the array)
        source = (sources != null ? sources[sources.length-1] : "");
        return key + " = " + value + " (" + source + ")";
    }

    public void testingZooKeeper( String zkConnectionString ) throws IOException {
        ZooKeeperWrapper zk;
        int zkSessionTimeout;

        zkSessionTimeout = 3000;
        zk = new ZooKeeperWrapper( zkConnectionString, zkSessionTimeout );
        System.out.println("Listing paths in ZooKeeper recursively ...");
        zk.list( "/" );
        zk.disconnect();
    }

    public void ScanningMetaTable( Connection connection ) throws IOException {
        String tableName;
        TableName tableNameH; // TableName used by HBase (bytes ?)
        Table table;

        tableName = "hbase:meta";
        tableNameH = TableName.valueOf( tableName );
        table = connection.getTable( tableNameH );
        connection.getRegionLocator(tableNameH).getRegionLocation("row1".getBytes()).getHostname();
    }
}
src/main/java/org/gelog/sys870/hbaseapp/HBaseApp.java
package org.gelog.sys870.hbaseapp; import java.io.Console; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.List; import java.util.Scanner; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.ConnectionFactory; import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.WatchedEvent; import org.apache.zookeeper.Watcher; import org.apache.zookeeper.ZooKeeper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class HBaseApp { private static final Logger LOG = LoggerFactory.getLogger(HBaseConfiguration.class); public static void main( String[] args ) throws IOException { System.out.println( "Hello World!" ); new HBaseApp(); } public HBaseApp() throws IOException { Configuration conf; Connection conn; Admin admin; String tableName; TableName tableNameH; // TableName used by HBase (bytes ?) HTableDescriptor table; String family; InputStream confStream; // Scanner reader = new Scanner(System.in); // Reading from System.in // System.out.println("Enter a number: "); // int n = reader.nextInt(); System.out.println("Setting up HBase configuration ..."); conf = HBaseConfiguration.create(); confStream = conf.getConfResourceAsInputStream("hello.xml"); int available = 0; try { available = confStream.available(); } catch (Exception e) { //for debug purpose System.out.println("configuration files not found locally"); } finally { IOUtils.closeQuietly( confStream ); } if (available == 0 ) { conf = new Configuration(); conf.addResource("core-site.xml"); conf.addResource("hbase-site.xml"); conf.addResource("hdfs-site.xml"); } ///conf.set("hbase.zookeeper.quorum", "localhost", "david"); System.out.println("Connecting to HBase ZooKeeper Quorum ..."); System.out.println("\t" + getPropertyTraceability(conf, "hbase.zookeeper.quorum") ); System.out.println("\t" + getPropertyTraceability(conf, "hbase.zookeeper.property.clientPort") ); ZooKeeperWrapper zk; String zkConnectionString; int zkSessionTimeout; zkConnectionString = "192.168.99.100"; zkSessionTimeout = 3000; zk = new ZooKeeperWrapper( zkConnectionString, zkSessionTimeout ); System.out.println("Listing paths in ZooKeeper recursively ..."); zk.list( "/" ); zk.disconnect(); //System.exit(1); conn = ConnectionFactory.createConnection( conf ); tableNameH = TableName.valueOf("hbase:meta"); System.exit(1); admin = conn.getAdmin(); tableName = "demo-table"; tableNameH = TableName.valueOf( tableName ); if ( admin.tableExists(tableNameH) ) { System.out.println("Table already exists. 
Deleting table " + tableName); admin.disableTable( tableNameH ); admin.deleteTable( tableNameH ); } System.out.println("Creating table " + tableName); family = "cf"; table = new HTableDescriptor( tableNameH ); table.addFamily( new HColumnDescriptor( family ) ); admin.createTable( table ); // Add any necessary configuration files (hbase-site.xml, core-site.xml) //config.addResource(new Path(System.getenv("HBASE_CONF_DIR"), "hbase-site.xml")); //config.addResource(new Path(System.getenv("HADOOP_CONF_DIR"), "core-site.xml")); } public String getPropertyTraceability( Configuration conf, String key ) { String value; String[] sources; String source; value = conf.get( key ); sources = conf.getPropertySources( key ); // Only keep the most recent source (last in the array) source = (sources != null ? sources[sources.length-1] : ""); return key + " = " + value + " (" + source + ")"; } }
Refactored to clean up the code and split it into functions.
src/main/java/org/gelog/sys870/hbaseapp/HBaseApp.java
Refactored to clean up the code and split it into functions.
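Illustrative sketch of the client flow that the refactored HBaseApp above wraps, not taken from that commit: build a Configuration, open a Connection through ConnectionFactory, then use Admin to create the table if it is absent (HBase 1.x client API; the ZooKeeper quorum address and table name are placeholders).

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class HBaseQuickStart {

    public static void main(String[] args) throws IOException {
        // Start from hbase-site.xml/core-site.xml on the classpath, then override the quorum (placeholder values).
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "localhost");
        conf.set("hbase.zookeeper.property.clientPort", "2181");

        // Connection and Admin are Closeable, so try-with-resources releases them.
        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {

            TableName tableName = TableName.valueOf("demo-table");
            if (!admin.tableExists(tableName)) {
                HTableDescriptor table = new HTableDescriptor(tableName);
                table.addFamily(new HColumnDescriptor("cf"));
                admin.createTable(table);
            }
        }
    }
}

HBaseApp layers configuration traceability and a manual ZooKeeper connectivity check on top of this core flow.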
Java
apache-2.0
ddb1eb95554fa75111324484c6800cb40f3801ad
0
endeavourhealth/EDS,endeavourhealth/EDS,endeavourhealth/EDS,endeavourhealth/EDS,endeavourhealth/EDS
package org.endeavourhealth.queuereader; import OpenPseudonymiser.Crypto; import com.fasterxml.jackson.databind.JsonNode; import com.google.common.base.Strings; import org.apache.commons.csv.*; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.endeavourhealth.common.cache.ObjectMapperPool; import org.endeavourhealth.common.config.ConfigManager; import org.endeavourhealth.common.security.datasharingmanagermodel.models.database.DataSharingAgreementEntity; import org.endeavourhealth.common.security.usermanagermodel.models.caching.DataSharingAgreementCache; import org.endeavourhealth.common.security.usermanagermodel.models.caching.OrganisationCache; import org.endeavourhealth.common.security.usermanagermodel.models.caching.ProjectCache; import org.endeavourhealth.common.utility.FileHelper; import org.endeavourhealth.common.utility.ThreadPool; import org.endeavourhealth.common.utility.ThreadPoolError; import org.endeavourhealth.core.configuration.ConfigDeserialiser; import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig; import org.endeavourhealth.core.configuration.QueueReaderConfiguration; import org.endeavourhealth.core.database.dal.DalProvider; import org.endeavourhealth.core.database.dal.admin.LibraryRepositoryHelper; import org.endeavourhealth.core.database.dal.admin.ServiceDalI; import org.endeavourhealth.core.database.dal.admin.models.Service; import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI; import org.endeavourhealth.core.database.dal.audit.ExchangeDalI; import org.endeavourhealth.core.database.dal.audit.models.*; import org.endeavourhealth.core.database.dal.datagenerator.SubscriberZipFileUUIDsDalI; import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI; import org.endeavourhealth.core.database.dal.eds.PatientSearchDalI; import org.endeavourhealth.core.database.dal.ehr.ResourceDalI; import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper; import org.endeavourhealth.core.database.dal.publisherTransform.models.ResourceFieldMappingAudit; import org.endeavourhealth.core.database.dal.reference.PostcodeDalI; import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup; import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberOrgMappingDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberPersonMappingDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberResourceMappingDalI; import org.endeavourhealth.core.database.dal.subscriberTransform.models.SubscriberId; import org.endeavourhealth.core.database.rdbms.ConnectionManager; import org.endeavourhealth.core.database.rdbms.enterprise.EnterpriseConnector; import org.endeavourhealth.core.exceptions.TransformException; import org.endeavourhealth.core.fhirStorage.FhirStorageService; import org.endeavourhealth.core.fhirStorage.ServiceInterfaceEndpoint; import org.endeavourhealth.core.messaging.pipeline.components.MessageTransformOutbound; import org.endeavourhealth.core.messaging.pipeline.components.OpenEnvelope; import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange; import org.endeavourhealth.core.queueing.QueueHelper; import org.endeavourhealth.core.xml.QueryDocument.*; import org.endeavourhealth.transform.common.*; import org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer; import org.endeavourhealth.transform.subscriber.targetTables.OutputContainer; import 
org.endeavourhealth.transform.subscriber.targetTables.SubscriberTableId; import org.endeavourhealth.transform.tpp.TppCsvToFhirTransformer; import org.hibernate.internal.SessionImpl; import org.hl7.fhir.instance.model.MedicationStatement; import org.hl7.fhir.instance.model.ResourceType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import java.io.*; import java.lang.System; import java.lang.reflect.Constructor; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.StandardOpenOption; import java.sql.*; import java.text.SimpleDateFormat; import java.util.*; import java.util.Date; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; public class Main { private static final Logger LOG = LoggerFactory.getLogger(Main.class); public static void main(String[] args) throws Exception { String configId = args[0]; LOG.info("Initialising config manager"); ConfigManager.initialize("queuereader", configId); /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEncounters")) { String table = args[1]; fixEncounters(table); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("DeleteEnterpriseObs")) { String filePath = args[1]; String configName = args[2]; int batchSize = Integer.parseInt(args[3]); deleteEnterpriseObs(filePath, configName, batchSize); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixTppStaffBulks")) { boolean testMode = Boolean.parseBoolean(args[1]); String odsCodeRegex = null; if (args.length > 2) { odsCodeRegex = args[2]; } fixTppStaffBulks(testMode, odsCodeRegex); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("TestDSM")) { String odsCode = args[1]; String projectId = args[2]; testDsm(odsCode, projectId); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FindMissedExchanges")) { String tableName = args[1]; String odsCodeRegex = null; if (args.length > 2) { odsCodeRegex = args[2]; } findMissedExchanges(tableName, odsCodeRegex); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("SendPatientsToSubscriber")) { String tableName = args[1]; sendPatientsToSubscriber(tableName); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateDeleteZipsForSubscriber")) { int batchSize = Integer.parseInt(args[1]); String sourceTable = args[2]; int subscriberId = Integer.parseInt(args[3]); createDeleteZipsForSubscriber(batchSize, sourceTable, subscriberId); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestJMX")) { testJmx(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestDatabases")) { String serviceIdStr = args[1]; String subscriberConfigName = args[2]; testDatabases(serviceIdStr, subscriberConfigName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulatePatientSearchEpisodeOdsCode")) { populatePatientSearchEpisodeOdsCode(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisSnomedCodes")) { String odsCodeRegex = null; if (args.length > 1) { odsCodeRegex = args[1]; } fixEmisSnomedCodes(odsCodeRegex); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisDrugRecords")) { String odsCodeRegex = null; if (args.length > 1) { odsCodeRegex = args[1]; } fixEmisDrugRecords(odsCodeRegex); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateSubscriberDBPseudoId")) { String subscriberConfigName = 
args[1]; String saltKeyName = args[2]; populateSubscriberPseudoId(subscriberConfigName, saltKeyName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("InvestigateMissingPatients")) { String nhsNumberFile = args[1]; String protocolName = args[2]; String subscriberConfigName = args[3]; String odsCodeRegex = args[4]; investigateMissingPatients(nhsNumberFile, protocolName, subscriberConfigName, odsCodeRegex); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixMedicationStatementIsActive")) { String odsCodeRegex = null; if (args.length > 1) { odsCodeRegex = args[1]; } fixMedicationStatementIsActive(odsCodeRegex); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixMissingEmisEthnicities")) { String filePath = args[1]; String odsCodeRegex = null; if (args.length > 2) { odsCodeRegex = args[2]; } fixMissingEmisEthnicities(filePath, odsCodeRegex); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("UpdatePatientSearch")) { String filePath = args[1]; updatePatientSearch(filePath); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("SubscriberFullLoad")) { UUID serviceId = UUID.fromString(args[1]); UUID protocolId = UUID.fromString(args[2]); QueueHelper.queueUpFullServiceForPopulatingSubscriber(serviceId, protocolId); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("RunPersonUpdater")) { String enterpriseConfigName = args[1]; runPersonUpdater(enterpriseConfigName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CountNhsNumberChanges")) { String odsCode = args[1]; countNhsNumberChanges(odsCode); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("TransformPatients")) { String sourceFile = args[1]; transformPatients(sourceFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindPatientsThatNeedTransforming")) { String file = args[1]; String odsCode = null; if (args.length > 2) { odsCode = args[2]; } findPatientsThatNeedTransforming(file, odsCode); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateDigest")) { String url = args[1]; String user = args[2]; String pass = args[3]; String table = args[4]; String columnFrom = args[5]; String columnTo = args[6]; String base64Salt = args[7]; String validNhsNumberCol = null; if (args.length > 8) { validNhsNumberCol = args[8]; } createDigest(url, user, pass, table, columnFrom, columnTo, base64Salt, validNhsNumberCol); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertAudits2")) { String configName = args[1]; String tempTable = args[2]; int threads = Integer.parseInt(args[3]); int batchSize = Integer.parseInt(args[4]); boolean testMode = Boolean.parseBoolean(args[5]); convertFhirAudits2(configName, tempTable, threads, batchSize, testMode); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertAudits")) { String configName = args[2]; int threads = Integer.parseInt(args[3]); int batchSize = Integer.parseInt(args[4]); convertFhirAudits(configName, threads, batchSize); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestRabbit")) { String nodes = args[1]; String username = args[2]; String password = args[3]; String exchangeName = args[4]; String queueName = args[5]; String sslProtocol = null; if (args.length > 6) { sslProtocol = args[6]; } testRabbit(nodes, username, password, sslProtocol, exchangeName, queueName); System.exit(0); }*/ /*if (args.length >= 1 && 
args[0].equalsIgnoreCase("FixEmisEpisodes1")) { String odsCode = args[1]; //fixEmisEpisodes1(odsCode); fixEmisEpisodes2(odsCode); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3Listing")) { String path = args[1]; testS3Listing(path); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CheckForBartsMissingFiles")) { String sinceDate = args[1]; checkForBartsMissingFiles(sinceDate); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateHomertonSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateAdastraSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateVisionSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTppSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createTppSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("CreateBartsSubset")) { String sourceDirPath = args[1]; UUID serviceUuid = UUID.fromString(args[2]); UUID systemUuid = UUID.fromString(args[3]); String samplePatientsFile = args[4]; createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateEmisSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createEmisSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindBartsPersonIds")) { String sourceFile = args[1]; UUID serviceUuid = UUID.fromString(args[2]); UUID systemUuid = UUID.fromString(args[3]); String dateCutoffStr = args[4]; String dstFile = args[5]; findBartsPersonIds(sourceFile, serviceUuid, systemUuid, dateCutoffStr, dstFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixTPPNullOrgs")) { String sourceDirPath = args[1]; String orgODS = args[2]; LOG.info("Fixing TPP Null Organisations"); fixTPPNullOrgs(sourceDirPath, orgODS); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisDeletedPatients")) { String odsCode = args[1]; fixEmisDeletedPatients(odsCode); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostPatientToProtocol")) { String odsCode = args[1]; String patientUuid = args[2]; postPatientToProtocol(odsCode, patientUuid); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("PostPatientsToProtocol")) { UUID serviceId = UUID.fromString(args[1]); UUID systemId = UUID.fromString(args[2]); String sourceFile = args[3]; postPatientsToProtocol(serviceId, systemId, sourceFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestMetrics")) { testMetrics(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestXML")) { testXml(); System.exit(0); }*/ /*if (args.length >= 1 && 
args[0].equalsIgnoreCase("TestGraphiteMetrics")) { String host = args[1]; String port = args[2]; testGraphiteMetrics(host, port); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsOrgs")) { String serviceId = args[1]; fixBartsOrgs(serviceId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestPreparedStatements")) { String url = args[1]; String user = args[2]; String pass = args[3]; String serviceId = args[4]; testPreparedStatements(url, user, pass, serviceId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTransformMap")) { UUID serviceId = UUID.fromString(args[1]); String table = args[2]; String dstFile = args[3]; createTransforMap(serviceId, table, dstFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ExportFhirToCsv")) { UUID serviceId = UUID.fromString(args[1]); String path = args[2]; exportFhirToCsv(serviceId, path); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestBatchInserts")) { String url = args[1]; String user = args[2]; String pass = args[3]; String num = args[4]; String batchSize = args[5]; testBatchInserts(url, user, pass, num, batchSize); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) { applyEmisAdminCaches(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixSubscribers")) { fixSubscriberDbs(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3Read")) { String s3Bucket = args[1]; String s3Key = args[2]; String start = args[3]; String len = args[4]; testS3Read(s3Bucket, s3Key, start, len); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) { String publisherId = args[1]; String systemId = args[2]; fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CheckDeletedObs")) { String serviceId = args[1]; String systemId = args[2]; checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) { fixPersonsNoNhsNumber(); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CalculateUprnPseudoIds")) { String subscriberConfigName = args[1]; String targetTable = args[2]; calculateUprnPseudoIds(subscriberConfigName, targetTable); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) { String subscriberConfigName = args[1]; Integer overrideBatchSize = null; if (args.length > 2) { overrideBatchSize = Integer.valueOf(args[2]); } String patientId = null; if (args.length > 3) { patientId = args[3]; } populateSubscriberUprnTable(subscriberConfigName, overrideBatchSize, patientId); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertEmisGuid")) { convertEmisGuids(); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("PostToRabbit")) { String exchangeName = args[1]; String srcFile 
= args[2]; Integer throttle = null; if (args.length > 3) { throttle = Integer.parseInt(args[3]); } postToRabbit(exchangeName, srcFile, throttle); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PostExchangesToProtocol")) { String srcFile = args[1]; postExchangesToProtocol(srcFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsPatients")) { UUID serviceId = UUID.fromString(args[1]); fixBartsPatients(serviceId); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixDeceasedPatients")) { String subscriberConfig = args[1]; fixDeceasedPatients(subscriberConfig); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixPseudoIds")) { String subscriberConfig = args[1]; int threads = Integer.parseInt(args[2]); fixPseudoIds(subscriberConfig, threads); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("MoveS3ToAudit")) { int threads = Integer.parseInt(args[1]); moveS3ToAudit(threads); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertExchangeBody")) { String systemId = args[1]; convertExchangeBody(UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixReferrals")) { fixReferralRequests(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateNewSearchTable")) { String table = args[1]; populateNewSearchTable(table); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsEscapes")) { String filePath = args[1]; fixBartsEscapedFiles(filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; String systemId = args[2]; String filePath = args[3]; postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixDisabledExtract")) { String sharedStoragePath = args[1]; String tempDir = args[2]; String systemId = args[3]; String serviceOdsCode = args[4]; fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisMissingSlots")) { String serviceOdsCode = args[1]; fixEmisMissingSlots(serviceOdsCode); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateLastDataDate")) { int threads = Integer.parseInt(args[1]); int batchSize = Integer.parseInt(args[2]); populateLastDataDate(threads, batchSize); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestSlack")) { testSlack(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; boolean all = Boolean.parseBoolean(args[2]); postToInbound(UUID.fromString(serviceId), all); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixPatientSearch")) { String serviceId = args[1]; String systemId = null; if (args.length > 2) { systemId = args[2]; } if (serviceId.equalsIgnoreCase("All")) { fixPatientSearchAllServices(systemId); } else { fixPatientSearch(serviceId, systemId); } System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixSlotReferences")) { String serviceId = args[1]; try { UUID serviceUuid = UUID.fromString(serviceId); fixSlotReferences(serviceUuid); } catch (Exception ex) { fixSlotReferencesForPublisher(serviceId); } System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("TestAuditingFile")) { UUID serviceId = 
UUID.fromString(args[1]); UUID systemId = UUID.fromString(args[2]); UUID exchangeId = UUID.fromString(args[3]); String version = args[4]; String filePath = args[5]; testAuditingFile(serviceId, systemId, exchangeId, version, filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3VsMySQL")) { UUID serviceUuid = UUID.fromString(args[1]); int count = Integer.parseInt(args[2]); int sqlBatchSize = Integer.parseInt(args[3]); String bucketName = args[4]; testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("Exit")) { String exitCode = args[1]; LOG.info("Exiting with error code " + exitCode); int exitCodeInt = Integer.parseInt(exitCode); System.exit(exitCodeInt); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("RunSql")) { String host = args[1]; String username = args[2]; String password = args[3]; String sqlFile = args[4]; runSql(host, username, password, sqlFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateProtocolQueue")) { String serviceId = null; if (args.length > 1) { serviceId = args[1]; } String startingExchangeId = null; if (args.length > 2) { startingExchangeId = args[2]; } populateProtocolQueue(serviceId, startingExchangeId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEncounterTerms")) { String path = args[1]; String outputPath = args[2]; findEncounterTerms(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEmisStartDates")) { String path = args[1]; String outputPath = args[2]; findEmisStartDates(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ExportHl7Encounters")) { String sourceCsvPpath = args[1]; String outputPath = args[2]; exportHl7Encounters(sourceCsvPpath, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixExchangeBatches")) { fixExchangeBatches(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindCodes")) { findCodes(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindDeletedOrgs")) { findDeletedOrgs(); System.exit(0); }*/ if (args.length >= 0 && args[0].equalsIgnoreCase("LoadEmisData")) { String serviceId = args[1]; String systemId = args[2]; String dbUrl = args[3]; String dbUsername = args[4]; String dbPassword = args[5]; String onlyThisFileType = null; if (args.length > 6) { onlyThisFileType = args[6]; } loadEmisData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateEmisDataTables")) { createEmisDataTables(); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("LoadBartsData")) { String serviceId = args[1]; String systemId = args[2]; String dbUrl = args[3]; String dbUsername = args[4]; String dbPassword = args[5]; String startDate = args[6]; String onlyThisFileType = null; if (args.length > 7) { onlyThisFileType = args[7]; } loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, startDate, onlyThisFileType); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateBartsDataTables")) { createBartsDataTables(); System.exit(0); } if (args.length != 1) { LOG.error("Usage: queuereader config_id"); return; } LOG.info("--------------------------------------------------"); LOG.info("EDS Queue Reader " + configId); LOG.info("--------------------------------------------------"); 
LOG.info("Fetching queuereader configuration"); String configXml = ConfigManager.getConfiguration(configId); QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); /*LOG.info("Registering shutdown hook"); registerShutdownHook();*/ // Instantiate rabbit handler RabbitHandler rabbitHandler = new RabbitHandler(configuration, configId); rabbitHandler.start(); LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")"); } private static void fixTppStaffBulks(boolean testMode, String odsCodeRegex) { LOG.info("Fixing TPP Staff Bulks using testMode " + testMode + " and regex " + odsCodeRegex); try { Set<String> hsNonPatientFiles = new HashSet<>(); hsNonPatientFiles.add("Ccg"); hsNonPatientFiles.add("Ctv3"); hsNonPatientFiles.add("Mapping"); hsNonPatientFiles.add("MappingGroup"); hsNonPatientFiles.add("ConfiguredListOption"); hsNonPatientFiles.add("Ctv3ToVersion2"); hsNonPatientFiles.add("Ctv3ToSnomed"); hsNonPatientFiles.add("Ctv3Hierarchy"); hsNonPatientFiles.add("ImmunisationContent"); hsNonPatientFiles.add("MedicationReadCodeDetails"); hsNonPatientFiles.add("Organisation"); hsNonPatientFiles.add("OrganisationBranch"); hsNonPatientFiles.add("Staff"); hsNonPatientFiles.add("StaffMemberProfile"); hsNonPatientFiles.add("StaffMember"); hsNonPatientFiles.add("StaffMemberProfileRole"); hsNonPatientFiles.add("Trust"); hsNonPatientFiles.add("Questionnaire"); hsNonPatientFiles.add("Template"); hsNonPatientFiles.add("Manifest"); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { //check regex if (shouldSkipService(service, odsCodeRegex)) { continue; } //skip non-TPP if (service.getTags() == null || !service.getTags().containsKey("TPP")) { continue; } LOG.debug("Doing " + service); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { //find all exchanges ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemId, Integer.MAX_VALUE); for (Exchange exchange: exchanges) { //check if the exchange contains ONLY the non-patient files List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); if (files.isEmpty()) { continue; } boolean hasPatientFile = false; for (ExchangePayloadFile file: files) { String fileType = file.getType(); if (!hsNonPatientFiles.contains(fileType)) { hasPatientFile = true; } } //if we have a patient file in the exchange it wasn't affected by the bug if (hasPatientFile) { continue; } LOG.debug(" Exchange " + exchange.getId() + " only contains non-patient files"); //if the exchange only contains non-patient files, then we need to check the manifest file //to see if those files were bulks ExchangePayloadFile firstFile = files.get(0); File f = new File(firstFile.getPath()); //e.g. s3://<bucket>/<root>/sftpReader/TPP/YDDH3_07Y_GWR/2020-01-18T18.41.00/Split/E85697/SRCtv3Hierarchy.csv f = f.getParentFile(); //e.g. s3://<bucket>/<root>/sftpReader/TPP/YDDH3_07Y_GWR/2020-01-18T18.41.00/Split/E85697/ f = f.getParentFile(); //e.g. s3://<bucket>/<root>/sftpReader/TPP/YDDH3_07Y_GWR/2020-01-18T18.41.00/Split/ f = f.getParentFile(); //e.g. 
s3://<bucket>/<root>/sftpReader/TPP/YDDH3_07Y_GWR/2020-01-18T18.41.00/ f = new File(f, "SRManifest.csv"); String manifestPath = f.getAbsolutePath(); LOG.debug(" Checking manifest at " + manifestPath); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(manifestPath, TppCsvToFhirTransformer.ENCODING); CSVParser csvParser = new CSVParser(reader, TppCsvToFhirTransformer.CSV_FORMAT.withHeader()); Map<String, Boolean> hmManifestContents = new HashMap<>(); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String fileName = csvRecord.get("FileName"); String isDeltaStr = csvRecord.get("IsDelta"); if (isDeltaStr.equalsIgnoreCase("Y")) { hmManifestContents.put(fileName, Boolean.TRUE); } else if (isDeltaStr.equalsIgnoreCase("N")) { hmManifestContents.put(fileName, Boolean.FALSE); } else { //something wrong throw new Exception("Unexpected value [" + isDeltaStr + "] in " + manifestPath); } } } finally { csvParser.close(); } Boolean firstIsDelta = null; String firstFileName = null; for (ExchangePayloadFile file: files) { String name = FilenameUtils.getBaseName(file.getPath()); //the Manifest file doesn't contain itself or the SRMapping files //and the Mapping file is processed into publisher_common so we don't need to worry about copying //that to every split directory if (name.equals("SRManifest") || name.equals("SRMapping") || name.equals("SRMappingGroup")) { continue; } //the map doesn't contain file extensions Boolean isDelta = hmManifestContents.get(name); if (isDelta == null) { throw new Exception("Failed to find file " + name + " in SRManifest in " + manifestPath); } if (firstIsDelta == null) { firstIsDelta = isDelta; firstFileName = name; } else if (firstIsDelta.booleanValue() != isDelta.booleanValue()) { //if this file is different to a previous one, we don't have a way to handle this throw new Exception("Mis-match in delta state for non-patient files in " + manifestPath + " " + name + " isDelta = " + isDelta + " but " + firstFileName + " isDelta = " + firstIsDelta); } } //if all the files were bulk files then these non-patient were wrongly copied over to our //service from the bulk of another service so this exchange should not have been created if (firstIsDelta == null || firstIsDelta.booleanValue()) { continue; } LOG.debug(" Exchange " + exchange.getId() + " should not have been created"); if (testMode) { LOG.debug(" NOT FIXING AS TEST MODE"); continue; } //add header exchange.setHeaderAsBoolean(HeaderKeys.AllowQueueing, new Boolean(false)); //save exchange AuditWriter.writeExchange(exchange); } } } LOG.info("Finished Fixing TPP Staff Bulks"); } catch (Throwable t) { LOG.error("", t); } } private static void testDsm(String odsCode, String projectId) { LOG.info("Testing DSM for " + odsCode + " and project " + projectId); try { LOG.debug("Testing getAllPublishersForProjectWithSubscriberCheck"); List<String> results = ProjectCache.getAllPublishersForProjectWithSubscriberCheck(projectId, odsCode); LOG.debug("Got " + results); LOG.debug(""); LOG.debug(""); LOG.debug("Testing doesOrganisationHaveDPA"); Boolean b = OrganisationCache.doesOrganisationHaveDPA(odsCode); LOG.debug("Got " + b); LOG.debug(""); LOG.debug(""); LOG.debug("Testing getAllDSAsForPublisherOrg"); List<DataSharingAgreementEntity> list = DataSharingAgreementCache.getAllDSAsForPublisherOrg(odsCode); if (list == null) { LOG.debug("Got NULL"); } else { LOG.debug("Got " + list.size()); for (DataSharingAgreementEntity e: list) { 
LOG.debug(" -> " + e.getName() + " " + e.getUuid()); } } LOG.info("Finished Testing DSM for " + odsCode); } catch (Throwable t) { LOG.error("", t); } } private static void sendPatientsToSubscriber(String tableName) { LOG.info("Sending patients to subscriber from " + tableName); try { Connection conn = ConnectionManager.getEdsConnection(); String sql = "SELECT service_id, protocol_id, patient_id FROM " + tableName + " ORDER BY service_id, protocol_id"; PreparedStatement ps = conn.prepareStatement(sql); ps.setFetchSize(5000); List<UUID> batchPatientIds = new ArrayList<>(); UUID batchServiceId = null; UUID batchProtocolId = null; ResultSet rs = ps.executeQuery(); while (rs.next()) { int col = 1; UUID serviceId = UUID.fromString(rs.getString(col++)); UUID protocolId = UUID.fromString(rs.getString(col++)); UUID patientId = UUID.fromString(rs.getString(col++)); if (batchServiceId == null || batchProtocolId == null || !serviceId.equals(batchServiceId) || !protocolId.equals(batchProtocolId)) { //send any found previously if (!batchPatientIds.isEmpty()) { LOG.debug("Doing batch of " + batchPatientIds.size() + " for service " + batchServiceId + " and protocol " + batchProtocolId); QueueHelper.queueUpFullServiceForPopulatingSubscriber(batchServiceId, batchProtocolId, batchPatientIds); } batchServiceId = serviceId; batchProtocolId = protocolId; batchPatientIds = new ArrayList<>(); } batchPatientIds.add(patientId); } //do the remainder if (!batchPatientIds.isEmpty()) { LOG.debug("Doing batch of " + batchPatientIds.size() + " for service " + batchServiceId + " and protocol " + batchProtocolId); QueueHelper.queueUpFullServiceForPopulatingSubscriber(batchServiceId, batchProtocolId, batchPatientIds); } conn.close(); LOG.info("Finished sending patients to subscriber from " + tableName); } catch (Throwable t) { LOG.error("", t); } } /** * checks Services to see if any queued up exchange was not yet processed when a bulk subscriber load was started, * meaning that some data was not sent to that subscriber. 
Populates a table with IDs that can then be queued up * for sending * * tableName should be of a table with this schema: create table tmp.patients_to_requeue ( service_id char(36), protocol_id char(36), bulk_exchange_id char(36), patient_id char(36) ); */ private static void findMissedExchanges(String tableName, String odsCodeRegex) { LOG.info("Finding missed exchanges filtering on orgs using " + odsCodeRegex + ", storing results in " + tableName); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { if (shouldSkipService(service, odsCodeRegex)) { continue; } LOG.debug("Doing " + service); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { LOG.debug("Doing system " + systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemId, Integer.MAX_VALUE); //go through exchanges and look for ones that were created for a bulk subscriber load for (int i=0; i<exchanges.size(); i++) { Exchange bulkExchange = exchanges.get(i); //if the exchange contains the header key to prevent re-queueing then it's possible //it's one for the bulk load boolean isBulkLoad = false; Boolean allowRequeuing = bulkExchange.getHeaderAsBoolean(HeaderKeys.AllowQueueing); if (allowRequeuing != null && !allowRequeuing.booleanValue()) { List<ExchangeEvent> events = exchangeDal.getExchangeEvents(bulkExchange.getId()); for (ExchangeEvent event: events) { String eventDesc = event.getEventDesc(); //note weird text check to handle the two versions of this message used if (eventDesc.contains("reated exchange to populate subscribers in protocol")) { isBulkLoad = true; LOG.debug("Bulk load found in exchange " + bulkExchange.getId() + " on " + bulkExchange.getTimestamp() + ": " + eventDesc); break; } } } if (!isBulkLoad) { continue; } //if this exchange is a bulk load, then we need to check any exchanges received BEFORE it //that didn't contain the protocol in their headers were 100% finished with their inbound //transform before the bulk load String[] protocolIds = bulkExchange.getHeaderAsStringArray(HeaderKeys.ProtocolIds); if (protocolIds.length != 1) { throw new Exception("Bulk Exchange " + bulkExchange.getId() + " has " + protocolIds.length + " protocol IDs in its header"); } String protocolId = protocolIds[0]; Date dtBulk = bulkExchange.getTimestamp(); Set<UUID> patientsToFix = new HashSet<>(); for (int j=i+1; j<exchanges.size(); j++) { Exchange priorExchange = exchanges.get(j); //skip any other special exchanges that are for bulk loads etc Boolean priorAllowRequeuing = priorExchange.getHeaderAsBoolean(HeaderKeys.AllowQueueing); if (priorAllowRequeuing != null && !priorAllowRequeuing.booleanValue()) { continue; } //skip any where the header contains the same protocol ID, as this data will have gone //to the subscriber anyway boolean hadSameProtocol = false; String[] priorProtocolIds = priorExchange.getHeaderAsStringArray(HeaderKeys.ProtocolIds); if (priorProtocolIds == null) { throw new Exception("Null protocol IDs for exchange " + priorExchange.getId()); } for (String priorProtocolId: priorProtocolIds) { if (priorProtocolId.equals(protocolId)) { hadSameProtocol = true; } } if (hadSameProtocol) { continue; } //skip any that didn't actually transform any dta ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); List<ExchangeBatch> batches = 
exchangeBatchDal.retrieveForExchangeId(priorExchange.getId()); if (batches.isEmpty()) { continue; } List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(service.getId(), systemId, priorExchange.getId()); if (transformAudits.isEmpty()) { throw new Exception("No transform audits for exchange " + priorExchange.getId()); } ExchangeTransformAudit firstTransformAudit = null; for (ExchangeTransformAudit transformAudit: transformAudits) { if (transformAudit.getEnded() != null) { firstTransformAudit = transformAudit; break; } } if (firstTransformAudit == null) { throw new Exception("No finished transform audit found for exchange " + priorExchange.getId()); } Date dtTransform = firstTransformAudit.getEnded(); if (dtTransform.before(dtBulk)) { //if the transform finished before the bulk, then we're OK and don't need to look at any more exchanges break; } //if the transform didn't finish until AFTER the bulk was started, then this exchange's data //won't have gone to the subscriber LOG.debug("Exchange " + priorExchange.getId() + " finished transform on " + dtTransform + " so missed going to subscriber"); for (ExchangeBatch b: batches) { UUID patientId = b.getEdsPatientId(); if (patientId != null) { patientsToFix.add(patientId); } } LOG.debug("Found " + batches.size() + " batches, patients to fix = " + patientsToFix.size()); } LOG.debug("Found total " + patientsToFix.size() + " patients to fix"); //save the list of patients to a table if (!patientsToFix.isEmpty()) { Connection conn = ConnectionManager.getEdsConnection(); PreparedStatement ps = conn.prepareStatement("INSERT INTO " + tableName + " VALUES (?, ?, ?, ?)"); for (UUID patientId : patientsToFix) { int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, protocolId); ps.setString(col++, bulkExchange.getId().toString()); ps.setString(col++, patientId.toString()); ps.addBatch(); } ps.executeBatch(); conn.commit(); ps.close(); conn.close(); } } } } LOG.info("Finished finding missed exchanges"); } catch (Throwable t) { LOG.error("", t); } } private static boolean shouldSkipService(Service service, String odsCodeRegex) { if (odsCodeRegex == null) { return false; } String odsCode = service.getLocalId(); if (Strings.isNullOrEmpty(odsCode) || !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); return true; } return false; } private static void createDeleteZipsForSubscriber(int batchSize, String sourceTable, int subscriberId) { LOG.info("Create Zips For Subscriber from " + sourceTable + " subscriberId " + subscriberId + " and batchSize " + batchSize); try { Connection conn = ConnectionManager.getEdsNonPooledConnection(); String sql = "SELECT enterprise_id FROM " + sourceTable + " WHERE done = 0 AND subscriber_id = ? 
    private static void createDeleteZipsForSubscriber(int batchSize, String sourceTable, int subscriberId) {
        LOG.info("Create Zips For Subscriber from " + sourceTable + " subscriberId " + subscriberId + " and batchSize " + batchSize);
        try {
            Connection conn = ConnectionManager.getEdsNonPooledConnection();

            String sql = "SELECT enterprise_id FROM " + sourceTable + " WHERE done = 0 AND subscriber_id = ? LIMIT " + batchSize;
            PreparedStatement psSelect = conn.prepareStatement(sql);

            sql = "UPDATE " + sourceTable + " SET done = 1 WHERE enterprise_id = ?";
            PreparedStatement psDone = conn.prepareStatement(sql);

            int batchesDone = 0;
            int idsDone = 0;

            while (true) {

                List<Long> ids = new ArrayList<>();

                psSelect.setInt(1, subscriberId);
                ResultSet rs = psSelect.executeQuery();
                while (rs.next()) {
                    long id = rs.getLong(1);
                    ids.add(new Long(id));
                }

                if (ids.isEmpty()) {
                    break;
                }

                OutputContainer container = new OutputContainer();
                org.endeavourhealth.transform.subscriber.targetTables.Observation obsWriter = container.getObservations();
                for (Long id: ids) {
                    SubscriberId idWrapper = new SubscriberId(SubscriberTableId.OBSERVATION.getId(), id.longValue(), null, null);
                    obsWriter.writeDelete(idWrapper);
                }

                byte[] bytes = container.writeToZip();
                String base64 = Base64.getEncoder().encodeToString(bytes);

                SubscriberZipFileUUIDsDalI szfudi = DalProvider.factorySubscriberZipFileUUIDs();
                szfudi.createSubscriberZipFileUUIDsEntity(subscriberId, UUID.randomUUID().toString(), UUID.randomUUID().toString(), base64);

                //update the table to say done
                batchesDone ++;
                for (Long id: ids) {
                    psDone.setLong(1, id.longValue());
                    psDone.addBatch();
                    idsDone ++;
                }
                psDone.executeBatch();
                conn.commit();

                LOG.debug("Done " + batchesDone + ", total = " + idsDone);

                if (ids.size() < batchSize) {
                    break;
                }
            }

            psSelect.close();
            psDone.close();
            conn.close();

            LOG.debug("Finished at " + batchesDone + ", total = " + idsDone);
            LOG.info("Finished Create Zips For Subscriber");

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    /*private static void testJmx() {
        LOG.info("Testing JMX");
        try {
            LOG.debug("----OperatingSystemMXBean--------------------------------");
            OperatingSystemMXBean osb = ManagementFactory.getOperatingSystemMXBean();
            LOG.debug("getName = " + osb.getName());
            LOG.debug("getSystemLoadAverage = " + osb.getSystemLoadAverage());
            LOG.debug("getArch = " + osb.getArch());
            LOG.debug("getVersion = " + osb.getVersion());
            LOG.debug("getAvailableProcessors = " + osb.getAvailableProcessors());

            LOG.debug("----MemoryMXBean--------------------------------");
            MemoryMXBean mb = ManagementFactory.getMemoryMXBean();
            LOG.debug("getNonHeapMemoryUsage = " + mb.getNonHeapMemoryUsage());
            LOG.debug("getHeapMemoryUsage = " + mb.getHeapMemoryUsage());
            LOG.debug("getObjectPendingFinalizationCount = " + mb.getObjectPendingFinalizationCount());

            LOG.debug("----MemoryMXBean--------------------------------");
            com.sun.management.OperatingSystemMXBean sosb = (com.sun.management.OperatingSystemMXBean)ManagementFactory.getOperatingSystemMXBean();
            LOG.debug("getSystemCpuLoad = " + sosb.getSystemCpuLoad());
            LOG.debug("getTotalPhysicalMemorySize = " + sosb.getTotalPhysicalMemorySize());

            LOG.info("Finished Testing JMX");
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }*/

    /*private static void testDatabases(String odsCodesStr, String subscriberConfigNamesStr) {
        LOG.info("Testing all databases");
        try {
            String[] odsCodes = odsCodesStr.split("\\|");
            String[] subscriberConfigNames = subscriberConfigNamesStr.split("\\|");

            for (String odsCode: odsCodes) {
                LOG.debug("---------------------------------------------------------------");
                LOG.debug("Doing " + odsCode);

                //admin
                LOG.debug("Doing admin");
                ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
                Service service = serviceDalI.getByLocalIdentifier(odsCode);
                LOG.debug("Admin test " + service);
                UUID serviceId = service.getId();

                //EDS
                LOG.debug("Doing EDS");
                PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal();
                List<UUID>
patientIds = patientSearchDal.getPatientIds(serviceId); LOG.debug("EDS test = " + patientIds.size()); PatientLinkDalI patientLinkDalI = DalProvider.factoryPatientLinkDal(); List<PatientLinkPair> changes = patientLinkDalI.getChangesSince(new Date()); LOG.debug("EDS (hibernate) test = " + changes.size()); //reference LOG.debug("Doing reference"); String snomedTerm = TerminologyService.lookupSnomedTerm("10000006"); LOG.debug("Reference test = " + snomedTerm); //HL7 Receiver LOG.debug("Doing HL7 Receiver"); Hl7ResourceIdDalI hl7ResourceIdDal = DalProvider.factoryHL7ResourceDal(); ResourceId id = hl7ResourceIdDal.getResourceId("B", "Patient", "PIdAssAuth=2.16.840.1.113883.3.2540.1-PatIdValue=N7619764"); LOG.debug("HL7 receiver test = " + id); //audit LOG.debug("Doing audit"); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); List<UUID> systemIds = findSystemIds(service); UUID systemId = systemIds.get(0); List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 100); LOG.debug("Audit test " + exchanges.size()); //publisher common LOG.debug("Doing publisher common"); EmisTransformDalI emisTransformDalI = DalProvider.factoryEmisTransformDal(); EmisCsvCodeMap codeMap = emisTransformDalI.getCodeMapping(false, 654010L); LOG.debug("Publisher common test " + codeMap); boolean wasAdminApplied = emisTransformDalI.wasAdminCacheApplied(serviceId); LOG.debug("Publisher common (hibernate) test " + wasAdminApplied); //sftp reader LOG.debug("Doing SFTP reader"); EntityManager entityManager = ConnectionManager.getSftpReaderEntityManager(); PreparedStatement ps = null; SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); String sql = null; if (ConnectionManager.isPostgreSQL(connection)) { sql = "SELECT instance_name FROM configuration.instance ORDER BY instance_name"; } else { sql = "SELECT instance_name FROM instance ORDER BY instance_name"; } ps = connection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); rs.next(); LOG.debug("SFTP Reader test " + rs.getString(1)); ps.close(); entityManager.close(); //publisher transform LOG.debug("Doing publisher transform"); ResourceIdTransformDalI resourceIdTransformDalI = DalProvider.factoryResourceIdTransformDal(); List<Reference> references = new ArrayList<>(); UUID patientId = patientIds.get(0); references.add(ReferenceHelper.createReference(ResourceType.Patient, patientId.toString())); Map<Reference, Reference> map = resourceIdTransformDalI.findSourceReferencesFromEdsReferences(serviceId, references); LOG.debug("publisher transform done " + map); //ehr LOG.debug("Doing EHR"); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceId, ResourceType.Patient.toString(), patientId); LOG.debug("EHR done " + (wrapper != null)); for (String subscriberConfigName: subscriberConfigNames) { //subscriber transform LOG.debug("Doing subscriber transform " + subscriberConfigName); SubscriberOrgMappingDalI subscriberOrgMappingDalI = DalProvider.factorySubscriberOrgMappingDal(subscriberConfigName); Long enterpriseId = subscriberOrgMappingDalI.findEnterpriseOrganisationId(serviceId.toString()); LOG.debug("Subscriber transform on " + subscriberConfigName + " done " + enterpriseId); //subscriber LOG.debug("Doing subscribers from " + subscriberConfigName); List<EnterpriseConnector.ConnectionWrapper> subscriberConnections = EnterpriseConnector.openConnection(subscriberConfigName); for 
(EnterpriseConnector.ConnectionWrapper subscriberConnection : subscriberConnections) { Connection connection1 = subscriberConnection.getConnection(); sql = "SELECT name FROM organization WHERE id = ?"; ps = connection1.prepareStatement(sql); if (enterpriseId != null) { ps.setLong(1, enterpriseId); } else { //if no ID found, just use a substitute number ps.setLong(1, 999); } rs = ps.executeQuery(); String orgId = null; if (rs.next()) { orgId = rs.getString(1); } LOG.debug("subscriber on " + subscriberConfigName + " (" + subscriberConnection.toString() + ") done " + orgId); ps.close(); connection1.close(); } } *//* FhirAudit("db_fhir_audit", true, "FhirAuditDb"), PublisherStaging("db_publisher_staging", false, "PublisherStagingDb"), DataGenerator("db_data_generator", true, "DataGeneratorDb"), *//* } LOG.info("Finished testing all databases"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEmisSnomedCodes(String odsCodeRegex) { LOG.info("Finished Fixing Emis Snomed codes for orgs " + odsCodeRegex); try { //find affected Code IDs LOG.info("Finding affected code IDs"); Set<Long> codeIds = new HashSet<>(); Map<Long, EmisCsvCodeMap> hmCodeCache = new HashMap<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EmisTransformDalI mappingRepository = DalProvider.factoryEmisTransformDal(); EntityManager publisherCommonEntityManager = ConnectionManager.getPublisherCommonEntityManager(); SessionImpl publisherCommonSession = (SessionImpl)publisherCommonEntityManager.getDelegate(); Connection publisherCommonConnection = publisherCommonSession.connection(); String sql = "SELECT code_id FROM emis_csv_code_map WHERE medication = false and read_code like '%-%'"; PreparedStatement ps = publisherCommonConnection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { long codeId = rs.getLong(1); codeIds.add(new Long(codeId)); } ps.close(); publisherCommonEntityManager.close(); LOG.info("Found " + codeIds.size() + " affected code IDs"); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { if (odsCodeRegex != null) { String odsCode = service.getLocalId(); if (Strings.isNullOrEmpty(odsCode) || !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } } LOG.info("-----------------------------------------------------------------"); LOG.info("Doing " + service); List<UUID> systems = findSystemIds(service); for (UUID systemId: systems) { LibraryItem libraryItem = LibraryRepositoryHelper.getLibraryItem(systemId); if (!libraryItem.getName().toUpperCase().contains("EMIS")) { LOG.info("Skipping system " + libraryItem.getName()); continue; } LOG.info("Doing system ID " + libraryItem.getName()); Set<String> hsObservationsDone = new HashSet<>(); Set<String> hsDiariesDone = new HashSet<>(); Set<String> hsConsultationsDone = new HashSet<>(); Set<String> hsSlotsDone = new HashSet<>(); EmisCsvHelper helper = new EmisCsvHelper(service.getId(), systemId, null, null, null); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); int fixed = 0; int found = 0; int exchangesDone = 0; for (Exchange exchange: exchanges) { exchangesDone ++; if (exchangesDone % 30 == 0) { LOG.info("Done " + exchangesDone + " of " + exchanges.size() + " exchanges"); } List<ExchangePayloadFile> files 
= ExchangeHelper.parseExchangeBody(exchange.getBody()); if (files.isEmpty() || files.size() == 1) { //custom extract continue; } if (!EmisCsvToFhirTransformer.shouldProcessPatientData(helper)) { continue; } ExchangePayloadFile observationFile = findFileOfType(files, "CareRecord_Observation"); if (observationFile != null) { LOG.debug("Doing " + observationFile.getPath()); int obsRecordsDone = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(observationFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { obsRecordsDone ++; if (obsRecordsDone % 1000 == 0) { LOG.info("Done " + obsRecordsDone + " records"); } CSVRecord record = iterator.next(); String observationGuid = record.get("ObservationGuid"); if (hsObservationsDone.contains(observationGuid)) { continue; } hsObservationsDone.add(observationGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("CodeId"); Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell observationCell = CsvCell.factoryDummyWrapper(observationGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); Set<ResourceType> resourceTypes = ObservationTransformer.findOriginalTargetResourceTypes(helper, patientCell, observationCell); for (ResourceType resourceType : resourceTypes) { String sourceId = EmisCsvHelper.createUniqueId(patientCell, observationCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), resourceType, sourceId); //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), resourceType.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; if (resourceType == ResourceType.Condition) { Condition condition = (Condition) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(condition.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Procedure) { Procedure procedure = (Procedure) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(procedure.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.AllergyIntolerance) { AllergyIntolerance allergyIntolerance = (AllergyIntolerance) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(allergyIntolerance.getSubstance()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.FamilyMemberHistory) { FamilyMemberHistory familyMemberHistory = (FamilyMemberHistory) resource; Coding coding = 
ObservationCodeHelper.findOriginalCoding(familyMemberHistory.getCondition().get(0).getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Immunization) { Immunization immunization = (Immunization) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(immunization.getVaccineCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.DiagnosticOrder) { DiagnosticOrder diagnosticOrder = (DiagnosticOrder) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(diagnosticOrder.getItem().get(0).getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Specimen) { Specimen specimen = (Specimen) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(specimen.getType()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.DiagnosticReport) { DiagnosticReport spediagnosticReportimen = (DiagnosticReport) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(spediagnosticReportimen.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.ReferralRequest) { ReferralRequest referralRequest = (ReferralRequest) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(referralRequest.getServiceRequested().get(0)); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Observation) { Observation observation = (Observation) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(observation.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else { throw new Exception("Unexpected resource type " + resourceType + " for ID " + uuid); } String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } } parser.close(); } ExchangePayloadFile diaryFile = findFileOfType(files, "CareRecord_Diary"); if (diaryFile != null) { LOG.debug("Doing " + diaryFile.getPath()); int diaryRecords = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(diaryFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); 
Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { diaryRecords ++; if (diaryRecords % 1000 == 0) { LOG.info("Done " + diaryRecords + " records"); } CSVRecord record = iterator.next(); String diaryGuid = record.get("DiaryGuid"); if (hsDiariesDone.contains(diaryGuid)) { continue; } hsDiariesDone.add(diaryGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("CodeId"); Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell diaryCell = CsvCell.factoryDummyWrapper(diaryGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); String sourceId = EmisCsvHelper.createUniqueId(patientCell, diaryCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), ResourceType.ProcedureRequest, sourceId); if (uuid == null) { continue; } //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), ResourceType.ProcedureRequest.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; ProcedureRequest procedureRequest = (ProcedureRequest) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(procedureRequest.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } parser.close(); } ExchangePayloadFile consultationFile = findFileOfType(files, "CareRecord_Consultation"); if (consultationFile != null) { LOG.debug("Doing " + consultationFile.getPath()); int consultationRecordsDone = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(consultationFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { consultationRecordsDone ++; if (consultationRecordsDone % 1000 == 
0) { LOG.info("Done " + consultationRecordsDone + " records"); } CSVRecord record = iterator.next(); String consultationGuid = record.get("ConsultationGuid"); if (hsConsultationsDone.contains(consultationGuid)) { continue; } hsConsultationsDone.add(consultationGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("ConsultationSourceCodeId"); if (Strings.isNullOrEmpty(codeIdStr)) { continue; } Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell consultationCell = CsvCell.factoryDummyWrapper(consultationGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); String sourceId = EmisCsvHelper.createUniqueId(patientCell, consultationCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), ResourceType.Encounter, sourceId); //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), ResourceType.Encounter.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; Encounter encounter = (Encounter) resource; Extension extension = ExtensionConverter.findExtension(encounter, FhirExtensionUri.ENCOUNTER_SOURCE); if (extension == null || !extension.hasValue()) { continue; } CodeableConcept codeableConcept = (CodeableConcept)extension.getValue(); Coding coding = ObservationCodeHelper.findOriginalCoding(codeableConcept); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } parser.close(); } ExchangePayloadFile slotFile = findFileOfType(files, "Appointment_slot"); if (slotFile != null) { LOG.debug("Doing " + slotFile.getPath()); int slotRecordsDone = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(slotFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); 
Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { slotRecordsDone ++; if (slotRecordsDone % 1000 == 0) { LOG.info("Done " + slotRecordsDone + " records"); } CSVRecord record = iterator.next(); String slotGuid = record.get("SlotGuid"); if (hsSlotsDone.contains(slotGuid)) { continue; } hsSlotsDone.add(slotGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("DnaReasonCodeId"); Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell slotCell = CsvCell.factoryDummyWrapper(slotGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); String sourceId = EmisCsvHelper.createUniqueId(patientCell, slotCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), ResourceType.Appointment, sourceId); //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), ResourceType.Appointment.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; Appointment encounter = (Appointment) resource; Extension extension = ExtensionConverter.findExtension(encounter, FhirExtensionUri.APPOINTMENT_DNA_REASON_CODE); if (extension == null || !extension.hasValue()) { continue; } CodeableConcept codeableConcept = (CodeableConcept)extension.getValue(); Coding coding = ObservationCodeHelper.findOriginalCoding(codeableConcept); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } parser.close(); } } LOG.info("Done " + exchangesDone + " of " + exchanges.size() + " exchanges"); LOG.info("Found " + found + " records and fixed " + fixed); } } LOG.info("Finished Fixing Emis Snomed codes for orgs " + odsCodeRegex); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void populatePatientSearchEpisodeOdsCode() { 
LOG.info("Populating Patient Search Episode ODS Codes"); try { EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); int done = 0; String sql = "SELECT service_id, patient_id, episode_id FROM tmp.patient_search_episode_tmp WHERE done = 0"; PreparedStatement ps = edsConnection.prepareStatement(sql); ps.setFetchSize(1000); ResultSet rs = ps.executeQuery(); while (rs.next()) { String serviceId = rs.getString(1); String patientId = rs.getString(2); String episodeId = rs.getString(3); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EpisodeOfCare episodeOfCare = (EpisodeOfCare)resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.EpisodeOfCare, episodeId); if (episodeOfCare != null && episodeOfCare.hasManagingOrganization()) { Reference orgReference = episodeOfCare.getManagingOrganization(); ReferenceComponents comps = org.endeavourhealth.common.fhir.ReferenceHelper.getReferenceComponents(orgReference); ResourceType type = comps.getResourceType(); String id = comps.getId(); resourceDal = DalProvider.factoryResourceDal(); Organization org = (Organization)resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), type, id); if (org != null) { String orgOdsCode = IdentifierHelper.findOdsCode(org); EntityManager edsEntityManager2 = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession2 = (SessionImpl)edsEntityManager2.getDelegate(); Connection edsConnection2 = edsSession2.connection(); sql = "UPDATE patient_search_episode SET ods_code = ? WHERE service_id = ? AND patient_id = ? AND episode_id = ?"; PreparedStatement ps2 = edsConnection2.prepareStatement(sql); edsEntityManager2.getTransaction().begin(); ps2.setString(1, orgOdsCode); ps2.setString(2, serviceId); ps2.setString(3, patientId); ps2.setString(4, episodeId); ps2.executeUpdate(); edsEntityManager2.getTransaction().commit(); ps2.close(); edsEntityManager2.close(); } } EntityManager edsEntityManager2 = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession2 = (SessionImpl)edsEntityManager2.getDelegate(); Connection edsConnection2 = edsSession2.connection(); sql = "UPDATE tmp.patient_search_episode_tmp SET done = ? WHERE service_id = ? AND patient_id = ? 
AND episode_id = ?"; PreparedStatement ps2 = edsConnection2.prepareStatement(sql); edsEntityManager2.getTransaction().begin(); ps2.setBoolean(1, true); ps2.setString(2, serviceId); ps2.setString(3, patientId); ps2.setString(4, episodeId); ps2.executeUpdate(); edsEntityManager2.getTransaction().commit(); ps2.close(); edsEntityManager2.close(); done ++; if (done % 100 == 0) { LOG.debug("Done " + done); } } rs.close(); ps.close(); LOG.debug("Done " + done); LOG.info("Finished Populating Patient Search Episode ODS Codes"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixEmisDrugRecords(String odsCodeRegex) { LOG.info("Fixing Emis drug records"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { if (odsCodeRegex != null) { String odsCode = service.getLocalId(); if (Strings.isNullOrEmpty(odsCode) || !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } } //check if Emis String notes = service.getNotes(); if (notes == null || !notes.contains("CDB")) { LOG.info("Skipping as not Emis: " + service); continue; } LOG.info("Doing " + service); List<UUID> systems = findSystemIds(service); for (UUID systemId: systems) { LOG.info("Doing system ID " + systemId); LOG.info("Finding patients"); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientIds = patientSearchDal.getPatientIds(service.getId()); LOG.info("Found " + patientIds.size() + " patients"); //create dummy exchange String bodyJson = JsonSerializer.serialize(new ArrayList<ExchangePayloadFile>()); String odsCode = service.getLocalId(); Exchange exchange = null; UUID exchangeId = UUID.randomUUID(); List<UUID> batchIdsCreated = new ArrayList<>(); FhirResourceFiler filer = new FhirResourceFiler(exchangeId, service.getId(), systemId, new TransformError(), batchIdsCreated); int done = 0; for (UUID patientId : patientIds) { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<ResourceWrapper> statementWrappers = resourceDal.getResourcesByPatient(service.getId(), patientId, ResourceType.MedicationStatement.toString()); List<ResourceWrapper> orderWrappers = null; //get on demand for (ResourceWrapper statementWrapper : statementWrappers) { MedicationStatement medicationStatement = (MedicationStatement) statementWrapper.getResource(); if (!medicationStatement.hasStatus()) { continue; } MedicationStatementBuilder builder = new MedicationStatementBuilder(medicationStatement); boolean fixed = false; Date cancellationDate = null; Extension outerExtension = ExtensionConverter.findExtension(medicationStatement, FhirExtensionUri.MEDICATION_AUTHORISATION_CANCELLATION); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, "date"); if (innerExtension != null) { DateType dt = (DateType) innerExtension.getValue(); cancellationDate = dt.getValue(); } } if (medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.ACTIVE) { //if active then ensure there's no cancellation date if (cancellationDate != null) { builder.setCancellationDate(null); fixed = true; } } else if (medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.COMPLETED) { //if non-active, then ensure there IS a cancellation date if (cancellationDate == null) { IssueRecordIssueDate mostRecentDate = null; Reference medicationStatementReference = 
ReferenceHelper.createReferenceExternal(medicationStatement); if (orderWrappers == null) { orderWrappers = resourceDal.getResourcesByPatient(service.getId(), patientId, ResourceType.MedicationOrder.toString()); } for (ResourceWrapper orderWrapper : orderWrappers) { //quick check against the raw JSON so we don't have to deserialise the bulk of them String orderJson = orderWrapper.getResourceData(); if (!orderJson.contains(medicationStatementReference.getReference())) { continue; } MedicationOrder order = (MedicationOrder) orderWrapper.getResource(); MedicationOrderBuilder medicationOrderBuilder = new MedicationOrderBuilder(order); Reference reference = medicationOrderBuilder.getMedicationStatementReference(); if (reference != null && ReferenceHelper.equals(reference, medicationStatementReference)) { DateTimeType started = medicationOrderBuilder.getDateWritten(); Integer duration = medicationOrderBuilder.getDurationDays(); IssueRecordIssueDate obj = new IssueRecordIssueDate(started, duration); if (obj.afterOrOtherIsNull(mostRecentDate)) { mostRecentDate = obj; } } } //if no issues exist for it, use the start date of the DrugRecord if (mostRecentDate == null) { Date d = medicationStatement.getDateAsserted(); mostRecentDate = new IssueRecordIssueDate(new DateTimeType(d), new Integer(0)); } Date d = mostRecentDate.getIssueDateType().getValue(); int duration = 0; Integer intObj = mostRecentDate.getIssueDuration(); if (intObj != null) { duration = intObj.intValue(); } Calendar cal = Calendar.getInstance(); cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, duration); cancellationDate = cal.getTime(); builder.setCancellationDate(cancellationDate); fixed = true; } } else { LOG.error("Unexpected status " + medicationStatement.getStatus() + " on resource " + statementWrapper); } if (fixed) { if (exchange == null) { exchange = new Exchange(); exchange.setId(exchangeId); exchange.setBody(bodyJson); exchange.setTimestamp(new Date()); exchange.setHeaders(new HashMap<>()); exchange.setHeaderAsUuid(HeaderKeys.SenderServiceUuid, service.getId()); exchange.setHeader(HeaderKeys.ProtocolIds, ""); //just set to non-null value, so postToExchange(..) 
can safely recalculate exchange.setHeader(HeaderKeys.SenderLocalIdentifier, odsCode); exchange.setHeaderAsUuid(HeaderKeys.SenderSystemUuid, systemId); exchange.setHeader(HeaderKeys.SourceSystem, MessageFormat.EMIS_CSV); exchange.setServiceId(service.getId()); exchange.setSystemId(systemId); AuditWriter.writeExchange(exchange); AuditWriter.writeExchangeEvent(exchange, "Manually created to re-process Emis DrugRecord data"); } //save resource filer.savePatientResource(null, false, builder); } } done++; if (done % 100 == 0) { LOG.info("Done " + done + " patients"); } } LOG.info("Done " + done + " patients"); //close down filer filer.waitToFinish(); if (exchange != null) { //set multicast header String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); //post to Rabbit protocol queue List<UUID> exchangeIds = new ArrayList<>(); exchangeIds.add(exchange.getId()); QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, true); } } } LOG.info("Finished Fixing Emis drug records"); } catch (Throwable t) { LOG.error("", t); } }*/ /** * populates the pseudo_id table on a new-style subscriber DB (MySQL or SQL Server) with pseudo_ids generated * from a salt */ /*private static void populateSubscriberPseudoId(String subscriberConfigName, String saltKeyName) { LOG.info("Populating subscriber DB pseudo ID for " + subscriberConfigName + " using " + saltKeyName); try { //find salt details JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); JsonNode linkDistributorsNode = config.get("pseudo_salts"); if (linkDistributorsNode == null) { throw new Exception("No pseudo_salts found in config"); } ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(linkDistributorsNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig[] arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); LinkDistributorConfig saltConfig = null; for (LinkDistributorConfig l : arr) { if (l.getSaltKeyName().equals(saltKeyName)) { saltConfig = l; } } if (saltConfig == null) { throw new Exception("No salt config found for " + saltKeyName); } String sql = "SELECT source_id, subscriber_id" + " FROM subscriber_id_map" + " WHERE source_id LIKE '" + ResourceType.Patient.toString() + "%'" + " AND subscriber_table = " + SubscriberTableId.PATIENT.getId(); Map<String, Long> hmPatients = new HashMap<>(); EntityManager subscriberTransformEntityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfigName); SessionImpl subscriberTransformSession = (SessionImpl)subscriberTransformEntityManager.getDelegate(); Connection subscriberTransformConnection = subscriberTransformSession.connection(); PreparedStatement ps = subscriberTransformConnection.prepareStatement(sql); ps.setFetchSize(1000); LOG.info("Running query to find patients"); ResultSet rs = ps.executeQuery(); while (rs.next()) { String sourceId = rs.getString(1); Long subscriberId = rs.getLong(2); hmPatients.put(sourceId, subscriberId); if (hmPatients.size() % 5000 == 0) { LOG.info("Found " + hmPatients.size()); } } ps.close(); subscriberTransformEntityManager.clear(); LOG.info("Query done, found " + hmPatients.size() + " patients"); int done = 0; int skipped = 0; File fixFile = new File("FIX_" + subscriberConfigName + "_" + saltKeyName + ".sql"); PrintWriter fixWriter = new PrintWriter(new BufferedWriter(new 
FileWriter(fixFile))); File errorFile = new File("ERRORS_" + subscriberConfigName + "_" + saltKeyName + ".txt"); PrintWriter errorWriter = new PrintWriter(new BufferedWriter(new FileWriter(errorFile))); LOG.info("Starting to process patients"); String fixSql = "DROP TABLE IF EXISTS pseudo_id_tmp;"; fixWriter.println(fixSql); fixSql = "CREATE TABLE pseudo_id_tmp (id bigint, patient_id bigint, salt_key_name varchar(50), pseudo_id varchar(255));"; fixWriter.println(fixSql); List<String> batch = new ArrayList<>(); for (String sourceId: hmPatients.keySet()) { Long subscriberId = hmPatients.get(sourceId); Reference ref = ReferenceHelper.createReference(sourceId); String patientUuidStr = ReferenceHelper.getReferenceId(ref); UUID patientUuid = UUID.fromString(patientUuidStr); //need to find the service ID PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); PatientSearch patientSearch = patientSearchDal.searchByPatientId(patientUuid); if (patientSearch == null) { errorWriter.println("Failed to find patient search record for " + sourceId + " with subscriber ID " + subscriberId); skipped ++; continue; } //find current FHIR patient UUID serviceId = patientSearch.getServiceId(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientUuidStr); if (patient == null) { errorWriter.println("Null FHIR Patient for " + sourceId + " with subscriber ID " + subscriberId); skipped ++; continue; } String pseudoId = PseudoIdBuilder.generatePsuedoIdFromConfig(subscriberConfigName, saltConfig, patient); //need to store in our pseudo ID mapping table if (pseudoId != null) { PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfigName); pseudoIdDal.saveSubscriberPseudoId(patientUuid, subscriberId.longValue(), saltKeyName, pseudoId); String pseudoIdRowSourceId = ReferenceHelper.createReferenceExternal(patient).getReference() + PatientTransformer.PREFIX_PSEUDO_ID + saltKeyName; SubscriberResourceMappingDalI enterpriseIdDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); SubscriberId pseudoIdRowId = enterpriseIdDal.findOrCreateSubscriberId(SubscriberTableId.PSEUDO_ID.getId(), pseudoIdRowSourceId); batch.add("(" + pseudoIdRowId.getSubscriberId() + ", " + subscriberId + ", '" + saltKeyName + "', '" + pseudoId + "')"); if (batch.size() >= 50) { fixSql = "INSERT INTO pseudo_id_tmp (id, patient_id, salt_key_name, pseudo_id) VALUES " + String.join(", ", batch) + ";"; fixWriter.println(fixSql); batch.clear(); } //fixSql = "INSERT INTO pseudo_id_tmp (id, patient_id, salt_key_name, pseudo_id) VALUES (" + pseudoIdRowId.getSubscriberId() + ", " + subscriberId + ", '" + saltKeyName + "', '" + pseudoId + "');"; //fixWriter.println(fixSql); } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + ", skipped " + skipped); } } if (!batch.isEmpty()) { fixSql = "INSERT INTO pseudo_id_tmp (id, patient_id, salt_key_name, pseudo_id) VALUES " + String.join(", ", batch) + ";"; fixWriter.println(fixSql); } fixSql = "CREATE INDEX ix ON pseudo_id_tmp (patient_id);"; fixWriter.println(fixSql); fixSql = "DELETE FROM pseudo_id WHERE salt_key_name = '" + saltKeyName + "';"; fixWriter.println(fixSql); fixSql = "INSERT INTO pseudo_id SELECT t.id, t.patient_id, t.salt_key_name, t.pseudo_id FROM pseudo_id_tmp t INNER JOIN patient p ON p.id = t.patient_id;"; fixWriter.println(fixSql); fixSql = "DROP TABLE pseudo_id_tmp;"; fixWriter.println(fixSql); fixWriter.close(); 
errorWriter.close(); LOG.info("Finished Populating subscriber DB pseudo ID for " + subscriberConfigName + " using " + saltKeyName); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void investigateMissingPatients(String nhsNumberFile, String protocolName, String subscriberConfigName, String ccgCodeRegex) { LOG.info("Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); String salt = null; JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); ArrayNode linked = (ArrayNode)config.get("linkedDistributors"); for (int i=0; i<linked.size(); i++) { JsonNode linkedElement = linked.get(i); String name = linkedElement.get("saltKeyName").asText(); if (name.equals("EGH")) { salt = linkedElement.get("salt").asText(); } } //go through file and check File inputFile = new File(nhsNumberFile); if (!inputFile.exists()) { throw new Exception(nhsNumberFile + " doesn't exist"); } List<String> nhsNumbers = Files.readAllLines(inputFile.toPath()); LOG.info("Found " + nhsNumbers.size()); String fileName = FilenameUtils.getBaseName(nhsNumberFile); File outputCsvFile = new File("OUTPUT_" + fileName + ".csv"); BufferedWriter bw = new BufferedWriter(new FileWriter(outputCsvFile)); CSVPrinter outputPrinter = new CSVPrinter(bw, CSVFormat.DEFAULT.withHeader("nhs_number", "pseudo_id", "finding", "comment")); File outputTextFile = new File("OUTPUT_" + nhsNumberFile); List<String> lines = new ArrayList<>(); for (String nhsNumber: nhsNumbers) { LOG.debug("Doing " + nhsNumber); PseudoIdBuilder b = new PseudoIdBuilder(subscriberConfigName, "EGH", salt); b.addValueNhsNumber("NhsNumber", nhsNumber, null); String calcPseudoId = b.createPseudoId(); String finding = null; String comment = null; lines.add(">>>>>>>>> " + nhsNumber + " <<<<<<<<<<"); EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); String sql = "select patient_id, service_id, local_id, ccg_code" + " from eds.patient_link_person p" + " inner join eds.patient_link_history h" + " on h.new_person_id = p.person_id" + " inner join admin.service s" + " on s.id = service_id" + " where nhs_number = ?" 
+ " and organisation_type = 'PR'"; PreparedStatement ps = edsConnection.prepareStatement(sql); ps.setString(1, nhsNumber); List<PatientInfo> patientInfos = new ArrayList<>(); //LOG.debug(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { PatientInfo info = new PatientInfo(); info.patientUuid = rs.getString(1); info.serviceUuid = rs.getString(2); info.odsCode = rs.getString(3); info.ccgCode = rs.getString(4); patientInfos.add(info); } ps.close(); edsEntityManager.close(); //check to see if the patient does exist in the CCG but has been deleted or had their NHS number changed for (PatientInfo info: patientInfos) { lines.add("Found " + info); if (!Pattern.matches(ccgCodeRegex, info.ccgCode)) { lines.add("Ignoring as out of CCG area"); continue; } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<ResourceWrapper> history = resourceDal.getResourceHistory(UUID.fromString(info.serviceUuid), ResourceType.Patient.toString(), UUID.fromString(info.patientUuid)); if (history.isEmpty()) { lines.add("No history found for patient"); finding = "ERROR"; comment = "Couldn't find FHIR resource history"; continue; } ResourceWrapper current = history.get(0); if (current.isDeleted()) { lines.add("Patient resource is deleted"); *//*finding = "Deleted"; comment = "Patient record has been deleted from DDS";*//* continue; } Patient currentFhir = (Patient) current.getResource(); String currentNhsNumber = IdentifierHelper.findNhsNumber(currentFhir); lines.add("Current NHS number = " + currentNhsNumber); if (!currentNhsNumber.equals(nhsNumber)) { boolean nhsNumberChanged = false; for (int i=1; i<history.size(); i++) { ResourceWrapper wrapper = history.get(i); if (wrapper.isDeleted()) { continue; } Patient past = (Patient) wrapper.getResource(); String pastNhsNumber = IdentifierHelper.findNhsNumber(past); lines.add("History " + i + " has NHS number " + pastNhsNumber); if (pastNhsNumber != null && pastNhsNumber.equals(nhsNumber)) { ResourceWrapper wrapperChanged = history.get(i-1); String changedNhsNumber = IdentifierHelper.findNhsNumber(past); lines.add("NHS number changed from " + nhsNumber + " to " + changedNhsNumber + " on " + sdf.format(wrapperChanged.getCreatedAt())); finding = "NHS number changed"; comment = "NHS number changed on " + sdf.format(wrapperChanged.getCreatedAt()); nhsNumberChanged = true; break; } } if (nhsNumberChanged) { continue; } } //if NHS number didn't change, then it SHOULD match the existing DB SubscriberResourceMappingDalI subscriberResourceMappingDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); Long enterpriseId = subscriberResourceMappingDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), info.patientUuid); if (enterpriseId == null) { finding = "ERROR"; comment = "Matches current NHS number, so should be in subscriber DB but can't find enterprise ID"; lines.add("" + info.patientUuid + ": no enterprise ID found"); continue; } List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(subscriberConfigName); EnterpriseConnector.ConnectionWrapper first = connectionWrappers.get(0); sql = "SELECT id, target_skid" + " FROM patient" + " LEFT OUTER JOIN link_distributor" + " ON patient.pseudo_id = link_distributor.source_skid" + " WHERE patient.id = ?"; Connection enterpriseConnection = first.getConnection(); PreparedStatement enterpriseStatement = enterpriseConnection.prepareStatement(sql); enterpriseStatement.setLong(1, enterpriseId.longValue()); rs = enterpriseStatement.executeQuery(); if 
(rs.next()) { long id = rs.getLong(1); String pseudoId = rs.getString(2); lines.add("" + info.patientUuid + ": enterprise ID " + id + " with pseudo ID " + pseudoId); lines.add("" + info.patientUuid + ": expected pseudo ID " + calcPseudoId); if (pseudoId.equals(calcPseudoId)) { finding = "Match"; comment = "Matches current NHS number and is in subscriber DB"; lines.add("" + info.patientUuid + ": found in subscriber DB with right pseudo ID"); } else { finding = "Mis-match"; comment = "Matches current NHS number and is in subscriber DB but pseudo ID is different"; lines.add("" + info.patientUuid + ": found in subscriber DB but with wrong pseudo ID"); } } else { finding = "ERROR"; comment = "Matches current NHS number and enterprise ID = " + enterpriseId + " but not in DB"; } enterpriseStatement.close(); enterpriseConnection.close(); continue; } //if we've not found anything above, check patient_search for the NHS number to see if we can work out where they are if (finding == null) { lines.add("Checking patient_search"); //check patient search edsEntityManager = ConnectionManager.getEdsEntityManager(); edsSession = (SessionImpl)edsEntityManager.getDelegate(); edsConnection = edsSession.connection(); sql = "select local_id, ccg_code, pse.registration_start" + " from eds.patient_search ps" + " inner join admin.service s" + " on s.id = ps.service_id" + " and s.organisation_type = 'PR'" + " inner join eds.patient_search_episode pse" + " on pse.service_id = ps.service_id" + " and pse.patient_id = ps.patient_id" + " and pse.registration_end is null" + " where nhs_number = ?" + " order by pse.registration_start desc" + " limit 1"; ps = edsConnection.prepareStatement(sql); ps.setString(1, nhsNumber); //LOG.debug(sql); rs = ps.executeQuery(); if (rs.next()) { String odsCode = rs.getString(1); String ccgCode = rs.getString(2); Date regDate = new Date(rs.getTimestamp(3).getTime()); OdsOrganisation odsOrg = OdsWebService.lookupOrganisationViaRest(odsCode); OdsOrganisation parentOdsOrg = null; if (!Strings.isNullOrEmpty(ccgCode)) { parentOdsOrg = OdsWebService.lookupOrganisationViaRest(ccgCode); } if (odsOrg == null) { lines.add("Registered at " + odsCode + " but failed to find ODS record for " + odsCode); finding = "ERROR"; comment = "Registered at " + odsCode + " but not found in open ODS"; } else if (parentOdsOrg == null) { finding = "ERROR"; comment = "Registered at " + odsOrg.getOdsCode() + " " + odsOrg.getOrganisationName() + " but no ODS record found for parent " + ccgCode; } else { finding = "Out of area"; comment = "Patient registered in " + parentOdsOrg.getOdsCode() + " " + parentOdsOrg.getOrganisationName() + " since " + sdf.format(regDate); } } else { finding = "Unknown"; comment = "No data for NHS number found (within scope of DDS)"; } ps.close(); edsEntityManager.close(); } outputPrinter.printRecord(nhsNumber, calcPseudoId, finding, comment); } Files.write(outputTextFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE); outputPrinter.close(); LOG.info("Finished Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); } catch (Throwable t) { LOG.error("", t); } } static class PatientInfo { String patientUuid; String serviceUuid; String odsCode; String ccgCode; @Override public String toString() { return "ods " + odsCode + ", ccgCode " + ccgCode + ", serviceUuid " + serviceUuid + ", patientUUID " + patientUuid; } }*/ /*static class NhsNumberInfo { String odsCode; String date; String patientGuid; String 
patientUuid; String nhsNumber; String deleted; @Override public String toString() { return "ods " + odsCode + ", date " + date + ", patientGuid " + patientGuid + ", patientUUID " + patientUuid + ", NHS " + nhsNumber + ", deleted " + deleted; } } private static void investigateMissingPatients(String nhsNumberFile, String protocolName, String subscriberConfigName, String odsCodeRegex) { LOG.info("Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); //go through all publishers and find all history of NHS numbers File nhsNumberHistoryFile = new File(protocolName.replace(" ", "_") + "_NHS_number_history.txt"); if (!nhsNumberHistoryFile.exists()) { LOG.info("Need to create NHS number history file " + nhsNumberHistoryFile); LibraryItem matchedLibraryItem = null; LibraryDalI repository = DalProvider.factoryLibraryDal(); List<ActiveItem> activeItems = repository.getActiveItemByTypeId(Integer.valueOf(DefinitionItemType.Protocol.getValue()), Boolean.valueOf(false)); for (ActiveItem activeItem: activeItems) { Item item = repository.getItemByKey(activeItem.getItemId(), activeItem.getAuditId()); String xml = item.getXmlContent(); LibraryItem libraryItem = (LibraryItem) XmlSerializer.deserializeFromString(LibraryItem.class, xml, (String)null); String name = libraryItem.getName(); if (name.equals(protocolName)) { matchedLibraryItem = libraryItem; break; } } if (matchedLibraryItem == null) { throw new Exception("Failed to find protocol"); } for (ServiceContract serviceContract: matchedLibraryItem.getProtocol().getServiceContract()) { if (serviceContract.getType() == ServiceContractType.SUBSCRIBER) { continue; } String serviceIdStr = serviceContract.getService().getUuid(); UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceDal.getById(serviceId); String odsCode = service.getLocalId(); if (odsCodeRegex != null && !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } LOG.info("Doing " + service); List<String> lines = new ArrayList<>(); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); ExchangePayloadFile patientFile = findFileOfType(files, "Admin_Patient"); if (patientFile == null) { continue; } Date dataDate = exchange.getHeaderAsDate(HeaderKeys.DataDate); String dateDateStr = sdf.format(dataDate); //work out file version List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); //create the parser String path = patientFile.getPath(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); String patientUuidStr = null; UUID patientUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.Patient, patientGuid); if (patientUuid == null) { patientUuidStr = "NOUUID"; } else { 
patientUuidStr = patientUuid.toString(); } CsvCell nhsNumberCell = parser.getNhsNumber(); String nhsNumber = nhsNumberCell.getString(); if (Strings.isNullOrEmpty(nhsNumber)) { nhsNumber = "BLANK"; } else { nhsNumber.replace(" ", ""); } CsvCell deletedCell = parser.getDeleted(); String deletedStr = deletedCell.getString(); lines.add(odsCode + "_" + dateDateStr + "_" + patientGuid + "_" + patientUuidStr + "_" + nhsNumber + "_" + deletedStr); } parser.close(); } } Files.write(nhsNumberHistoryFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); LOG.debug("Done " + service); } LOG.info("Created NHS number history file"); } Map<String, Set<String>> hmNhsNumberToPatientGuid = new HashMap<>(); Map<String, List<NhsNumberInfo>> hmPatientGuidHistory = new HashMap<>(); LOG.info("Reading in NHS number history"); int total = 0; String currentOdsCode = null; int odsCodesDone = 0; int totalAtOdsCode = 0; FileReader fr = new FileReader(nhsNumberHistoryFile); BufferedReader br = new BufferedReader(fr); while (true) { String historyLine = br.readLine(); if (historyLine == null) { break; } try { String[] toks = historyLine.split("_"); NhsNumberInfo info = new NhsNumberInfo(); info.odsCode = toks[0]; info.date = toks[1]; info.patientGuid = toks[2]; info.patientUuid = toks[3]; info.nhsNumber = toks[4]; info.deleted = toks[5]; //skip the Community services if (info.odsCode.equals("16441") || info.odsCode.equals("16456") || info.odsCode.equals("16962") || info.odsCode.equals("16998") || info.odsCode.equals("19594") || info.odsCode.equals("29605") || info.odsCode.equals("30159") || info.odsCode.equals("R1H") || info.odsCode.equals("R1H14") || info.odsCode.equals("R1H15") || info.odsCode.equals("RQX") || info.odsCode.equals("RWKGY")) { continue; } if (currentOdsCode == null || !currentOdsCode.equals(info.odsCode)) { currentOdsCode = info.odsCode; totalAtOdsCode = 0; odsCodesDone ++; LOG.info("Starting " + currentOdsCode + " org " + odsCodesDone); } Set<String> s = hmNhsNumberToPatientGuid.get(info.nhsNumber); if (s == null) { s = new HashSet<>(); hmNhsNumberToPatientGuid.put(info.nhsNumber, s); } s.add(info.patientGuid); List<NhsNumberInfo> l2 = hmPatientGuidHistory.get(info.patientGuid); if (l2 == null) { l2 = new ArrayList<>(); hmPatientGuidHistory.put(info.patientGuid, l2); } boolean addNew = true; if (!l2.isEmpty()) { //if this is just telling us the same as the previous one, ignore it NhsNumberInfo previous = l2.get(l2.size()-1); if (previous.nhsNumber.equals(info.nhsNumber)) { addNew = false; } } if (addNew) { l2.add(info); total ++; totalAtOdsCode ++; if (totalAtOdsCode % 10000 == 0) { LOG.info("Done " + totalAtOdsCode + " at " + currentOdsCode + " (total " + total + ")"); } } } catch (Exception ex) { throw new Exception("Error parsing line [" + historyLine + "]", ex); } } LOG.info("Read in NHS number history"); String salt = null; JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); ArrayNode linked = (ArrayNode)config.get("linkedDistributors"); for (int i=0; i<linked.size(); i++) { JsonNode linkedElement = linked.get(i); String name = linkedElement.get("saltKeyName").asText(); if (name.equals("EGH")) { salt = linkedElement.get("salt").asText(); } } //go through file and check File inputFile = new File(nhsNumberFile); if (!inputFile.exists()) { throw new Exception(nhsNumberFile + " doesn't exist"); } List<String> nhsNumbers = Files.readAllLines(inputFile.toPath()); LOG.info("Found " + nhsNumbers.size()); String 
fileName = FilenameUtils.getBaseName(nhsNumberFile); File outputCsvFile = new File("OUTPUT_" + fileName + ".csv"); BufferedWriter bw = new BufferedWriter(new FileWriter(outputCsvFile)); CSVPrinter outputPrinter = new CSVPrinter(bw, CSVFormat.DEFAULT.withHeader("nhs_number", "pseudo_id", "finding", "comment")); List<String> lines = new ArrayList<>(); for (String nhsNumber: nhsNumbers) { LOG.debug("Doing " + nhsNumber); PseudoIdBuilder b = new PseudoIdBuilder(subscriberConfigName, "EGH", salt); b.addValueNhsNumber("NhsNumber", nhsNumber, null); String calcPseudoId = b.createPseudoId(); String finding = null; String comment = null; lines.add(">>>>>>>>> " + nhsNumber + " <<<<<<<<<<"); Set<String> patientGuids = hmNhsNumberToPatientGuid.get(nhsNumber); if (patientGuids != null && !patientGuids.isEmpty()) { lines.add("Matches " + patientGuids.size() + " patient GUIDs: " + patientGuids); for (String patientGuid : patientGuids) { List<NhsNumberInfo> history = hmPatientGuidHistory.get(patientGuid); if (history == null) { throw new Exception("No history for patient GUID " + patientGuid); } //some very old data was received into AWS out of order (e.g. F84081), so need to sort them history.sort((o1, o2) -> o1.date.compareTo(o2.date)); //see if it matches the CURRENT NHS number from the Emis data NhsNumberInfo currentInfo = history.get(history.size() - 1); if (currentInfo.nhsNumber.equals(nhsNumber)) { lines.add("" + patientGuid + ": matches CURRENT NHS number (at " + currentInfo.odsCode + "), so SHOULD be in subscriber DB"); SubscriberResourceMappingDalI subscriberResourceMappingDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); Long enterpriseId = subscriberResourceMappingDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), currentInfo.patientUuid); if (enterpriseId == null) { String startDateStr = TransformConfig.instance().getEmisStartDate(currentInfo.odsCode); Date startDate = new SimpleDateFormat("dd/MM/yyyy").parse(startDateStr); lines.add("Org start date " + startDateStr); Date receivedDate = new SimpleDateFormat("yyyyMMdd").parse(currentInfo.date); //if only received before the start date, then we won't have processed it if (receivedDate.before(startDate)) { lines.add("Patient data received before org start date so won't have been processed"); //leave the finding null so we check patient_search } else { finding = "ERROR"; comment = "Matches current NHS number, so should be in subscriber DB but can't find enterprise ID"; lines.add("" + patientGuid + ": no enterprise ID found"); for (NhsNumberInfo info : history) { lines.add("" + patientGuid + ": " + info); } } } else { List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(subscriberConfigName); EnterpriseConnector.ConnectionWrapper first = connectionWrappers.get(0); String sql = "SELECT id, target_skid" + " FROM patient" + " LEFT OUTER JOIN link_distributor" + " ON patient.pseudo_id = link_distributor.source_skid" + " WHERE patient.id = ?"; Connection enterpriseConnection = first.getConnection(); PreparedStatement enterpriseStatement = enterpriseConnection.prepareStatement(sql); enterpriseStatement.setLong(1, enterpriseId.longValue()); ResultSet rs = enterpriseStatement.executeQuery(); if (rs.next()) { long id = rs.getLong(1); String pseudoId = rs.getString(2); lines.add("" + patientGuid + ": enterprise ID " + id + " with pseudo ID " + pseudoId); //LOG.debug("Salt = " + salt); lines.add("" + patientGuid + ": expected pseudo ID " + calcPseudoId); if 
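// compare the pseudo ID stored against the patient in the subscriber DB (via
// link_distributor) with the one freshly derived from the NHS number and salt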
(pseudoId.equals(calcPseudoId)) { finding = "Match"; comment = "Matches current NHS number and is in subscriber DB"; lines.add("" + patientGuid + ": found in subscriber DB with right pseudo ID"); } else { finding = "Mis-match"; comment = "Matches current NHS number and is in subscriber DB but pseudo ID is different"; lines.add("" + patientGuid + ": found in subscriber DB but with wrong pseudo ID"); } } else { finding = "ERROR"; comment = "Matches current NHS number and enterprise ID = " + enterpriseId + " but not in DB"; } enterpriseStatement.close(); enterpriseConnection.close(); } } else { lines.add("" + patientGuid + ": doesn't match current NHS number (at " + currentInfo.odsCode + ") which is " + currentInfo.nhsNumber); for (NhsNumberInfo info : history) { lines.add("" + patientGuid + ": " + info); } //find out when the NHS number changed NhsNumberInfo infoChanged = null; for (int i = history.size() - 1; i >= 0; i--) { NhsNumberInfo info = history.get(i); if (info.nhsNumber.equals(nhsNumber)) { infoChanged = history.get(i + 1); break; } } if (infoChanged != null) { lines.add("" + patientGuid + ": NHS number changed on " + infoChanged.date + " (at " + infoChanged.odsCode + ") to " + currentInfo.nhsNumber); finding = "NHS number changed"; comment = "NHS number changed on " + infoChanged.date; //comment = "NHS number changed on " + infoChanged.date + " to " + currentInfo.nhsNumber; } else { lines.add("" + patientGuid + ": ERROR - FAILED TO FIND MATCHING NHS NUMBER IN HISTORY"); finding = "ERROR"; comment = "FAILED TO FIND MATCHING NHS NUMBER IN HISTORY"; } } } } //if we've not found anything above, check patient_search for the NHS number to see if we can work out where they are if (finding == null) { lines.add("Checking patient_search"); //check patient search EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); String sql = "select local_id, ccg_code" + " from eds.patient_search ps" + " inner join admin.service s" + " on s.id = ps.service_id" + " and s.organisation_type = 'PR'" + " inner join eds.patient_search_episode pse" + " on pse.service_id = ps.service_id" + " and pse.patient_id = ps.patient_id" + " and pse.registration_end is null" + " where nhs_number = ?" 
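// if the NHS number is registered at several practices (organisation_type 'PR'),
// take the open registration that started most recently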
+ " order by pse.registration_start desc" + " limit 1"; PreparedStatement ps = edsConnection.prepareStatement(sql); ps.setString(1, nhsNumber); //LOG.debug(sql); ResultSet rs = ps.executeQuery(); if (rs.next()) { String odsCode = rs.getString(1); String ccgCode = rs.getString(2); OdsOrganisation odsOrg = OdsWebService.lookupOrganisationViaRest(odsCode); OdsOrganisation parentOdsOrg = null; if (!Strings.isNullOrEmpty(ccgCode)) { parentOdsOrg = OdsWebService.lookupOrganisationViaRest(ccgCode); } if (odsOrg == null) { lines.add("Registered at " + odsCode + " but failed to find ODS record for " + odsCode); finding = "ERROR"; comment = "Registered at " + odsCode + " but not found in open ODS"; } else if (parentOdsOrg == null) { finding = "ERROR"; comment = "Registered at " + odsOrg.getOdsCode() + " " + odsOrg.getOrganisationName() + " but no ODS record found for parent " + ccgCode; } else { finding = "Out of area"; comment = "Patient registered in " + parentOdsOrg.getOdsCode() + " " + parentOdsOrg.getOrganisationName(); } } else { finding = "Unknown"; comment = "No data for NHS number found (within scope of DDS)"; } ps.close(); edsEntityManager.close(); } outputPrinter.printRecord(nhsNumber, calcPseudoId, finding, comment); } File outputFile = new File("OUTPUT_" + nhsNumberFile); Files.write(outputFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE); outputPrinter.close(); LOG.info("Finished Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); } catch (Throwable t) { LOG.error("", t); } }*/ private static void fixMedicationStatementIsActive(String odsCodeRegex) { LOG.info("Fixing MedicationStatement IsActive for using " + odsCodeRegex); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { UUID serviceId = service.getId(); String odsCode = service.getLocalId(); if (odsCodeRegex != null && !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } String serviceIdStr = serviceId.toString(); //find protocols List<LibraryItem> publisherLibraryItems = new ArrayList<>(); List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr, null); for (LibraryItem libraryItem: libraryItems) { for (ServiceContract serviceContract: libraryItem.getProtocol().getServiceContract()) { if (serviceContract.getService().getUuid().equals(serviceIdStr) && serviceContract.getType() == ServiceContractType.PUBLISHER && serviceContract.getActive() == ServiceContractActive.TRUE) { publisherLibraryItems.add(libraryItem); break; } } } if (publisherLibraryItems.isEmpty()) { LOG.debug("Skipping " + service + " as not a publisher to any protocol"); continue; } LOG.debug("Doing " + service); //find subscriber config name and software name for each protocol Map<LibraryItem, String> hmSubscriberConfigNames = new HashMap<>(); Map<LibraryItem, String> hmSoftwareNames = new HashMap<>(); for (LibraryItem libraryItem: publisherLibraryItems) { List<String> subscriberConfigNames = new ArrayList<>(); List<String> softwareNames = new ArrayList<>(); //LOG.debug("Protocol name = " + libraryItem.getName()); for (ServiceContract serviceContract : libraryItem.getProtocol().getServiceContract()) { if (serviceContract.getType() == 
ServiceContractType.SUBSCRIBER && serviceContract.getActive() == ServiceContractActive.TRUE) { String subscriberConfigName = MessageTransformOutbound.getSubscriberEndpoint(serviceContract); //LOG.debug(" subscriber config = [" + subscriberConfigName + "]"); if (!Strings.isNullOrEmpty(subscriberConfigName)) { String technicalInterfaceUuidStr = serviceContract.getTechnicalInterface().getUuid(); String systemUuidStr = serviceContract.getSystem().getUuid(); TechnicalInterface technicalInterface = LibraryRepositoryHelper.getTechnicalInterfaceDetailsUsingCache(systemUuidStr, technicalInterfaceUuidStr); String software = technicalInterface.getMessageFormat(); //ignore any service contracts not for these formats if (!software.equals(MessageFormat.ENTERPRISE_CSV)) { /*if (!software.equals(MessageFormat.ENTERPRISE_CSV) && !software.equals(MessageFormat.SUBSCRIBER_CSV)) {*/ continue; } if (!subscriberConfigNames.contains(subscriberConfigName)) { subscriberConfigNames.add(subscriberConfigName); } if (!softwareNames.contains(software)) { softwareNames.add(software); } } } } //the DPA protocols won't have any subscribers if (subscriberConfigNames.size() == 0) { LOG.debug("Failed to find subscriber config name for protocol " + libraryItem.getName()); continue; } if (subscriberConfigNames.size() > 1) { throw new Exception("Found more than one subscriber config name for protocol " + libraryItem.getName() + ": " + subscriberConfigNames); } String subscriberConfigName = subscriberConfigNames.get(0); String softwareName = softwareNames.get(0); hmSubscriberConfigNames.put(libraryItem, subscriberConfigName); hmSoftwareNames.put(libraryItem, softwareName); LOG.info("Protocol " + libraryItem.getName() + " -> " + softwareName + " @ " + subscriberConfigName); } List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs at service"); Map<String, List<Long>> hmMedicationStatementIdsForService = new HashMap<>(); int found = 0; for (int i = 0; i < patientUuids.size(); i++) { if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + found); } UUID patientUuid = patientUuids.get(i); List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.MedicationStatement.toString()); for (ResourceWrapper resourceWrapper: resourceWrappers) { MedicationStatement medicationStatement = (MedicationStatement)resourceWrapper.getResource(); boolean isActive = medicationStatement.hasStatus() && medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.ACTIVE; if (!isActive) { continue; } found ++; for (LibraryItem libraryItem: publisherLibraryItems) { String subscriberConfigName = hmSubscriberConfigNames.get(libraryItem); if (Strings.isNullOrEmpty(subscriberConfigName)) { continue; } String softwareName = hmSoftwareNames.get(libraryItem); SubscriberResourceMappingDalI subscriberDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); Long id = null; if (softwareName.equals(MessageFormat.ENTERPRISE_CSV)) { Long enterpriseId = subscriberDal.findEnterpriseIdOldWay(ResourceType.MedicationStatement.toString(), resourceWrapper.getResourceId().toString()); if (enterpriseId != null) { id = enterpriseId; } } else if (softwareName.equals(MessageFormat.SUBSCRIBER_CSV)) { String ref = resourceWrapper.getReferenceString(); SubscriberId subscriberId = subscriberDal.findSubscriberId(SubscriberTableId.MEDICATION_STATEMENT.getId(), ref); if (subscriberId != null) { id = 
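// SUBSCRIBER_CSV uses the newer subscriber ID mapping, keyed on the resource
// reference string, rather than the legacy enterprise ID lookup used above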
subscriberId.getSubscriberId(); } } else { //throw new Exception("Unexpected software name " + softwareName); } if (id != null) { List<Long> l = hmMedicationStatementIdsForService.get(subscriberConfigName); if (l == null) { l = new ArrayList<>(); hmMedicationStatementIdsForService.put(subscriberConfigName, l); } l.add(id); } } } } LOG.info("Finished, Done " + patientUuids.size() + " and found " + found); for (String subscriberConfigName: hmMedicationStatementIdsForService.keySet()) { List<Long> medicationStatementIdsForService = hmMedicationStatementIdsForService.get(subscriberConfigName); List<String> lines = new ArrayList<>(); lines.add("#" + odsCode); List<String> batch = new ArrayList<>(); while (!medicationStatementIdsForService.isEmpty()) { Long l = medicationStatementIdsForService.remove(0); batch.add(l.toString()); if (medicationStatementIdsForService.isEmpty() || batch.size() > 50) { String sql = "UPDATE medication_statement SET is_active = 1 WHERE cancellation_date IS NULL AND id IN (" + String.join(",", batch) + ");"; lines.add(sql); batch.clear(); } } LOG.debug("Going to write to file"); File f = new File(subscriberConfigName + ".sql"); Files.write(f.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); LOG.debug("Done write to file"); } } LOG.info("Fixing MedicationStatement IsActive for using " + odsCodeRegex); } catch (Throwable t) { LOG.error("", t); } } /*private static void fixMedicationStatementIsActive(String protocolName, String filePath, String odsCodeRegex) { LOG.info("Fixing MedicationStatement IsActive for " + protocolName + " to " + filePath + " matching orgs using " + odsCodeRegex); try { Set<String> odsCodesDone = new HashSet<>(); File f = new File(filePath); if (f.exists()) { List<String> lines = FileUtils.readLines(f); for (String line: lines) { if (line.startsWith("#")) { String odsCode = line.substring(1); odsCodesDone.add(odsCode); } } } //find services in protocol //find subscriber config details ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { UUID serviceId = service.getId(); String serviceIdStr = serviceId.toString(); //find protocol LibraryItem matchedProtocol = null; List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceId.toString(), null); for (LibraryItem protocol: libraryItems) { if (protocol.getName().equalsIgnoreCase(protocolName)) { matchedProtocol = protocol; break; } } if (matchedProtocol == null) { LOG.debug("Skipping " + service + " as not in protocol " + protocolName); continue; } //ensure in protocol as a publisher boolean isPublisher = false; for (ServiceContract serviceContract: matchedProtocol.getProtocol().getServiceContract()) { if (serviceContract.getService().getUuid().equals(serviceIdStr) && serviceContract.getType() == ServiceContractType.PUBLISHER && serviceContract.getActive() == ServiceContractActive.TRUE) { isPublisher = true; break; } } if (!isPublisher) { LOG.debug("Skipping " + service + " as not a publisher to protocol " + protocolName); continue; } String odsCode = service.getLocalId(); if (odsCodeRegex != null && !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } if (odsCodesDone.contains(odsCode)) { LOG.debug("Already done " + service); continue; } LOG.debug("Doing 
" + service); //find subscriber config name List<String> subscriberConfigNames = new ArrayList<>(); List<String> softwareNames = new ArrayList<>(); for (ServiceContract serviceContract: matchedProtocol.getProtocol().getServiceContract()) { if (serviceContract.getType() == ServiceContractType.SUBSCRIBER && serviceContract.getActive() == ServiceContractActive.TRUE) { String subscriberConfigName = MessageTransformOutbound.getSubscriberEndpoint(serviceContract); if (!Strings.isNullOrEmpty(subscriberConfigName)) { subscriberConfigNames.add(subscriberConfigName); String technicalInterfaceUuidStr = serviceContract.getTechnicalInterface().getUuid(); String systemUuidStr = serviceContract.getSystem().getUuid(); TechnicalInterface technicalInterface = LibraryRepositoryHelper.getTechnicalInterfaceDetailsUsingCache(systemUuidStr, technicalInterfaceUuidStr); String software = technicalInterface.getMessageFormat(); softwareNames.add(software); } } } if (subscriberConfigNames.size() == 0) { throw new Exception("Failed to find subscriber config name for protocol " + protocolName); } if (subscriberConfigNames.size() > 1) { throw new Exception("Found more than one subscriber config name for protocol " + protocolName); } String subscriberConfigName = subscriberConfigNames.get(0); String softwareName = softwareNames.get(0); LOG.info("Protocol " + protocolName + " -> " + softwareName + " @ " + subscriberConfigName); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs at service"); SubscriberResourceMappingDalI subscriberDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); List<Long> medicationStatementIdsForService = new ArrayList<>(); for (int i = 0; i < patientUuids.size(); i++) { if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + medicationStatementIdsForService.size()); } UUID patientUuid = patientUuids.get(i); List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.MedicationStatement.toString()); for (ResourceWrapper resourceWrapper: resourceWrappers) { MedicationStatement medicationStatement = (MedicationStatement)resourceWrapper.getResource(); boolean isActive = medicationStatement.hasStatus() && medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.ACTIVE; if (isActive) { if (softwareName.equals(MessageFormat.ENTERPRISE_CSV)) { Long enterpriseId = subscriberDal.findEnterpriseIdOldWay(ResourceType.MedicationStatement.toString(), resourceWrapper.getResourceId().toString()); if (enterpriseId != null) { medicationStatementIdsForService.add(enterpriseId); } } else if (softwareName.equals(MessageFormat.SUBSCRIBER_CSV)) { String ref = resourceWrapper.getReferenceString(); SubscriberId subscriberId = subscriberDal.findSubscriberId(SubscriberTableId.MEDICATION_STATEMENT.getId(), ref); if (subscriberId != null) { medicationStatementIdsForService.add(subscriberId.getSubscriberId()); } } else { throw new Exception("Unexpected software name " + softwareName); } } } } LOG.info("Done " + patientUuids.size() + " and found " + medicationStatementIdsForService.size()); odsCodesDone.add(odsCode); List<String> lines = new ArrayList<>(); lines.add("#" + odsCode); List<String> batch = new ArrayList<>(); while (!medicationStatementIdsForService.isEmpty()) { Long l = medicationStatementIdsForService.remove(0); batch.add(l.toString()); if (medicationStatementIdsForService.isEmpty() || batch.size() > 50) { String sql = "UPDATE 
medication_statement SET is_active = 1 WHERE cancellation_date IS NULL AND id IN (" + String.join(",", batch) + ");"; lines.add(sql); batch.clear(); } if (lines.size() % 10 == 0) { LOG.debug("Created " + lines.size() + " lines with " + medicationStatementIdsForService.size() + " IDs remaining"); } } LOG.debug("Going to write to file"); Files.write(f.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); LOG.debug("Done write to file"); } LOG.debug("Written to " + f); LOG.info("Finished Fixing Missing Emis Ethnicities to " + filePath); } catch (Throwable t) { LOG.error("", t); } }*/ /** * restores the ethnicity and marital statuses on the Patient resources that were lost * if the "re-registrated patients" fix was run before the "deleted patients" fix. This meant that * the patient resource was re-created from the patient file but the ethnicity and marital status weren't carried * over from the pre-deleted version. */ /*private static void fixMissingEmisEthnicities(String filePath, String filterRegexOdsCode) { LOG.info("Fixing Missing Emis Ethnicities to " + filePath + " matching orgs using " + filterRegexOdsCode); try { Map<String, List<UUID>> hmPatientIds = new HashMap<>(); File f = new File(filePath); if (f.exists()) { List<String> lines = FileUtils.readLines(f); String currentOdsCode = null; for (String line: lines) { if (line.startsWith("#")) { currentOdsCode = line.substring(1); } else { UUID patientId = UUID.fromString(line); List<UUID> s = hmPatientIds.get(currentOdsCode); if (s == null) { s = new ArrayList<>(); hmPatientIds.put(currentOdsCode, s); } s.add(patientId); } } } ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { String odsCode = service.getLocalId(); if (filterRegexOdsCode != null && !Pattern.matches(filterRegexOdsCode, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } if (hmPatientIds.containsKey(odsCode)) { LOG.debug("Already done " + service); continue; } LOG.debug("Doing " + service); UUID serviceId = service.getId(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs at service"); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<UUID> patientIdsForService = new ArrayList<>(); List<ResourceWrapper> resourceWrappersToSave = new ArrayList<>(); for (int i = 0; i < patientUuids.size(); i++) { if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + patientIdsForService.size()); } UUID patientUuid = patientUuids.get(i); List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientUuid); ResourceWrapper current = history.get(0); if (current.isDeleted()) { continue; } //if only one history record, no point looking back if (history.size() == 1) { continue; } Patient p = (Patient)current.getResource(); PatientBuilder patientBuilder = new PatientBuilder(p); //see if both already present EthnicCategory currentEthnicCategory = patientBuilder.getEthnicity(); MaritalStatus currentMaritalStatus = patientBuilder.getMaritalStatus(); if (currentEthnicCategory != null && currentMaritalStatus != null) { continue; } EthnicCategory newEthnicCategory = null; MaritalStatus newMaritalStatus = null; for (int j=1; j<history.size(); j++) { ResourceWrapper previousWrapper = history.get(j); if (previousWrapper.isDeleted()) 
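// skip deleted versions and keep walking back through the resource history until
// an older version supplies the ethnicity and/or marital status that was lost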
{ continue; } Patient previous = (Patient)previousWrapper.getResource(); PatientBuilder previousPatientBuilder = new PatientBuilder(previous); if (newEthnicCategory == null) { newEthnicCategory = previousPatientBuilder.getEthnicity(); } if (newMaritalStatus == null) { newMaritalStatus = previousPatientBuilder.getMaritalStatus(); } } if (newEthnicCategory == currentEthnicCategory && newMaritalStatus == currentMaritalStatus) { continue; } boolean fixed = false; if (newEthnicCategory != null) { patientBuilder.setEthnicity(newEthnicCategory); fixed = true; } if (newMaritalStatus != null) { patientBuilder.setMaritalStatus(newMaritalStatus); fixed = true; } if (fixed) { p = (Patient) patientBuilder.getResource(); String newJson = FhirSerializationHelper.serializeResource(p); current.setResourceData(newJson); patientIdsForService.add(patientUuid); resourceWrappersToSave.add(current); } } LOG.info("Done " + patientUuids.size() + " and found " + patientIdsForService.size()); hmPatientIds.put(odsCode, patientIdsForService); List<String> lines = new ArrayList<>(); for (String odsCodeDone: hmPatientIds.keySet()) { lines.add("#" + odsCodeDone); List<UUID> patientIdsDone = hmPatientIds.get(odsCodeDone); for (UUID patientIdDone: patientIdsDone) { lines.add(patientIdDone.toString()); } } FileUtils.writeLines(f, lines); //only now we've stored the affected patient IDs in the file should we actually update the DB for (ResourceWrapper wrapper: resourceWrappersToSave) { saveResourceWrapper(serviceId, wrapper); } //and re-queue the affected patients for sending to subscribers QueueHelper.queueUpPatientsForTransform(patientIdsForService); } LOG.debug("Written to " + f); LOG.info("Finished Fixing Missing Emis Ethnicities to " + filePath); } catch (Throwable t) { LOG.error("", t); } }*/ /** * updates patient_search and patient_link tables for explicit list of patient UUIDs */ /*private static void updatePatientSearch(String filePath) throws Exception { LOG.info("Updating patient search from " + filePath); try { File f = new File(filePath); if (!f.exists()) { LOG.error("File " + f + " doesn't exist"); return; } List<UUID> patientIds = new ArrayList<>(); List<String> lines = FileUtils.readLines(f); for (String line: lines) { line = line.trim(); if (line.startsWith("#")) { continue; } UUID uuid = UUID.fromString(line); patientIds.add(uuid); } LOG.info("Found " + patientIds.size() + " patient UUIDs"); Map<String, UUID> hmPublishers = new HashMap<>(); List<String> publishers = new ArrayList<>(); publishers.add("publisher_01"); publishers.add("publisher_02"); publishers.add("publisher_03"); publishers.add("publisher_04"); publishers.add("publisher_04b"); publishers.add("publisher_05"); publishers.add("publisher_05_nwl_tmp"); publishers.add("publisher_05_sel_tmp"); File changedFile = new File(filePath + "changed"); for (String publisher: publishers) { UUID serviceId = findSuitableServiceIdForPublisherConfig(publisher); hmPublishers.put(publisher, serviceId); } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); int done = 0; int skipped = 0; for (UUID patientId: patientIds) { LOG.info("Doing patient " + patientId); //we need to find a service ID for the patient, so we know where to get the resources from UUID serviceId = null; //try the patient_search table first PatientSearch ps = patientSearchDal.searchByPatientId(patientId); if (ps != null) { serviceId = 
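// patient_search is the quickest way to find which service holds this patient's
// resources; if that fails, fall back to patient_link and then to trying each
// publisher database in turn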
ps.getServiceId(); } //if service ID is still null, then try looking in the patient_link table if (serviceId == null) { String personId = patientLinkDal.getPersonId(patientId.toString()); Map<String, String> map = patientLinkDal.getPatientAndServiceIdsForPerson(personId); if (map.containsKey(patientId.toString())) { serviceId = UUID.fromString(map.get(patientId.toString())); } } List<ResourceWrapper> history = null; if (serviceId != null) { //if we have a service ID, then retrieve the resource history directly from that DB history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientId); } else { //if we still don't have a service ID, then test each Corexx DB in turn for (String publisher: hmPublishers.keySet()) { UUID exampleServiceId = hmPublishers.get(publisher); List<ResourceWrapper> publisherHistory = resourceDal.getResourceHistory(exampleServiceId, ResourceType.Patient.toString(), patientId); if (!publisherHistory.isEmpty()) { history = publisherHistory; LOG.info("Found resource history for patient " + patientId + " on " + publisher); break; } } } if (history == null || history.isEmpty()) { LOG.error("Failed to find any resource history for patient " + patientId); skipped ++; continue; } ResourceWrapper mostRecent = history.get(0); serviceId = mostRecent.getServiceId(); PatientLinkPair patientLink = null; if (mostRecent.isDeleted()) { //find most recent non-deleted ResourceWrapper nonDeleted = null; for (ResourceWrapper wrapper: history) { if (!wrapper.isDeleted()) { nonDeleted = wrapper; break; } } if (nonDeleted == null) { LOG.error("No non-deleted Patient resource for " + patientId); skipped ++; continue; } Patient p = (Patient)nonDeleted.getResource(); patientSearchDal.update(serviceId, p); patientLink = patientLinkDal.updatePersonId(serviceId, p); //and call this to mark the patient_search record as deleted patientSearchDal.deletePatient(serviceId, p); } else { //LOG.debug("Patient wasn't deleted"); Patient p = (Patient)mostRecent.getResource(); patientSearchDal.update(serviceId, p); patientLink = patientLinkDal.updatePersonId(serviceId, p); } //if the person ID was changed, write this to a file if (patientLink.getNewPersonId() != null) { List<String> updateLines = new ArrayList<>(); updateLines.add(patientId.toString()); Files.write(changedFile.toPath(), updateLines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); } done ++; if (done % 500 == 0) { LOG.debug("Done " + done + " Skipped " + skipped); } } LOG.debug("Done " + done + " Skipped " + skipped); LOG.info("Finished Updating patient search from " + filePath); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void runPersonUpdater(String enterpriseConfigName) throws Exception { try { LOG.info("Person updater starting for " + enterpriseConfigName); //create this date BEFORE we get the date we last run, so there's no risk of a gap Date dateNextRun = new Date(); EnterprisePersonUpdaterHistoryDalI enterprisePersonUpdaterHistoryDal = DalProvider.factoryEnterprisePersonUpdateHistoryDal(enterpriseConfigName); Date dateLastRun = enterprisePersonUpdaterHistoryDal.findDatePersonUpdaterLastRun(); LOG.info("Looking for Person ID changes since " + dateLastRun); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); List<PatientLinkPair> changes = patientLinkDal.getChangesSince(dateLastRun); LOG.info("Found " + changes.size() + " changes in Person ID"); //find the Enterprise Person ID for each of the changes, hashing them by the enterprise 
instance they're on List<UpdateJob> updates = convertChangesToEnterprise(enterpriseConfigName, changes); List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(enterpriseConfigName); for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) { LOG.info("Updating " + updates.size() + " person IDs on " + connectionWrapper.getUrl()); Connection connection = connectionWrapper.getConnection(); try { List<String> tables = findTablesWithPersonId(connection); for (UpdateJob update: updates) { changePersonId(update, connection, tables); } //and delete any person records that no longer have any references to them LOG.info("Going to delete orphaned persons"); deleteOrphanedPersons(connection); } finally { connection.close(); } } enterprisePersonUpdaterHistoryDal.updatePersonUpdaterLastRun(dateNextRun); LOG.info("Person updates complete"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void deleteOrphanedPersons(Connection connection) throws Exception { String sql = "SELECT id FROM person" + " WHERE NOT EXISTS (" + " SELECT 1" + " FROM patient" + " WHERE patient.person_id = person.id)"; PreparedStatement ps = connection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); List<Long> ids = new ArrayList<>(); while (rs.next()) { long id = rs.getLong(1); ids.add(new Long(id)); } LOG.info("Found " + ids.size() + " orphaned persons to delete"); rs.close(); ps.close(); sql = "DELETE FROM person WHERE id = ?"; ps = connection.prepareStatement(sql); for (int i=0; i<ids.size(); i++) { Long id = ids.get(i); ps.setLong(1, id); ps.addBatch(); //execute the batch every 50 and at the end if (i % 50 == 0 || i+1 == ids.size()) { ps.executeBatch(); } } connection.commit(); }*/ /*private static List<UpdateJob> convertChangesToEnterprise(String enterpriseConfigName, List<PatientLinkPair> changes) throws Exception { List<UpdateJob> updatesForConfig = new ArrayList<>(); for (PatientLinkPair change: changes) { String oldDiscoveryPersonId = change.getPreviousPersonId(); String newDiscoveryPersonId = change.getNewPersonId(); String discoveryPatientId = change.getPatientId(); SubscriberResourceMappingDalI enterpriseIdDalI = DalProvider.factorySubscriberResourceMappingDal(enterpriseConfigName); Long enterprisePatientId = enterpriseIdDalI.findEnterpriseIdOldWay(ResourceType.Patient.toString(), discoveryPatientId); //if this patient has never gone to enterprise, then skip it if (enterprisePatientId == null) { continue; } SubscriberPersonMappingDalI personMappingDal = DalProvider.factorySubscriberPersonMappingDal(enterpriseConfigName); List<Long> mappings = personMappingDal.findEnterprisePersonIdsForPersonId(oldDiscoveryPersonId); for (Long oldEnterprisePersonId: mappings) { Long newEnterprisePersonId = personMappingDal.findOrCreateEnterprisePersonId(newDiscoveryPersonId); updatesForConfig.add(new UpdateJob(enterprisePatientId, oldEnterprisePersonId, newEnterprisePersonId)); } } return updatesForConfig; }*/ private static void changePersonId(UpdateJob change, Connection connection, List<String> tables) throws Exception { for (String tableName: tables) { changePersonIdOnTable(tableName, change, connection); } connection.commit(); LOG.info("Updated person ID from " + change.getOldEnterprisePersonId() + " to " + change.getNewEnterprisePersonId() + " for patient " + change.getEnterprisePatientId()); } /*private static void changePersonId(UpdateJob change, Connection connection) throws Exception { OutputContainer outputContainer = new 
OutputContainer(true); //doesn't matter what we pass into the constructor //the csv writers are mapped to the tables in the database, so we can use them to discover //what tables have person and patient ID columns List<AbstractEnterpriseCsvWriter> csvWriters = outputContainer.getCsvWriters(); //the writers are in dependency order (least dependent -> most) so we need to go backwards to avoid //upsetting any foreign key constraints for (int i=csvWriters.size()-1; i>=0; i--) { AbstractEnterpriseCsvWriter csvWriter = csvWriters.get(i); String[] csvHeaders = csvWriter.getCsvHeaders(); for (String header: csvHeaders) { if (header.equalsIgnoreCase("person_id")) { String fileName = csvWriter.getFileName(); String tableName = FilenameUtils.removeExtension(fileName); changePersonIdOnTable(tableName, change, connection); break; } } } connection.commit(); LOG.info("Updated person ID from " + change.getOldEnterprisePersonId() + " to " + change.getNewEnterprisePersonId() + " for patient " + change.getEnterprisePatientId()); }*/ /*private static List<String> findTablesWithPersonId(Connection connection) throws Exception { Statement statement = connection.createStatement(); String dbNameSql = "SELECT DATABASE()"; ResultSet rs = statement.executeQuery(dbNameSql); rs.next(); String dbName = rs.getString(1); rs.close(); String tableNameSql = "SELECT t.table_name" + " FROM information_schema.tables t" + " INNER JOIN information_schema.columns c" + " ON c.table_name = t.table_name" + " AND c.table_schema = t.table_schema" + " WHERE t.table_schema = '" + dbName + "'" + " AND c.column_name = 'person_id'"; rs = statement.executeQuery(tableNameSql); List<String> ret = new ArrayList<>(); while (rs.next()) { String tableName = rs.getString(1); ret.add(tableName); } rs.close(); statement.close(); return ret; }*/ private static void changePersonIdOnTable(String tableName, UpdateJob change, Connection connection) throws Exception { StringBuilder sb = new StringBuilder(); sb.append("UPDATE "); sb.append(tableName); sb.append(" SET "); sb.append("person_id = ? "); sb.append("WHERE "); if (tableName.equals("patient")) { sb.append("id = ? "); } else { sb.append("patient_id = ? 
"); } sb.append("AND person_id = ?"); PreparedStatement update = connection.prepareStatement(sb.toString()); update.setLong(1, change.getNewEnterprisePersonId()); update.setLong(2, change.getEnterprisePatientId()); update.setLong(3, change.getOldEnterprisePersonId()); update.addBatch(); update.executeBatch(); } static class UpdateJob { private Long enterprisePatientId = null; private Long oldEnterprisePersonId = null; private Long newEnterprisePersonId = null; public UpdateJob(Long enterprisePatientId, Long oldEnterprisePersonId, Long newEnterprisePersonId) { this.enterprisePatientId = enterprisePatientId; this.oldEnterprisePersonId = oldEnterprisePersonId; this.newEnterprisePersonId = newEnterprisePersonId; } public Long getEnterprisePatientId() { return enterprisePatientId; } public Long getOldEnterprisePersonId() { return oldEnterprisePersonId; } public Long getNewEnterprisePersonId() { return newEnterprisePersonId; } } /*private static void findPatientsThatNeedTransforming(String file, String filterOdsCode) { LOG.info("Finding patients that need transforming for " + filterOdsCode + " for " + file); try { Map<String, List<UUID>> hmPatientIds = new HashMap<>(); File f = new File(file); if (f.exists()) { List<String> lines = FileUtils.readLines(f); String currentOdsCode = null; for (String line: lines) { if (line.startsWith("#")) { currentOdsCode = line.substring(1); } else { UUID patientId = UUID.fromString(line); List<UUID> s = hmPatientIds.get(currentOdsCode); if (s == null) { s = new ArrayList<>(); hmPatientIds.put(currentOdsCode, s); } s.add(patientId); } } } ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { String odsCode = service.getLocalId(); if (filterOdsCode != null && filterOdsCode.equals(odsCode)) { continue; } if (hmPatientIds.containsKey(odsCode)) { LOG.debug("Already done " + service); continue; } LOG.debug("Doing " + service); UUID serviceId = service.getId(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs"); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<UUID> patientIdsForService = new ArrayList<>(); for (int i = 0; i < patientUuids.size(); i++) { UUID patientUuid = patientUuids.get(i); boolean shouldBeInSubscriber = false; List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientUuid); boolean addPatient = false; for (int j = history.size() - 1; j >= 0; j--) { ResourceWrapper wrapper = history.get(j); if (wrapper.isDeleted()) { continue; } Patient patient = (Patient) wrapper.getResource(); //any confidential patient should be in the DB because they were previously filtered out BooleanType bt = (BooleanType) ExtensionConverter.findExtensionValue(patient, FhirExtensionUri.IS_CONFIDENTIAL); if (bt != null && bt.hasValue() && bt.getValue().booleanValue()) { addPatient = true; break; } //and patient w/o NHS number should be in the DB because they were previously filtered out //any patient with 999999 NHS number should be added so they get stripped out String nhsNumber = IdentifierHelper.findNhsNumber(patient); if (Strings.isNullOrEmpty(nhsNumber) || nhsNumber.startsWith("999999")) { addPatient = true; break; } if (j == history.size() - 1) { //find first NHS number known shouldBeInSubscriber = 
SubscriberTransformHelper.shouldPatientBePresentInSubscriber(patient); } else { boolean thisShouldBeInSubscriber = SubscriberTransformHelper.shouldPatientBePresentInSubscriber(patient); if (shouldBeInSubscriber != thisShouldBeInSubscriber) { addPatient = true; break; } } } if (addPatient) { patientIdsForService.add(patientUuid); } if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + patientIdsForService.size()); } } hmPatientIds.put(odsCode, patientIdsForService); LOG.debug("Found " + patientIdsForService.size() + " affected"); QueueHelper.queueUpPatientsForTransform(patientIdsForService); List<String> lines = new ArrayList<>(); for (String odsCodeDone: hmPatientIds.keySet()) { lines.add("#" + odsCodeDone); List<UUID> patientIdsDone = hmPatientIds.get(odsCodeDone); for (UUID patientIdDone: patientIdsDone) { lines.add(patientIdDone.toString()); } } FileUtils.writeLines(f, lines); } LOG.debug("Written to " + f); } catch (Throwable t) { LOG.error("", t); } }*/ private static void transformPatients(String sourceFile) { LOG.info("Transforming patients from " + sourceFile); try { List<UUID> patientIds = new ArrayList<>(); File f = new File(sourceFile); if (!f.exists()) { LOG.error("File " + f + " doesn't exist"); return; } List<String> lines = FileUtils.readLines(f); for (String line: lines) { line = line.trim(); if (line.startsWith("#")) { continue; } UUID uuid = UUID.fromString(line); patientIds.add(uuid); } if (patientIds.isEmpty()) { LOG.error("No patient IDs found"); return; } LOG.info("Found " + patientIds.size() + " patient IDs"); QueueHelper.queueUpPatientsForTransform(patientIds); LOG.info("Finished transforming patients from " + sourceFile); } catch (Throwable t) { LOG.error("", t); } } /*private static void countNhsNumberChanges(String odsCodes) { LOG.info("Counting NHS number changes for " + odsCodes); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); Map<String, Date> hmEarliestDate = new HashMap<>(); Map<String, Integer> hmPatientCount = new HashMap<>(); Map<String, Map<Date, List<UUID>>> hmCounts = new HashMap<>(); String[] toks = odsCodes.split(","); for (String odsCode: toks) { Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Doing " + service.getName() + " " + service.getLocalId()); UUID serviceId = service.getId(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs"); Date earliestDate = null; Map<Date, List<UUID>> hmChanges = new HashMap<>(); for (int i = 0; i < patientUuids.size(); i++) { UUID patientUuid = patientUuids.get(i); String previousNhsNumber = null; List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientUuid); for (int j = history.size() - 1; j >= 0; j--) { ResourceWrapper wrapper = history.get(j); Date d = wrapper.getCreatedAt(); //work out bulk date if (earliestDate == null || d.before(earliestDate)) { earliestDate = d; } if (wrapper.isDeleted()) { continue; } Patient patient = (Patient) wrapper.getResource(); if (j == history.size() - 1) { //find first NHS number known previousNhsNumber = IdentifierHelper.findNhsNumber(patient); } else { String thisNhsNumber = IdentifierHelper.findNhsNumber(patient); if ((thisNhsNumber == null && previousNhsNumber != null) //|| (thisNhsNumber != null && 
previousNhsNumber == null) //don't count it going FROM null to non-null as a change || (thisNhsNumber != null && previousNhsNumber != null && !thisNhsNumber.equals(previousNhsNumber))) { //changed LOG.info("" + patientUuid + " changed NHS number on " + sdf.format(d)); List<UUID> l = hmChanges.get(d); if (l == null) { l = new ArrayList<>(); hmChanges.put(d, l); } l.add(patientUuid); previousNhsNumber = thisNhsNumber; } } } if (i % 1000 == 0) { LOG.info("Done " + i); } } hmEarliestDate.put(odsCode, earliestDate); hmPatientCount.put(odsCode, new Integer(patientUuids.size())); hmCounts.put(odsCode, hmChanges); } List<String> colHeaders = new ArrayList<>(); colHeaders.add("Year"); colHeaders.add("Month"); colHeaders.addAll(Arrays.asList(toks)); String[] headerArray = colHeaders.toArray(new String[]{}); CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader(headerArray); FileWriter fileWriter = new FileWriter("NHS_number_changes.csv"); CSVPrinter csvPrinter = new CSVPrinter(fileWriter, csvFormat); //patient count List<String> row = new ArrayList<>(); row.add("Patient Count"); row.add(""); for (String odsCode: toks) { Integer count = hmPatientCount.get(odsCode); if (count == null) { row.add("0"); } else { row.add("" + count); } } csvPrinter.printRecord(row.toArray()); //start date row = new ArrayList<>(); row.add("Bulk Date"); row.add(""); for (String odsCode: toks) { Date startDate = hmEarliestDate.get(odsCode); if (startDate == null) { row.add("not found"); } else { row.add("" + sdf.format(startDate)); } } csvPrinter.printRecord(row.toArray()); for (int year=2017; year<=2019; year++) { for (int month=Calendar.JANUARY; month<=Calendar.DECEMBER; month++) { String monthStr = "" + month; if (monthStr.length() < 2) { monthStr = "0" + monthStr; } Date monthStart = sdf.parse("" + year + monthStr + "01"); Calendar cal = Calendar.getInstance(); cal.setTime(monthStart); cal.add(Calendar.MONTH, 1); cal.add(Calendar.DAY_OF_YEAR, -1); Date monthEnd = cal.getTime(); row = new ArrayList<>(); row.add("" + year); row.add("" + (month+1)); for (String odsCode: toks) { Date startDate = hmEarliestDate.get(odsCode); if (startDate == null || startDate.after(monthStart)) { row.add(""); } else { int changes = 0; Map<Date, List<UUID>> hmChanges = hmCounts.get(odsCode); if (hmChanges != null) { for (Date d : hmChanges.keySet()) { if (!d.before(monthStart) && !d.after(monthEnd)) { List<UUID> uuids = hmChanges.get(d); changes += uuids.size(); } } } row.add("" + changes); } } csvPrinter.printRecord(row.toArray()); } } csvPrinter.close(); LOG.info("Finished counting NHS number changes for " + odsCodes); } catch (Throwable t) { LOG.error("", t); } }*/ private static void createDigest(String url, String user, String pass, String table, String columnFrom, String columnTo, String base64Salt, String validNhsNumberCol) { LOG.info("Creating Digest value from " + table + "." + columnFrom + " -> " + columnTo); try { byte[] saltBytes = Base64.getDecoder().decode(base64Salt); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); String sql = "SELECT DISTINCT " + columnFrom + " FROM " + table; PreparedStatement psSelect = conn.prepareStatement(sql); psSelect.setFetchSize(1000); Connection conn2 = DriverManager.getConnection(url, props); if (validNhsNumberCol != null) { sql = "UPDATE " + table + " SET " + validNhsNumberCol + " = ?, " + columnTo + " = ? 
WHERE " + columnFrom + " = ?"; } else { sql = "UPDATE " + table + " SET " + columnTo + " = ? WHERE " + columnFrom + " = ?"; } PreparedStatement psUpdate = conn2.prepareStatement(sql); Connection conn3 = DriverManager.getConnection(url, props); if (validNhsNumberCol != null) { sql = "UPDATE " + table + " SET " + validNhsNumberCol + " = ?, " + columnTo + " = ? WHERE " + columnFrom + " IS NULL"; } else { sql = "UPDATE " + table + " SET " + columnTo + " = ? WHERE " + columnFrom + " IS NULL"; } PreparedStatement psUpdateNull = conn3.prepareStatement(sql); LOG.trace("Starting query"); ResultSet rs = psSelect.executeQuery(); LOG.trace("Query results back"); int done = 0; int batchSize = 0; while (rs.next()) { Object o = rs.getObject(1); String value = ""; if (o != null) { value = o.toString(); } TreeMap<String, String> keys = new TreeMap<>(); keys.put("DoesntMatter", value); Crypto crypto = new Crypto(); crypto.SetEncryptedSalt(saltBytes); String pseudoId = crypto.GetDigest(keys); if (o == null) { int col = 1; if (validNhsNumberCol != null) { int validNhsNunmber = isValidNhsNumber(value); psUpdateNull.setInt(col++, validNhsNunmber); } psUpdateNull.setString(col++, pseudoId); psUpdateNull.executeUpdate(); } else { int col = 1; if (validNhsNumberCol != null) { int validNhsNunmber = isValidNhsNumber(value); psUpdate.setInt(col++, validNhsNunmber); } psUpdate.setString(col++, pseudoId); psUpdate.setString(col++, value); psUpdate.addBatch(); batchSize++; if (batchSize >= 10) { psUpdate.executeBatch(); } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done); } } if (batchSize >= 0) { psUpdate.executeBatch(); } rs.close(); psSelect.close(); psUpdate.close(); psUpdateNull.close(); conn.close(); conn2.close(); conn3.close(); LOG.debug("Done " + done); LOG.info("Finished Creating Digest value from " + table + "." 
+ columnFrom + " -> " + columnTo); } catch (Throwable t) { LOG.error("", t); } } private static int isValidNhsNumber(String fieldValue) { if (fieldValue == null) { return -1; } if (fieldValue.isEmpty()) { return -1; } if (fieldValue.length() != 10) { return 0; } int sum = 0; char[] chars = fieldValue.toCharArray(); for (int i=0; i<9; i++) { char c = chars[i]; if (!Character.isDigit(c)) { return 0; } int val = Character.getNumericValue(c); int weight = 10 - i; int m = val * weight; sum += m; //LOG.trace("" + c + " x " + weight + " = " + m + " sum = " + sum); } int remainder = sum % 11; int check = 11 - remainder; //LOG.trace("sum = " + sum + " mod 11 = " + remainder + " check = " + check); if (check == 11) { check = 0; } if (check == 10) { return 0; } char lastChar = chars[9]; int actualCheck = Character.getNumericValue(lastChar); if (check != actualCheck) { return 0; } return 1; } /*private static void checkForBartsMissingFiles(String sinceDate) { LOG.info("Checking for Barts missing files"); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); Date start2019 = sdf.parse(sinceDate); LOG.info("Checking files since " + sinceDate); UUID serviceId = UUID.fromString("b5a08769-cbbe-4093-93d6-b696cd1da483"); UUID systemId = UUID.fromString("e517fa69-348a-45e9-a113-d9b59ad13095"); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); Map<String, List<String>> hmByFileType = new HashMap<>(); Map<String, Date> hmReceivedDate = new HashMap<>(); for (Exchange exchange: exchanges) { String body = exchange.getBody(); //skip any exchanges pre-2019 Date d = exchange.getHeaderAsDate(HeaderKeys.DataDate); if (d.before(start2019)) { continue; } List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(body); for (ExchangePayloadFile file: files) { String type = file.getType(); String path = file.getPath(); String name = FilenameUtils.getName(path); List<String> l = hmByFileType.get(type); if (l == null) { l = new ArrayList<>(); hmByFileType.put(type, l); } l.add(name); hmReceivedDate.put(name, d); } } LOG.info("Parsed exchange bodies"); List<String> types = new ArrayList<>(hmByFileType.keySet()); types.sort((o1, o2) -> o1.compareToIgnoreCase(o2)); for (String type: types) { List<String> files = hmByFileType.get(type); LOG.info("---------------------------------------------------------------------"); LOG.info("Checking " + type + " with " + files.size()); if (type.equals("MaternityServicesDataSet") || type.equals("SusEmergency") || type.equals("SusEmergencyTail")) { continue; } if (type.equals("CriticalCare")) { //cc_BH_192575_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "_", 2); } else if (type.equals("Diagnosis")) { //rnj_pc_diag_20190330-011515.dat checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_|-", 3); } else if (type.equals("HomeDeliveryAndBirth")) { //hdb_BH_192576_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "_", 2); } else if (type.equals("MaternityBirth")) { //GETL_MAT_BIRTH_2019-03-30_001020_1431392750.txt checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyy-MM-dd", "_", 3); } else if (type.equals("Pregnancy")) { //GETL_MAT_PREG_2019-03-30_001020_1431392781.txt checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyy-MM-dd", "_", 3); } else if (type.equals("Problem")) { //rnj_pc_prob_20190328-011001.dat checkForMissingFilesByDate(type, 
hmReceivedDate, files, "yyyyMMdd", "_|-", 3); } else if (type.equals("Procedure")) { //rnj_pc_proc_20180716-010530.dat checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_|-", 3); } else if (type.equals("SurginetCaseInfo")) { //spfit_sn_case_info_rnj_20190812-093823.dat checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_|-", 5); } else if (type.equals("SusEmergencyCareDataSet")) { //susecd.190360 AND susecd_BH.190039 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusEmergencyCareDataSetTail")) { //tailecd_DIS.190362 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusInpatient")) { //ip_BH_193174_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "_", 2); } else if (type.equals("SusInpatientTail")) { //tailip_DIS.203225_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.|_", 2); } else if (type.equals("SusOutpatient")) { //susopa_BH.204612 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusOutpatientTail")) { //tailopa_DIS.204610 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusOutpatientTail")) { //tailopa_DIS.204610 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("APPSL2")) { //GETL_APPSL2_80130_RNJ_10072018_065345_1.TXT checkForMissingFilesByDate(type, hmReceivedDate, files, "ddMMyyyy", "_", 4); } else if (type.equals("BlobContent")) { //Blob_Con_20190502_00198.csv checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_", 2); } else if (type.equals("FamilyHistory")) { //Fam_Hist_20190417_00326.csv checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_", 2); } else { String first = files.get(0); if (first.contains("_RNJ_")) { //CLEVE_80130_RNJ_15072018_045416_6.TXT checkForMissingFilesByDate(type, hmReceivedDate, files, "ddMMyyyy", "_", 3); } else { //Blob_Con_20190328_00170.csv checkForMissingFilesByDate(type, hmReceivedDate, files, "ddMMyyyy", "_", 2); } } } LOG.info("Finished Checking for Barts missing files"); } catch (Throwable t) { LOG.error("", t); } } private static void checkForMissingFilesByDate(String fileType, Map<String, Date> hmReceivedDate, List<String> files, String dateFormat, String delimiter, int token) throws Exception { SimpleDateFormat sdf = new SimpleDateFormat(dateFormat); SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); Date minDate = null; Date maxDate = null; Map<Date, List<String>> hmByDate = new HashMap<>(); for (String file: files) { String[] toks = file.split(delimiter); String tok = null; Date d = null; try { tok = toks[token]; d = sdf.parse(tok); } catch (ParseException pe) { LOG.error("Error parsing " + tok + " with format " + dateFormat + " toks " + toks, pe); return; } //LOG.debug("File " + file + " -> " + tok + " -> " + sdf.format(d)); if (minDate == null || d.before(minDate)) { minDate = d; } if (maxDate == null || d.after(maxDate)) { maxDate = d; } List<String> l = hmByDate.get(d); if (l == null) { l = new ArrayList<>(); hmByDate.put(d, l); } l.add(file); } LOG.info("Checking for date range " + sdfOutput.format(minDate) + " to " + sdfOutput.format(maxDate)); Calendar cal = Calendar.getInstance(); Date d = new Date(minDate.getTime()); while (!d.after(maxDate)) { List<String> l = hmByDate.get(d); if (l == null) { cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, -1); Date dateBefore = cal.getTime(); List<String> 
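// when a day has no file, report the nearest files either side of the gap, and when
// they were received, to help judge whether data genuinely went missing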
before = hmByDate.get(dateBefore); String beforeDesc = null; if (before != null) { String firstBefore = before.get(0); Date beforeReceived = hmReceivedDate.get(firstBefore); beforeDesc = firstBefore + " on " + sdfOutput.format(beforeReceived); } cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, 1); Date dateAfter = cal.getTime(); List<String> after = hmByDate.get(dateAfter); String afterDesc = null; if (after != null) { String firstAfter = after.get(0); Date afterReceived = hmReceivedDate.get(firstAfter); afterDesc = firstAfter + " on " + sdfOutput.format(afterReceived); } LOG.error("No " + fileType + " file found for " + sdfOutput.format(d) + " previous [" + beforeDesc + "] after [" + afterDesc + "]"); } cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, 1); d = cal.getTime(); } } private static void checkForMissingFilesByNumber(String fileType, Map<String, Date> hmReceivedDate, List<String> files, String delimiter, int token) { SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); int maxNum = 0; int minNum = Integer.MAX_VALUE; Map<Integer, List<String>> hmByNum = new HashMap<>(); for (String file: files) { String[] toks = file.split(delimiter); String tok = null; int num = 0; try { tok = toks[token]; num = Integer.parseInt(tok); } catch (Exception ex) { LOG.error("Exception with " + file + " tok = [" + tok + "] and toks " + toks , ex); return; } maxNum = Math.max(num, maxNum); minNum = Math.min(num, minNum); List<String> l = hmByNum.get(new Integer(num)); if (l == null) { l = new ArrayList<>(); hmByNum.put(new Integer(num), l); } l.add(file); } LOG.info("Checking for number range " + minNum + " to " + maxNum); for (int i=minNum; i<=maxNum; i++) { List<String> l = hmByNum.get(new Integer(i)); if (l == null) { List<String> before = hmByNum.get(new Integer(i-1)); String beforeDesc = null; if (before != null) { String firstBefore = before.get(0); Date beforeReceived = hmReceivedDate.get(firstBefore); beforeDesc = firstBefore + " on " + sdfOutput.format(beforeReceived); } List<String> after = hmByNum.get(new Integer(i+1)); String afterDesc = null; if (after != null) { String firstAfter = after.get(0); Date afterReceived = hmReceivedDate.get(firstAfter); afterDesc = firstAfter + " on " + sdfOutput.format(afterReceived); } LOG.error("No " + fileType + " file found for " + i + " previous [" + beforeDesc + "] after [" + afterDesc + "]"); } } }*/ /*private static void deleteEnterpriseObs(String filePath, String configName, int batchSize) { LOG.info("Deleting Enterprise Observations"); try { String parent = FilenameUtils.getFullPath(filePath); String name = FilenameUtils.getName(filePath); String doneFilePath = FilenameUtils.concat(parent, "DONE" + name); Set<String> doneIds = new HashSet<>(); File f = new File(doneFilePath); if (f.exists()) { List<String> doneLines = Files.readAllLines(f.toPath()); for (String doneLine: doneLines) { doneIds.add(doneLine); } } LOG.debug("Previously done " + doneIds.size()); List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(configName); CSVParser parser = CSVParser.parse(new File(filePath), Charset.defaultCharset(), CSVFormat.TDF.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); List<String> batch = new ArrayList<>(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String id = record.get("id"); if (doneIds.contains(id)) { continue; } doneIds.add(id); batch.add(id); if (batch.size() >= batchSize) { saveBatch(batch, connectionWrappers, doneFilePath); } if (doneIds.size() % 1000 == 0) 
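// progress logging only; the DONE file written by saveBatch is what lets the
// delete be resumed if the job is stopped part-way through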
{ LOG.debug("Done " + doneIds.size()); } } if (!batch.isEmpty()) { saveBatch(batch, connectionWrappers, doneFilePath); LOG.debug("Done " + doneIds.size()); } parser.close(); LOG.info("Finished Deleting Enterprise Observations"); } catch (Throwable t) { LOG.error("", t); } }*/ private static void saveBatch(List<String> batch, List<EnterpriseConnector.ConnectionWrapper> connectionWrappers, String doneFilePath) throws Exception { for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) { String sql = "DELETE FROM observation WHERE id = ?"; Connection connection = connectionWrapper.getConnection(); PreparedStatement ps = connection.prepareStatement(sql); for (String id: batch) { ps.setLong(1, Long.parseLong(id)); ps.addBatch(); } ps.executeBatch(); connection.commit(); ps.close(); connection.close(); } //update audit Files.write(new File(doneFilePath).toPath(), batch, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); batch.clear(); } /*private static void testS3Listing(String path) { LOG.info("Testing S3 Listing"); try { LOG.info("Trying with full path: " + path); List<FileInfo> l = FileHelper.listFilesInSharedStorageWithInfo(path); LOG.info("Found " + l.size()); *//*for (FileInfo info: l) { LOG.info("Got " + info.getFilePath()); }*//* String parent = FilenameUtils.getFullPath(path); LOG.info("Trying with parent: " + parent); l = FileHelper.listFilesInSharedStorageWithInfo(parent); LOG.info("Found " + l.size()); *//*for (FileInfo info: l) { LOG.info("Got " + info.getFilePath()); }*//* LOG.info("Finished Testing S3 Listing"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testAuditingFile(UUID serviceId, UUID systemId, UUID exchangeId, String version, String filePath) { LOG.info("Testing Auditing File"); try { LOG.info("Creating parser"); //org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation obsParser = new org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation(serviceId, systemId, exchangeId, version, filePath); org.endeavourhealth.transform.tpp.csv.schema.staff.SRStaffMemberProfile obsParser = new org.endeavourhealth.transform.tpp.csv.schema.staff.SRStaffMemberProfile(serviceId, systemId, exchangeId, version, filePath); LOG.info("Created parser"); obsParser.nextRecord(); LOG.info("Done auditing"); obsParser.close(); LOG.info("Closed"); LOG.info("Finish Testing Auditing File"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void postPatientToProtocol(String odsCode, String patientUuid) { LOG.info("Posting patient " + patientUuid + " for " + odsCode + " to Protocol queue"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); UUID patientId = UUID.fromString(patientUuid); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); //exchanges are in order most recent first, so iterate backwards to get them in date order for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = 
exchanges.get(i);

                List<UUID> batchesForPatient = new ArrayList<>();

                List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId());
                for (ExchangeBatch batch: batches) {
                    if (batch.getEdsPatientId() != null && batch.getEdsPatientId().equals(patientId)) {
                        batchesForPatient.add(batch.getBatchId());
                    }
                }

                if (!batchesForPatient.isEmpty()) {
                    LOG.debug("Posting " + batchesForPatient.size() + " for exchange " + exchange.getId() + " to rabbit");

                    //set new batch ID in exchange header
                    String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchesForPatient.toArray());
                    exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);

                    //post new batch to protocol Q
                    PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
                    PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
                    component.process(exchange);
                }
            }

            LOG.info("Finished posting patient " + patientUuid + " for " + odsCode + " to Protocol queue");

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }*/

    /**
     * Reads a list of patient UUIDs (one per line) from sourceFile, then finds every exchange batch
     * belonging to those patients and re-posts the owning exchanges to the EdsProtocol queue for re-processing.
     */
    private static void postPatientsToProtocol(UUID serviceId, UUID systemId, String sourceFile) {
        try {
            LOG.info("Posting patients from " + sourceFile + " for " + serviceId + " to Protocol queue");

            Set<UUID> hsPatientUuids = new HashSet<>();
            List<String> lines = Files.readAllLines(new File(sourceFile).toPath());
            for (String line: lines) {
                hsPatientUuids.add(UUID.fromString(line));
            }
            LOG.info("Found " + hsPatientUuids.size() + " patient IDs");

            ServiceDalI serviceDal = DalProvider.factoryServiceDal();
            Service service = serviceDal.getById(serviceId);
            LOG.info("Service " + service.getId() + " -> " + service.getName());

            ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
            List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE);
            LOG.info("Found " + exchanges.size() + " exchanges");

            ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();

            //exchanges are in order most recent first, so iterate backwards to get them in date order
            for (int i=exchanges.size()-1; i>=0; i--) {
                Exchange exchange = exchanges.get(i);

                List<UUID> batchesForPatient = new ArrayList<>();

                List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId());
                for (ExchangeBatch batch: batches) {
                    UUID patientId = batch.getEdsPatientId();
                    if (patientId != null && hsPatientUuids.contains(patientId)) {
                        batchesForPatient.add(batch.getBatchId());
                    }
                }

                if (!batchesForPatient.isEmpty()) {
                    LOG.debug("Posting " + batchesForPatient.size() + " for exchange " + exchange.getId() + " to rabbit");

                    //set new batch ID in exchange header
                    String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchesForPatient.toArray());
                    exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString);

                    //post new batch to protocol Q
                    PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
                    PostMessageToExchange component = new PostMessageToExchange(exchangeConfig);
                    component.process(exchange);
                }
            }

            LOG.info("Finished posting patients from " + sourceFile + " for " + serviceId + " to Protocol queue");

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    /*private static void testXml() {
        LOG.info("Testing XML");
        try {
            //PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol");
            Map<String, String> queueReadConfigs = ConfigManager.getConfigurations("queuereader");
            for (String configId: queueReadConfigs.keySet()) {
                LOG.debug("Checking config XML for " + configId);
                String configXml =
queueReadConfigs.get(configId); if (configXml.startsWith("{")) { LOG.debug("Skipping JSON"); continue; } try { ApiConfiguration config = ConfigWrapper.deserialise(configXml); //LOG.debug("Deserialised as messaging API XML"); ApiConfiguration.PostMessageAsync postConfig = config.getPostMessageAsync(); } catch (Exception ex) { try { QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); } catch (Exception ex2) { LOG.error(configXml); LOG.error("", ex2); } } } LOG.info("Testing XML"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testMetrics() { LOG.info("Testing Metrics"); try { Random r = new Random(System.currentTimeMillis()); while (true) { String metric1 = "frailty-api.ms-duration"; Integer value1 = new Integer(r.nextInt(1000)); MetricsHelper.recordValue(metric1, value1); if (r.nextBoolean()) { MetricsHelper.recordEvent("frailty-api.response-code-200"); } else { MetricsHelper.recordEvent("frailty-api.response-code-400"); } int sleep = r.nextInt(10 * 1000); LOG.debug("Waiting " + sleep + " ms"); Thread.sleep(sleep); } } catch (Throwable t) { LOG.error("", t); } } private static void testGraphiteMetrics(String host, String port) { LOG.info("Testing Graphite metrics to " + host + " " + port); try { InetAddress ip = InetAddress.getLocalHost(); String hostname = ip.getHostName(); LOG.debug("Hostname = " + hostname); String appId = ConfigManager.getAppId(); LOG.debug("AppID = " + appId); Random r = new Random(System.currentTimeMillis()); while (true) { Map<String, Object> metrics = new HashMap<>(); String metric1 = hostname + "." + appId + ".frailty-api.duration-ms"; Integer value1 = new Integer(r.nextInt(1000)); metrics.put(metric1, value1); String metric2 = hostname + "." + appId+ ".frailty-api.response-code"; Integer value2 = null; if (r.nextBoolean()) { value2 = new Integer(200); } else { value2 = new Integer(400); } metrics.put(metric2, value2); long timestamp = System.currentTimeMillis() / 1000; LOG.debug("Sending metrics"); sendMetrics(host, Integer.parseInt(port), metrics, timestamp); int sleep = r.nextInt(10 * 1000); LOG.debug("Waiting " + sleep + " ms"); Thread.sleep(sleep); } } catch (Throwable t) { LOG.error("", t); } } private static void sendMetrics(String graphiteHost, int graphitePort, Map<String, Object> metrics, long timeStamp) throws Exception { Socket socket = new Socket(graphiteHost, graphitePort); OutputStream s = socket.getOutputStream(); PrintWriter out = new PrintWriter(s, true); for (Map.Entry<String, Object> metric: metrics.entrySet()) { if (metric.getValue() instanceof Integer) { out.printf("%s %d %d%n", metric.getKey(), ((Integer)metric.getValue()).intValue(), timeStamp); } else if (metric.getValue() instanceof Float) { out.printf("%s %f %d%n", metric.getKey(), ((Float)metric.getValue()).floatValue(), timeStamp); } else { throw new RuntimeException("Unsupported type " + metric.getValue().getClass()); } } out.close(); socket.close(); }*/ /*private static void fixEmisDeletedPatients(String odsCode) { LOG.info("Fixing Emis Deleted Patients for " + odsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); 
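            //summary of the fix logic below: each exchange's Admin_Patient and Agreements_SharingOrganisation
            //files are re-parsed in date order; patients whose record carried a date of death or deduction and
            //were later deleted (while the sharing agreement was still enabled), or who were deleted and then
            //subsequently un-deleted, are collected into hmPatientGuidsToFix so their most recent non-deleted
            //FHIR versions can be re-filed and re-posted to the protocol queue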
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); Set<String> hsPatientGuidsDeductedDeceased = new HashSet<>(); Map<String, List<UUID>> hmPatientGuidsDeleted = new HashMap<>(); Map<String, List<String>> hmPatientGuidsToFix = new HashMap<>(); //exchanges are in REVERSE order (most recent first) for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } //skip if we're ignoring old data boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } //find patient file ExchangePayloadFile patientFile = findFileOfType(files, "Admin_Patient"); if (patientFile == null) { throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId()); } ExchangePayloadFile agreementFile = findFileOfType(files, "Agreements_SharingOrganisation"); if (agreementFile == null) { throw new Exception("Failed to find Agreements_SharingOrganisation file in exchange " + exchange.getId()); } //work out file version List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); filesTmp.add(agreementFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); //see if sharing agreement is disabled String path = agreementFile.getPath(); org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation agreementParser = new org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation(serviceId, systemId, exchange.getId(), version, path); agreementParser.nextRecord(); CsvCell disabled = agreementParser.getDisabled(); boolean isDisabled = disabled.getBoolean(); //create the parser path = patientFile.getPath(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); CsvCell dateOfDeathCell = parser.getDateOfDeath(); CsvCell dateOfDeductionCell = parser.getDateOfDeactivation(); CsvCell deletedCell = parser.getDeleted(); if (deletedCell.getBoolean()) { List<UUID> exchangesDeleted = hmPatientGuidsDeleted.get(patientGuid); if (exchangesDeleted == null) { exchangesDeleted = new ArrayList<>(); hmPatientGuidsDeleted.put(patientGuid, exchangesDeleted); } exchangesDeleted.add(exchange.getId()); //if this patient was previously updated with a deduction date or date of death, and the sharing //agreement isn't disabled, then we will have deleted them and need to undelete if (hsPatientGuidsDeductedDeceased.contains(patientGuid) && !isDisabled) { List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid); if (exchangesToFix == null) { exchangesToFix = new ArrayList<>(); hmPatientGuidsToFix.put(patientGuid, exchangesToFix); } exchangesToFix.add(exchange.getId().toString() + ": Deducted/Dead and Deleted after"); } } else { //if the date of death of deduction is set then we need to track this //because we're going to possibly get a delete in a years time if (!dateOfDeathCell.isEmpty() || !dateOfDeductionCell.isEmpty()) { hsPatientGuidsDeductedDeceased.add(patientGuid); } else { 
hsPatientGuidsDeductedDeceased.remove(patientGuid); } //if this patient was previously deleted and is now UN-deleted, then we'll //need to fix the record if (hmPatientGuidsDeleted.containsKey(patientGuid)) { List<UUID> exchangesDeleted = hmPatientGuidsDeleted.remove(patientGuid); List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid); if (exchangesToFix == null) { exchangesToFix = new ArrayList<>(); hmPatientGuidsToFix.put(patientGuid, exchangesToFix); } for (UUID exchangeId: exchangesDeleted) { exchangesToFix.add(exchangeId.toString() + ": Deleted and subsequently undeleted"); } } } } parser.close(); } LOG.info("Finished checking for affected patients - found " + hmPatientGuidsToFix.size() + " patients to fix"); for (String patientGuid: hmPatientGuidsToFix.keySet()) { List<String> exchangeIds = hmPatientGuidsToFix.get(patientGuid); LOG.info("Patient " + patientGuid); for (String exchangeId: exchangeIds) { LOG.info(" Exchange Id " + exchangeId); } //log out the UUID for the patient too EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, null, null, false, null); Reference ref = ReferenceHelper.createReference(ResourceType.Patient, patientGuid); ref = IdHelper.convertLocallyUniqueReferenceToEdsReference(ref, csvHelper); LOG.debug(" Patient UUID " + ref.getReference()); String patientUuidStr = ReferenceHelper.getReferenceId(ref); UUID patientUuid = UUID.fromString(patientUuidStr); Set<UUID> hsExchangeIdsDone = new HashSet<>(); Set<String> resourcesDone = new HashSet<>(); for (String exchangeId: exchangeIds) { UUID exchangeUuid = UUID.fromString(exchangeId.split(":")[0]); //in some cases, the same exchange was found twice if (hsExchangeIdsDone.contains(exchangeUuid)) { continue; } hsExchangeIdsDone.add(exchangeUuid); Exchange exchange = exchangeDal.getExchange(exchangeUuid); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<UUID> batchIdsCreated = new ArrayList<>(); TransformError transformError = new TransformError(); FhirResourceFiler filer = new FhirResourceFiler(exchangeUuid, serviceId, systemId, transformError, batchIdsCreated); //get all exchange batches for our patient List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeUuid); for (ExchangeBatch batch: batches) { UUID batchPatient = batch.getEdsPatientId(); if (batchPatient == null || !batchPatient.equals(patientUuid)) { continue; } //get all resources for this batch List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesForBatch(serviceId, batch.getBatchId()); //restore each resource for (ResourceWrapper resourceWrapper: resourceWrappers) { //if an exchange was processed multiple times, we might try to pick up the same resource twice, so skip it String resourceRef = ReferenceHelper.createResourceReference(resourceWrapper.getResourceType(), resourceWrapper.getResourceId().toString()); if (resourcesDone.contains(resourceRef)) { continue; } resourcesDone.add(resourceRef); List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceWrapper.getResourceType(), resourceWrapper.getResourceId()); //most recent is first ResourceWrapper mostRecent = history.get(0); if (!mostRecent.isDeleted()) { continue; } //find latest non-deleted version and save it over the deleted version for (ResourceWrapper historyItem: history) { if (!historyItem.isDeleted()) { org.hl7.fhir.instance.model.Resource resource = FhirSerializationHelper.deserializeResource(historyItem.getResourceData()); 
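                            //re-filing this newest non-deleted version via the FhirResourceFiler undoes the
                            //erroneous delete; the batch IDs the filer creates are collected in batchIdsCreated
                            //and posted to the EdsProtocol queue once the exchange has been processed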
GenericBuilder builder = new GenericBuilder(resource); filer.savePatientResource(null, false, builder); break; } } } } filer.waitToFinish(); //set new batch ID in exchange header String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); //post new batch to protocol Q PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished Fixing Emis Deleted Patients for " + odsCode); } catch (Throwable t) { LOG.error("", t); } }*/ private static ExchangePayloadFile findFileOfType(List<ExchangePayloadFile> files, String fileType) { for (ExchangePayloadFile file: files) { if (file.getType().equals(fileType)) { return file; } } return null; } /*private static void fixEmisEpisodes2(String odsCode) { LOG.info("Fixing Emis Episodes (2) for " + odsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal(); Set<String> patientGuidsDone = new HashSet<>(); //exchanges are in REVERSE order (most recent first) for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } //skip if we're ignoring old data boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } //find patient file ExchangePayloadFile patientFile = null; for (ExchangePayloadFile file: files) { if (file.getType().equals("Admin_Patient")) { patientFile = file; break; } } if (patientFile == null) { throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId()); } String path = patientFile.getPath(); List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell deletedCell = parser.getDeleted(); if (deletedCell.getBoolean()) { continue; } //skip patients already done CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); if (patientGuidsDone.contains(patientGuid)) { continue; } patientGuidsDone.add(patientGuid); //check we've not already converted this patient previously (i.e. 
re-running this conversion) CsvCell startDateCell = parser.getDateOfRegistration(); if (startDateCell.isEmpty()) { LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId()); startDateCell = CsvCell.factoryDummyWrapper("1900-01-01"); } //save internal ID map String key = patientGuidCell.getString(); String value = startDateCell.getString(); internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value); } parser.close(); } LOG.info("Finished Fixing Emis Episodes (2) for " + odsCode); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixEmisEpisodes1(String odsCode) { LOG.info("Fixing Emis Episodes (1) for " + odsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal(); Set<String> patientGuidsDone = new HashSet<>(); //exchanges are in REVERSE order (most recent first) for (Exchange exchange: exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } //skip if we're ignoring old data boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } //find patient file ExchangePayloadFile patientFile = null; for (ExchangePayloadFile file: files) { if (file.getType().equals("Admin_Patient")) { patientFile = file; break; } } if (patientFile == null) { throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId()); } String path = patientFile.getPath(); List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell deletedCell = parser.getDeleted(); if (deletedCell.getBoolean()) { continue; } //skip patients already done CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); if (patientGuidsDone.contains(patientGuid)) { continue; } patientGuidsDone.add(patientGuid); //check we've not already converted this patient previously (i.e. 
re-running this conversion) String key = patientGuidCell.getString(); String existingIdMapValue = internalIdDal.getDestinationId(serviceId, "Emis_Latest_Reg_Date", key); if (existingIdMapValue != null) { continue; } CsvCell startDateCell = parser.getDateOfRegistration(); if (startDateCell.isEmpty()) { LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId()); startDateCell = CsvCell.factoryDummyWrapper("1900-01-01"); } //find the existing UUID we've previously allocated String oldSourceId = patientGuid; UUID episodeUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.EpisodeOfCare, oldSourceId); if (episodeUuid == null) { LOG.error("Null episode UUID for old source ID " + oldSourceId + " in exchange " + exchange.getId()); continue; } //save ID reference mapping String newSourceId = patientGuid + ":" + startDateCell.getString(); UUID newEpisodeUuid = IdHelper.getOrCreateEdsResourceId(serviceId, ResourceType.EpisodeOfCare, newSourceId, episodeUuid); if (!newEpisodeUuid.equals(episodeUuid)) { throw new Exception("Failed to carry over UUID for episode. Old UUID was " + episodeUuid + " new UUID is " + newEpisodeUuid + " in exchange " + exchange.getId()); } //save internal ID map String value = startDateCell.getString(); internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value); } parser.close(); } LOG.info("Finished Fixing Emis Episodes (1) for " + odsCode); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testRabbit(String nodes, String username, String password, String sslProtocol, String exchangeName, String queueName) { LOG.info("Testing RabbitMQ Connectivity on " + nodes); LOG.info("SSL Protocol = " + sslProtocol); LOG.info("Exchange = " + exchangeName); LOG.info("Queue = " + queueName); try { //test publishing LOG.info("Testing publishing..."); com.rabbitmq.client.Connection publishConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol); Channel publishChannel = org.endeavourhealth.core.queueing.ConnectionManager.getPublishChannel(publishConnection, exchangeName); publishChannel.confirmSelect(); for (int i=0; i<5; i++) { Map<String, Object> headers = new HashMap<>(); headers.put("HeaderIndex", "" + i); AMQP.BasicProperties properties = new AMQP.BasicProperties() .builder() .deliveryMode(2) // Persistent message .headers(headers) .build(); String body = "MessageIndex = " + i; byte[] bytes = body.getBytes(); publishChannel.basicPublish( exchangeName, "All", //routing key properties, bytes); } publishChannel.close(); publishConnection.close(); LOG.info("...Finished testing publishing"); //test consuming LOG.info("Testing reading..."); com.rabbitmq.client.Connection readConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol); Channel readChannel = readConnection.createChannel(); readChannel.basicQos(1); Consumer consumer = new TestRabbitConsumer(readChannel); readChannel.basicConsume(queueName, false, "TestRabbitConsumer", false, true, null, consumer); LOG.info("Reader Connected (ctrl+c to close) will quit in 30s"); Thread.sleep(30 * 1000); LOG.info("Finished Testing RabbitMQ Connectivity"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void populateLastDataDate(int threads, int batchSize) { LOG.debug("Populating last data date"); try { int processed = 0; AtomicInteger fixed = new AtomicInteger(); ThreadPool threadPool = new ThreadPool(threads, batchSize); while (true) { String 
sql = "SELECT id FROM drewtest.exchange_ids WHERE done = 0 LIMIT " + batchSize; //LOG.debug("Getting new batch using: " + sql); EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager(); SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate(); Connection auditConnection = auditSession.connection(); Statement statement = auditConnection.createStatement(); ResultSet rs = statement.executeQuery(sql); List<UUID> exchangeIds = new ArrayList<>(); while (rs.next()) { String s = rs.getString(1); //LOG.debug("Got back exchange ID " + s); exchangeIds.add(UUID.fromString(s)); } rs.close(); statement.close(); auditEntityManager.close(); for (UUID exchangeId: exchangeIds) { threadPool.submit(new PopulateDataDateCallable(exchangeId, fixed)); } List<ThreadPoolError> errs = threadPool.waitUntilEmpty(); if (!errs.isEmpty()) { LOG.debug("Got " + errs.size() + " errors"); for (ThreadPoolError err: errs) { LOG.error("", err.getException()); } break; } processed += exchangeIds.size(); LOG.debug("processed " + processed + " fixed " + fixed.get()); //if finished if (exchangeIds.size() < batchSize) { break; } } threadPool.waitAndStop(); LOG.debug("Finished Populating last data date"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixEmisMissingSlots(String serviceOdsCode) { LOG.debug("Fixing Emis Missing Slots for " + serviceOdsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(serviceOdsCode); LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); Set<String> hsSlotsToSkip = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); File auditFile = new File("SlotAudit_" + serviceOdsCode + ".csv"); LOG.debug("Auditing to " + auditFile); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); if (exchangeConfig == null) { throw new Exception("Failed to find PostMessageToExchange config details for exchange EdsProtocol"); } //the list of exchanges is most-recent-first, so iterate backwards to do them in order for (Exchange exchange : exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } ExchangeTransformAudit transformAudit = new ExchangeTransformAudit(); transformAudit.setServiceId(serviceId); transformAudit.setSystemId(systemId); transformAudit.setExchangeId(exchange.getId()); transformAudit.setId(UUID.randomUUID()); transformAudit.setStarted(new Date()); String version = EmisCsvToFhirTransformer.determineVersion(files); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), null, processPatientData, null); //the processor is responsible for saving FHIR resources TransformError transformError = new TransformError(); List<UUID> batchIdsCreated = new ArrayList<>(); FhirResourceFiler fhirResourceFiler = new 
FhirResourceFiler(exchange.getId(), serviceId, systemId, transformError, batchIdsCreated); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchange.getId(), files, version, parsers); try { //cache the practitioners for each session SessionUserTransformer.transform(parsers, fhirResourceFiler, csvHelper); Slot parser = (Slot) parsers.get(Slot.class); while (parser.nextRecord()) { //should this record be transformed? //the slots CSV contains data on empty slots too; ignore them CsvCell patientGuid = parser.getPatientGuid(); if (patientGuid.isEmpty()) { continue; } //the EMIS data contains thousands of appointments that refer to patients we don't have, so I'm explicitly //handling this here, and ignoring any Slot record that is in this state UUID patientEdsId = IdHelper.getEdsResourceId(fhirResourceFiler.getServiceId(), ResourceType.Patient, patientGuid.getString()); if (patientEdsId == null) { continue; } //see if this appointment has previously been transformed CsvCell slotGuid = parser.getSlotGuid(); String uniqueId = patientGuid.getString() + ":" + slotGuid.getString(); if (!hsSlotsToSkip.contains(uniqueId)) { //transform this slot record if no appt already exists for it boolean alreadyExists = false; UUID discoveryId = IdHelper.getEdsResourceId(serviceId, ResourceType.Slot, uniqueId); if (discoveryId != null) { List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Slot.toString(), discoveryId); if (!history.isEmpty()) { alreadyExists = true; } } if (alreadyExists) { hsSlotsToSkip.add(uniqueId); } } if (hsSlotsToSkip.contains(uniqueId)) { continue; } hsSlotsToSkip.add(uniqueId); try { LOG.debug("Creating slot for " + uniqueId); SlotTransformer.createSlotAndAppointment((Slot) parser, fhirResourceFiler, csvHelper); } catch (Exception ex) { fhirResourceFiler.logTransformRecordError(ex, parser.getCurrentState()); } } csvHelper.clearCachedSessionPractitioners(); fhirResourceFiler.failIfAnyErrors(); fhirResourceFiler.waitToFinish(); } catch (Throwable ex) { Map<String, String> args = new HashMap<>(); args.put(TransformErrorUtility.ARG_FATAL_ERROR, ex.getMessage()); TransformErrorUtility.addTransformError(transformError, ex, args); LOG.error("", ex); } transformAudit.setEnded(new Date()); transformAudit.setNumberBatchesCreated(new Integer(batchIdsCreated.size())); if (transformError.getError().size() > 0) { transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError)); } //save our audit if something went wrong or was saved if (transformError.getError().size() > 0 || !batchIdsCreated.isEmpty()) { exchangeDal.save(transformAudit); } //send to Rabbit protocol queue if (!batchIdsCreated.isEmpty()) { //write batch ID to file, so we have an audit of what we created List<String> lines = new ArrayList<>(); for (UUID batchId : batchIdsCreated) { lines.add("\"" + exchange.getId() + "\",\"" + batchId + "\""); } Files.write(auditFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); String batchesJson = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchesJson); //send to Rabbit PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } if (transformError.getError().size() > 0) { throw new Exception("Dropping out due to error in transform"); } } LOG.debug("Finished Fixing Emis Missing Slots for " + serviceOdsCode); } 
catch (Throwable t) { LOG.error("", t); } } private static void findBartsPersonIds(String sourceFile, UUID serviceUuid, UUID systemUuid, String dateCutoffStr, String destFile) { LOG.debug("Finding Barts person IDs for " + sourceFile); try { //read NHS numbers into memory Set<String> hsNhsNumbers = new HashSet<>(); List<String> listNhsNumbers = new ArrayList<>(); File src = new File(sourceFile); List<String> lines = Files.readAllLines(src.toPath()); for (String line : lines) { String s = line.trim(); hsNhsNumbers.add(s); listNhsNumbers.add(s); //maintain a list so we can preserve the ordering } LOG.debug("Looking for Person IDs for " + hsNhsNumbers.size() + " nhs numbers or any since " + dateCutoffStr); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); Date dateCutoff = sdf.parse(dateCutoffStr); Map<String, Set<String>> hmMatches = new HashMap<>(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile file : files) { String parentPath = new File(file.getPath()).getParent(); String parentDir = FilenameUtils.getBaseName(parentPath); Date extractDate = sdf.parse(parentDir); boolean inDateRange = !extractDate.before(dateCutoff); String type = file.getType(); if (type.equals("PPATI")) { PPATI parser = new PPATI(null, null, null, null, file.getPath()); while (parser.nextRecord()) { CsvCell nhsNumberCell = parser.getNhsNumber(); String nhsNumber = nhsNumberCell.getString(); nhsNumber = nhsNumber.replace("-", ""); if (hsNhsNumbers.contains(nhsNumber) || inDateRange) { CsvCell personIdCell = parser.getMillenniumPersonId(); String personId = personIdCell.getString(); Set<String> s = hmMatches.get(nhsNumber); if (s == null) { s = new HashSet<>(); hmMatches.put(nhsNumber, s); } s.add(personId); } } parser.close(); } else if (type.equals("PPALI")) { PPALI parser = new PPALI(null, null, null, null, file.getPath()); while (parser.nextRecord()) { CsvCell aliasCell = parser.getAlias(); //not going to bother trying to filter on alias type, since it won't hurt to include //extra patients, if they have an MRN that accidentally matches one of the NHS numbers being searched for String alias = aliasCell.getString(); if (hsNhsNumbers.contains(alias) || inDateRange) { //NHS numbers in PPALI don't have the extra hyphens CsvCell personIdCell = parser.getMillenniumPersonIdentifier(); String personId = personIdCell.getString(); Set<String> s = hmMatches.get(alias); if (s == null) { s = new HashSet<>(); hmMatches.put(alias, s); } s.add(personId); } } parser.close(); } else { //just ignore other file types } } } LOG.debug("" + hmMatches.size() + " / " + hsNhsNumbers.size() + " NHS numbers had person IDs found"); List<String> newLines = new ArrayList<>(); for (String nhsNumber : listNhsNumbers) { Set<String> personIds = hmMatches.get(nhsNumber); if (personIds == null) { LOG.error("Failed to find person ID for " + nhsNumber); continue; } newLines.add("#NHS " + nhsNumber + ":"); for (String personId : personIds) { newLines.add(personId); } } File dst = new File(destFile); if (dst.exists()) { dst.delete(); } Files.write(dst.toPath(), newLines); LOG.debug("Finished Finding Barts person IDs for " + sourceFile); } catch (Throwable t) { LOG.error("", t); } }*/ private static void createEmisDataTables() { LOG.debug("Creating Emis data tables"); try { 
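            //each entry below names an Emis CSV file type; createEmisDataTable() instantiates the matching
            //parser and prints DROP/CREATE TABLE DDL to stdout, using the parser's column headers (or the
            //fixed-field list for fixed-width parsers) as varchar/mediumtext columns. Purely for illustration
            //(the column names here are hypothetical - the real ones come from the parser), the emitted DDL
            //has this shape:
            //  DROP TABLE IF EXISTS `Admin_Location`;
            //  CREATE TABLE `Admin_Location` (file_name varchar(100), extract_date datetime, LocationGuid varchar(255), ...);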
List<String> fileTypes = new ArrayList<>(); fileTypes.add("Admin_Location"); fileTypes.add("Admin_OrganisationLocation"); fileTypes.add("Admin_Organisation"); fileTypes.add("Admin_Patient"); fileTypes.add("Admin_UserInRole"); fileTypes.add("Agreements_SharingOrganisation"); fileTypes.add("Appointment_SessionUser"); fileTypes.add("Appointment_Session"); fileTypes.add("Appointment_Slot"); fileTypes.add("CareRecord_Consultation"); fileTypes.add("CareRecord_Diary"); fileTypes.add("CareRecord_ObservationReferral"); fileTypes.add("CareRecord_Observation"); fileTypes.add("CareRecord_Problem"); fileTypes.add("Coding_ClinicalCode"); fileTypes.add("Coding_DrugCode"); fileTypes.add("Prescribing_DrugRecord"); fileTypes.add("Prescribing_IssueRecord"); fileTypes.add("Audit_PatientAudit"); fileTypes.add("Audit_RegistrationAudit"); for (String fileType : fileTypes) { createEmisDataTable(fileType); } LOG.debug("Finished Creating Emis data tables"); } catch (Throwable t) { LOG.error("", t); } } private static void createEmisDataTable(String fileType) throws Exception { ParserI parser = createParserForEmisFileType(fileType, null); if (parser == null) { return; } System.out.println("-- " + fileType); String table = fileType.replace(" ", "_"); String dropSql = "DROP TABLE IF EXISTS `" + table + "`;"; System.out.println(dropSql); String sql = "CREATE TABLE `" + table + "` ("; sql += "file_name varchar(100)"; sql += ", "; sql += "extract_date datetime"; if (parser instanceof AbstractFixedParser) { AbstractFixedParser fixedParser = (AbstractFixedParser) parser; List<FixedParserField> fields = fixedParser.getFieldList(); for (FixedParserField field : fields) { String col = field.getName(); int len = field.getFieldlength(); sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); sql += " varchar("; sql += len; sql += ")"; } } else { List<String> cols = parser.getColumnHeaders(); for (String col : cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); if (col.equals("BLOB_CONTENTS") || col.equals("VALUE_LONG_TXT") || col.equals("COMMENT_TXT") || col.equals("NONPREG_REL_PROBLM_SCT_CD")) { sql += " mediumtext"; } else if (col.indexOf("Date") > -1 || col.indexOf("Time") > -1) { sql += " varchar(10)"; } else { sql += " varchar(255)"; } } } sql += ");"; /*LOG.debug("-- fileType"); LOG.debug(sql);*/ System.out.println(sql); } /*private static void convertFhirAudits(String publisherConfigName, int threads, int batchSize) throws Exception { LOG.info("Converting FHIR audit for " + publisherConfigName); try { //find a suitable service ID UUID dummyServiceId = null; ServiceDalI serviceDal = DalProvider.factoryServiceDal(); for (Service s : serviceDal.getAll()) { if (s.getPublisherConfigName() != null && s.getPublisherConfigName().equalsIgnoreCase(publisherConfigName)) { dummyServiceId = s.getId(); LOG.info("Found sample service ID " + s.getId() + " " + s.getName() + " " + s.getLocalId()); break; } } EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); //ensure all source files are mapped to published files LOG.debug("Mapping source files to published files"); String sql = "SELECT id, service_id, system_id, file_path, exchange_id, description" + " FROM source_file_mapping" + " WHERE new_published_file_id IS NULL"; PreparedStatement ps = connection.prepareStatement(sql); List<FileDesc> fileDescs = new ArrayList<>(); 
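            //the query above collects source_file_mapping rows that have no new_published_file_id yet; each
            //file is then re-audited via auditParser() and the resulting published-file ID is written back to
            //source_file_mapping in batches of batchSize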
ResultSet rs = ps.executeQuery(); while (rs.next()) { int col = 1; FileDesc f = new FileDesc(); f.id = rs.getInt(col++); f.serviceId = UUID.fromString(rs.getString(col++)); f.systemId = UUID.fromString(rs.getString(col++)); f.filePath = rs.getString(col++); f.exchangeId = UUID.fromString(rs.getString(col++)); f.fileDesc = rs.getString(col++); fileDescs.add(f); } ps.close(); entityManager.close(); LOG.debug("Found " + fileDescs.size() + " files to map"); List<FileDesc> batch = new ArrayList<>(); for (int i = 0; i < fileDescs.size(); i++) { FileDesc f = fileDescs.get(i); Integer newFileAuditId = auditParser(f.serviceId, f.systemId, f.exchangeId, f.filePath, f.fileDesc); if (newFileAuditId == null) { continue; } f.newId = newFileAuditId; batch.add(f); if (batch.size() >= batchSize || i + 1 >= fileDescs.size()) { entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); sql = "UPDATE source_file_mapping" + " SET new_published_file_id = ?" + " WHERE id = ?"; ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); for (FileDesc toSave : batch) { int col = 1; ps.setInt(col++, toSave.newId); ps.setInt(col++, toSave.id); ps.addBatch(); } ps.executeBatch(); entityManager.getTransaction().commit(); ps.close(); entityManager.close(); } if (i % 100 == 0) { LOG.debug("Audited " + i + " files"); } } LOG.info("Finished Converting FHIR audit for " + publisherConfigName); } catch (Throwable t) { LOG.error("", t); } }*/ private static UUID findSuitableServiceIdForPublisherConfig(String publisherConfigName) throws Exception { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); for (Service s: serviceDal.getAll()) { if (s.getPublisherConfigName() != null && s.getPublisherConfigName().equalsIgnoreCase(publisherConfigName)) { return s.getId(); } } throw new Exception("Failed to find suitable service ID for publisher [" + publisherConfigName + "]"); } private static void convertFhirAudits2(String publisherConfigName, String tempTable, int threads, int batchSize, boolean testMode) throws Exception { LOG.info("Converting FHIR audit for " + publisherConfigName); try { //find a suitable service ID UUID dummyServiceId = findSuitableServiceIdForPublisherConfig(publisherConfigName); ThreadPool threadPool = new ThreadPool(threads, 1000); int done = 0; while (true) { String sql = "SELECT c.resource_id, c.resource_type, c.created_at, m.version, m.mappings_json" + " FROM " + tempTable + " c" + " INNER JOIN resource_field_mappings m" + " ON c.resource_id = m.resource_id" + " AND c.resource_type = m.resource_type" + " AND c.created_at = m.created_at" + " WHERE c.done = false" + " LIMIT " + batchSize; Map<ResourceWrapper, ResourceFieldMappingAudit> map = new HashMap<>(); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); PreparedStatement ps = connection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { int col = 1; ResourceWrapper r = new ResourceWrapper(); r.setResourceId(UUID.fromString(rs.getString(col++))); r.setResourceType(rs.getString(col++)); r.setCreatedAt(new Date(rs.getTimestamp(col++).getTime())); r.setVersion(UUID.fromString(rs.getString(col++))); ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(rs.getString(col++)); map.put(r, audit); } ps.close(); 
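                //the batch just read pairs each resource version (resource_id, resource_type, created_at, version)
                //with its audit JSON; each audit is converted on the thread pool below, then, unless running in
                //test mode, the updated mappings_json is written back to resource_field_mappings and the matching
                //temp-table row is flagged as done. The outer loop exits when a batch comes back smaller than
                //batchSize, or immediately after the first batch in test mode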
entityManager.close(); boolean lastOne = map.size() < batchSize; for (ResourceWrapper r: map.keySet()) { ResourceFieldMappingAudit audit = map.get(r); ConvertFhirAuditCallable c = new ConvertFhirAuditCallable(testMode, dummyServiceId, audit, r); List<ThreadPoolError> errors = threadPool.submit(c); handleErrors(errors); } //now save everything List<ThreadPoolError> errors = threadPool.waitUntilEmpty(); handleErrors(errors); done += map.size(); if (!testMode) { entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); //save all audits sql = "UPDATE resource_field_mappings" + " SET mappings_json = ?" + " WHERE resource_id = ?" + " AND resource_type = ?" + " AND created_at = ?" + " AND version = ?"; ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); for (ResourceWrapper r : map.keySet()) { ResourceFieldMappingAudit audit = map.get(r); String auditJson = audit.writeToJson(); int col = 1; ps.setString(col++, auditJson); ps.setString(col++, r.getResourceId().toString()); ps.setString(col++, r.getResourceType()); ps.setTimestamp(col++, new Timestamp(r.getCreatedAt().getTime())); ps.setString(col++, r.getVersion().toString()); ps.addBatch(); } ps.executeBatch(); entityManager.getTransaction().commit(); ps.close(); entityManager.close(); entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); //mark temp table as done sql = "UPDATE " + tempTable + " SET done = true" + " WHERE done = false" + " AND resource_id = ?" + " AND resource_type = ?" + " AND created_at = ?"; ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); for (ResourceWrapper r : map.keySet()) { int col = 1; ps.setString(col++, r.getResourceId().toString()); ps.setString(col++, r.getResourceType()); ps.setTimestamp(col++, new Timestamp(r.getCreatedAt().getTime())); ps.addBatch(); } ps.executeBatch(); entityManager.getTransaction().commit(); ps.close(); entityManager.close(); } if (done % 1000 == 0) { LOG.info("Done " + done); } if (lastOne || testMode) { break; } } LOG.info("Done " + done); LOG.info("Finished Converting FHIR audit for " + publisherConfigName); } catch (Throwable t) { LOG.error("", t); } } private static void handleErrors(List<ThreadPoolError> errors) throws Exception { if (errors == null || errors.isEmpty()) { return; } //if we've had multiple errors, just throw the first one, since they'll most-likely be the same ThreadPoolError first = errors.get(0); Throwable cause = first.getException(); //the cause may be an Exception or Error so we need to explicitly //cast to the right type to throw it without changing the method signature if (cause instanceof Exception) { throw (Exception)cause; } else if (cause instanceof Error) { throw (Error)cause; } } static class ConvertFhirAuditCallable implements Callable { private Map<String, UUID> hmPublishers = null; private ResourceWrapper r; private ResourceFieldMappingAudit audit; private UUID dummyServiceId; private boolean testMode; public ConvertFhirAuditCallable(boolean testMode, UUID dummyServiceId, ResourceFieldMappingAudit audit, ResourceWrapper r) { this.testMode = testMode; this.dummyServiceId = dummyServiceId; this.audit = audit; this.r = r; } @Override public Object call() throws Exception { String auditJson = audit.writeToJson(); 
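            //auditJson is captured up front purely so test mode can log an old-vs-new comparison at the end.
            //Each old-style audit row ID is swapped for a (published file ID, record number) pair; for resource
            //types whose text/reference fields were populated from Emis reference and admin files (clinical codes,
            //drug codes, locations, organisations, practitioners) the source record may have been processed on a
            //different publisher server, so those IDs are resolved by searching every publisher database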
List<ResourceFieldMappingAudit.ResourceFieldMappingAuditRow> auditRows = audit.getAudits(); for (ResourceFieldMappingAudit.ResourceFieldMappingAuditRow auditRow: auditRows) { Long oldStyleAuditId = auditRow.getOldStyleAuditId(); //got some records with a mix of old and new-style audits so skip any rows that are new-style if (oldStyleAuditId == null) { continue; } //need to work out if it's one of the audits where the record ID is potentially on a different server boolean isPotentiallyOnAnotherServer = false; String desiredFileName = null; for (ResourceFieldMappingAudit.ResourceFieldMappingAuditCol auditCol: auditRow.getCols()) { if (r.getResourceType().equals(ResourceType.MedicationOrder.toString()) || r.getResourceType().equals(ResourceType.MedicationStatement.toString())) { if (auditCol.getField().equals("medicationCodeableConcept.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "DrugCode"; } } else if (r.getResourceType().equals(ResourceType.Observation.toString()) || r.getResourceType().equals(ResourceType.Condition.toString()) || r.getResourceType().equals(ResourceType.Procedure.toString()) || r.getResourceType().equals(ResourceType.DiagnosticReport.toString())) { if (auditCol.getField().equals("code.text") || auditCol.getField().equals("component[1].code.text") || auditCol.getField().equals("component[0].code.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.AllergyIntolerance.toString())) { if (auditCol.getField().equals("substance.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.FamilyMemberHistory.toString())) { if (auditCol.getField().equals("condition[0].code.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.Immunization.toString())) { if (auditCol.getField().equals("vaccineCode.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.DiagnosticOrder.toString())) { if (auditCol.getField().equals("item[0].code.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.ReferralRequest.toString())) { if (auditCol.getField().equals("serviceRequested[0].text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.Specimen.toString())) { if (auditCol.getField().equals("type.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.Location.toString())) { if (auditCol.getField().equals("managingOrganization.reference")) { isPotentiallyOnAnotherServer = true; desiredFileName = "OrganisationLocation"; } else { isPotentiallyOnAnotherServer = true; desiredFileName = "Location"; } } else if (r.getResourceType().equals(ResourceType.Organization.toString())) { isPotentiallyOnAnotherServer = true; desiredFileName = "Organisation"; } else if (r.getResourceType().equals(ResourceType.Practitioner.toString())) { isPotentiallyOnAnotherServer = true; desiredFileName = "UserInRole"; } if (isPotentiallyOnAnotherServer) { break; } } List<Integer> newIds = null; if (isPotentiallyOnAnotherServer) { newIds = findNewAuditIdOnAnyServer(oldStyleAuditId, desiredFileName); } else { newIds = findNewAuditIdOnThisServer(oldStyleAuditId); } Integer 
newFileAuditId = newIds.get(0); Integer newRecordNum = newIds.get(1); auditRow.setOldStyleAuditId(null); auditRow.setFileId(newFileAuditId.intValue()); auditRow.setRecord(newRecordNum.intValue()); } if (testMode) { String newAuditJson = audit.writeToJson(); String str = "Testing " + r.getResourceType() + " " + r.getResourceId() + " version " + r.getVersion() + " from " + r.getCreatedAt() + "\nOld JSON:" + "\n" + auditJson + "\nNew JSON:" + "\n" + newAuditJson; LOG.info(str); } return null; } private List<Integer> findNewAuditIdOnThisServer(Long oldStyleAuditId) throws Exception { EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); //need to convert oldStyleID to a fileID and record number String sql = "select r.source_location, f.new_published_file_id" + " from source_file_record r" + " inner join source_file_mapping f" + " on f.id = r.source_file_id" + " where r.id = ?"; PreparedStatement ps = connection.prepareStatement(sql); ps.setLong(1, oldStyleAuditId.longValue()); ResultSet rs = ps.executeQuery(); if (!rs.next()) { throw new Exception("Failed to find source record details for old style audit ID " + oldStyleAuditId + " in audit for " + r.getResourceType() + " " + r.getResourceId() + " from " + r.getCreatedAt()); } int col = 1; String recordNumStr = rs.getString(col++); int newPublishedFileId = rs.getInt(col++); ps.close(); entityManager.close(); List<Integer> ret = new ArrayList<>(); ret.add(new Integer(newPublishedFileId)); ret.add(Integer.valueOf(recordNumStr)); return ret; } private List<Integer> findNewAuditIdOnAnyServer(Long oldStyleAuditId, String desiredFileName) throws Exception { if (hmPublishers == null) { Map<String, UUID> map = new HashMap<>(); List<String> publishers = new ArrayList<>(); publishers.add("publisher_01"); publishers.add("publisher_02"); publishers.add("publisher_03"); publishers.add("publisher_04"); publishers.add("publisher_05"); for (String publisher: publishers) { UUID serviceId = findSuitableServiceIdForPublisherConfig(publisher); map.put(publisher, serviceId); } hmPublishers = map; } Integer foundRecordNum = null; Integer foundPublishedFileId = null; String foundOnPublisher = null; for (String publisher: hmPublishers.keySet()) { UUID serviceId = hmPublishers.get(publisher); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); //need to convert oldStyleID to a fileID and record number String sql = "select r.source_location, f.new_published_file_id" + " from source_file_record r" + " inner join source_file_mapping f" + " on f.id = r.source_file_id" + " where r.id = ?" 
+ " and f.file_path LIKE '%" + desiredFileName + "%'"; PreparedStatement ps = connection.prepareStatement(sql); ps.setLong(1, oldStyleAuditId.longValue()); ResultSet rs = ps.executeQuery(); if (rs.next()) { int col = 1; String recordNumStr = rs.getString(col++); int newPublishedFileId = rs.getInt(col++); ps.close(); entityManager.close(); if (foundPublishedFileId == null) { foundPublishedFileId = new Integer(newPublishedFileId); foundRecordNum = Integer.valueOf(recordNumStr); foundOnPublisher = publisher; } else { LOG.error("Old style audit = " + oldStyleAuditId); LOG.error("On " + foundOnPublisher + " found " + foundPublishedFileId + " published file ID and record number " + foundRecordNum); LOG.error("On " + publisher + " found " + newPublishedFileId + " published file ID and record number " + recordNumStr); throw new Exception("Found more than one matching file for old-style audit ID " + oldStyleAuditId + " and desired file name " + desiredFileName + " over all servers"); } } } if (foundPublishedFileId == null) { throw new Exception("Failed to find published file ID and record number for old-style audit ID " + oldStyleAuditId + " and desired file name " + desiredFileName + " over all servers"); } List<Integer> ret = new ArrayList<>(); ret.add(foundPublishedFileId); ret.add(foundRecordNum); return ret; } } /*static class FileDesc { int id; UUID serviceId; UUID systemId; String filePath; UUID exchangeId; String fileDesc; int newId; } private static Integer auditParser(UUID serviceId, UUID systemId, UUID exchangeId, String filePath, String fileDesc) throws Exception { ParserI parser = createParser(serviceId, systemId, exchangeId, filePath, fileDesc); if (parser == null) { LOG.debug("No parser created for " + fileDesc + " " + filePath); return null; } Integer newId = parser.ensureFileAudited(); if (newId == null) { throw new Exception("Null new ID for auditing file " + filePath); } return new Integer(newId); } private static ParserI createParser(UUID serviceId, UUID systemId, UUID exchangeId, String filePath, String fileDesc) throws Exception { if (fileDesc.startsWith("Vision ")) { if (fileDesc.equals("Vision organisations file")) { return new Practice(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision staff file")) { return new Staff(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision patient file")) { return new Patient(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision encounter file")) { return new Encounter(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision referrals file")) { return new Referral(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision journal file")) { return new Journal(serviceId, systemId, exchangeId, null, filePath); } else { throw new Exception("Unknown vision file [" + fileDesc + "]"); } } if (fileDesc.equals("Bespoke Emis registration status extract") || fileDesc.equals("RegistrationStatus")) { String DATE_FORMAT = "dd/MM/yyyy"; String TIME_FORMAT = "hh:mm:ss"; CSVFormat CSV_FORMAT = CSVFormat.TDF .withHeader() .withEscape((Character)null) .withQuote((Character)null) .withQuoteMode(QuoteMode.MINIMAL); //ideally want Quote Mdde NONE, but validation in the library means we need to use this; List<String> possibleVersions = new ArrayList<>(); possibleVersions.add(RegistrationStatus.VERSION_WITH_PROCESSING_ID); possibleVersions.add(RegistrationStatus.VERSION_WITHOUT_PROCESSING_ID); RegistrationStatus 
testParser = new RegistrationStatus(null, null, null, null, filePath, CSV_FORMAT, DATE_FORMAT, TIME_FORMAT); possibleVersions = testParser.testForValidVersions(possibleVersions); String version = possibleVersions.get(0); return new RegistrationStatus(serviceId, systemId, exchangeId, version, filePath, CSV_FORMAT, DATE_FORMAT, TIME_FORMAT); } if (fileDesc.equals("OriginalTerms")) { String DATE_FORMAT2 = "dd/MM/yyyy"; String TIME_FORMAT2 = "hh:mm:ss"; CSVFormat CSV_FORMAT2 = CSVFormat.TDF .withHeader() .withEscape((Character)null) .withQuote((Character)null) .withQuoteMode(QuoteMode.MINIMAL); //ideally want Quote Mdde NONE, but validation in the library means we need to use this; return new OriginalTerms(serviceId, systemId, exchangeId, null, filePath, CSV_FORMAT2, DATE_FORMAT2, TIME_FORMAT2); } if (filePath.contains("EMIS")) { if (fileDesc.equals("Emis appointments file")) { fileDesc = "Slot"; } else if (fileDesc.equals("Emis appointments session file")) { fileDesc = "Session"; } else if (fileDesc.equals("Emis clinical code reference file")) { fileDesc = "ClinicalCode"; } else if (fileDesc.equals("Emis consultations file")) { fileDesc = "Consultation"; } else if (fileDesc.equals("Emis diary file")) { fileDesc = "Diary"; } else if (fileDesc.equals("Emis drug code reference file")) { fileDesc = "DrugCode"; } else if (fileDesc.equals("Emis drug record file")) { fileDesc = "DrugRecord"; } else if (fileDesc.equals("Emis issue records file")) { fileDesc = "IssueRecord"; } else if (fileDesc.equals("Emis observations file")) { fileDesc = "Observation"; } else if (fileDesc.equals("Emis organisation location file")) { fileDesc = "Location"; } else if (fileDesc.equals("Emis organisation-location link file")) { fileDesc = "OrganisationLocation"; } else if (fileDesc.equals("Emis organisations file")) { fileDesc = "Organisation"; } else if (fileDesc.equals("Emis patient file")) { fileDesc = "Patient"; } else if (fileDesc.equals("Emis problems file")) { fileDesc = "Problem"; } else if (fileDesc.equals("Emis referrals file")) { fileDesc = "ObservationReferral"; } else if (fileDesc.equals("Emis session-user link file")) { fileDesc = "SessionUser"; } else if (fileDesc.equals("Emis sharing agreements file")) { fileDesc = "SharingOrganisation"; } else if (fileDesc.equals("Emis staff file")) { fileDesc = "UserInRole"; } String fileType = null; switch (fileDesc) { case "ClinicalCode": fileType = "Coding_ClinicalCode"; break; case "Consultation": fileType = "CareRecord_Consultation"; break; case "Diary": fileType = "CareRecord_Diary"; break; case "DrugCode": fileType = "Coding_DrugCode"; break; case "DrugRecord": fileType = "Prescribing_DrugRecord"; break; case "IssueRecord": fileType = "Prescribing_IssueRecord"; break; case "Location": fileType = "Admin_Location"; break; case "Observation": fileType = "CareRecord_Observation"; break; case "ObservationReferral": fileType = "CareRecord_ObservationReferral"; break; case "Organisation": fileType = "Admin_Organisation"; break; case "OrganisationLocation": fileType = "Admin_OrganisationLocation"; break; case "Patient": fileType = "Admin_Patient"; break; case "Problem": fileType = "CareRecord_Problem"; break; case "Session": fileType = "Appointment_Session"; break; case "SessionUser": fileType = "Appointment_SessionUser"; break; case "SharingOrganisation": fileType = "Agreements_SharingOrganisation"; break; case "Slot": fileType = "Appointment_Slot"; break; case "UserInRole": fileType = "Admin_UserInRole"; break; default: throw new Exception("Unknown file type [" + 
fileDesc + "]"); } *//*String prefix = TransformConfig.instance().getSharedStoragePath(); prefix += "/"; if (!filePath.startsWith(prefix)) { throw new Exception("File path [" + filePath + "] doesn't start with " + prefix); } filePath = filePath.substring(prefix.length());*//* ExchangePayloadFile p = new ExchangePayloadFile(); p.setPath(filePath); p.setType(fileType); List<ExchangePayloadFile> files = new ArrayList<>(); files.add(p); String version = EmisCsvToFhirTransformer.determineVersion(files); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers); Iterator<AbstractCsvParser> it = parsers.values().iterator(); return it.next(); } if (filePath.contains("BARTSDW")) { return null; } throw new Exception("Unknown file desc [" + fileDesc + "] for " + filePath); }*/ /*private static void moveS3ToAudit(int threads) { LOG.info("Moving S3 to Audit"); try { //list S3 contents List<FileInfo> files = FileHelper.listFilesInSharedStorageWithInfo("s3://discoveryaudit/audit"); LOG.debug("Found " + files.size() + " audits"); int countPerThread = files.size() / threads; int pos = 0; AtomicInteger done = new AtomicInteger(); List<Thread> threadList = new ArrayList<>(); for (int i=0; i<threads; i++) { List<FileInfo> perThread = new ArrayList<>(); int countThisThread = countPerThread; if (i+1 == threads) { countThisThread = files.size() - pos; } for (int j=0; j<countThisThread; j++) { FileInfo fileInfo = files.get(pos); pos ++; perThread.add(fileInfo); } MoveToS3Runnable r = new MoveToS3Runnable(perThread, done); Thread t = new Thread(r); threadList.add(t); t.start(); } while (true) { Thread.sleep(5000); boolean allDone = true; for (Thread t: threadList) { if (t.getState() != Thread.State.TERMINATED) { //if (!t.isAlive()) { allDone = false; break; } } if (allDone) { break; } } LOG.debug("Finished with " + done.get() + " / " + files.size()); LOG.info("Finished Moving S3 to Audit"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void convertEmisGuids() { LOG.debug("Converting Emis Guid"); try { Map<String, String> map = new HashMap<>(); //this list of guids and dates is based off the live Emis extracts, giving the most recent bulk date for each organisation //only practices where the extract started before the move to AWS and where the extract was disabled and re-bulked need to be in here. //Practices disabled and re-bulked since the move to AWS are handled differently. 
map.put("{DD31E915-7076-46CF-99CD-8378AB588B69}", "20/07/2017"); map.put("{87A8851C-3DA4-4BE0-869C-3BF6BA7C0612}", "15/10/2017"); map.put("{612DCB3A-5BE6-4D50-909B-F0F20565F9FC}", "09/08/2017"); map.put("{15667F8D-46A0-4A87-9FA8-0C56B157A0A9}", "05/05/2017"); map.put("{3CFEFBF9-B856-4A40-A39A-4EB6FA39295E}", "31/01/2017"); map.put("{3F481450-AD19-4793-B1F0-40D5C2C57EF7}", "04/11/2017"); map.put("{83939542-20E4-47C5-9883-BF416294BB22}", "13/10/2017"); map.put("{73AA7E3A-4331-4167-8711-FE07DDBF4657}", "15/10/2017"); map.put("{3B703CCF-C527-4EC8-A802-00D3B1535DD0}", "01/02/2017"); map.put("{ED442CA3-351F-43E4-88A2-2EEACE39A402}", "13/10/2017"); map.put("{86537B5B-7CF3-4964-8906-7C10929FBC20}", "13/05/2017"); map.put("{9A4518C4-82CE-4509-8039-1B5F49F9C1FA}", "12/08/2017"); map.put("{16D7F8F9-4A35-44B1-8F1D-DD0162584684}", "11/07/2017"); map.put("{D392C499-345C-499B-898C-93F2CB8CC1B9}", "15/10/2017"); map.put("{5B87882A-0EE8-4233-93D0-D2F5F4F94040}", "15/03/2017"); map.put("{CFE3B460-9058-47FB-BF1D-6BEC13A2257D}", "19/04/2017"); map.put("{7B03E105-9275-47CC-8022-1469FE2D6AE4}", "20/04/2017"); map.put("{94470227-587C-47D7-A51F-9893512424D8}", "27/04/2017"); map.put("{734F4C99-6326-4CA4-A22C-632F0AC12FFC}", "17/10/2017"); map.put("{03C5B4B4-1A70-45F8-922E-135C826D48E0}", "20/04/2017"); map.put("{1BB17C3F-CE80-4261-AF6C-BE987E3A5772}", "09/05/2017"); map.put("{16F6DD42-2140-4395-95D5-3FA50E252896}", "20/04/2017"); map.put("{3B6FD632-3FFB-48E6-9775-287F6C486752}", "15/10/2017"); map.put("{F987F7BD-E19C-46D2-A446-913489F1BB7A}", "05/02/2017"); map.put("{BE7CC1DC-3CAB-4BB1-A5A2-B0C854C3B78E}", "06/07/2017"); map.put("{303EFA4E-EC8F-4CBC-B629-960E4D799E0D}", "15/10/2017"); map.put("{5EE8FD1F-F23A-4209-A1EE-556F9350C900}", "01/02/2017"); map.put("{04F6C555-A298-45F1-AC5E-AC8EBD2BB720}", "17/10/2017"); map.put("{67383254-F7F1-4847-9AA9-C7DCF32859B8}", "17/10/2017"); map.put("{31272E4E-40E0-4103-ABDC-F40A7B75F278}", "19/10/2017"); map.put("{09CA2E3B-7143-4999-9934-971F3F2E6D8C}", "15/10/2017"); map.put("{0527BCE2-4315-47F2-86A1-2E9F3E50399B}", "15/10/2017"); map.put("{16DD14B5-D1D5-4B0C-B886-59AC4DACDA7A}", "04/07/2017"); map.put("{411D0A79-6913-473C-B486-C01F6430D8A6}", "21/09/2017"); map.put("{0862FADA-594A-415E-B971-7A4312E0A58C}", "10/06/2017"); map.put("{249C3F3C-24F0-44CE-97A9-B535982BD70C}", "15/10/2017"); map.put("{5D7A1915-6E22-4B20-A8AE-4768C06D3BBF}", "28/09/2017"); //Barts community map.put("{131AE556-8B50-4C17-9D7D-A4B19F7B1FEA}", "15/10/2017"); //Aberfeldy practice F84698 map.put("{C0D2D0DF-EF78-444D-9A6D-B9EDEF5EF350}", "13/10/2017"); map.put("{F174B354-4156-4BCB-960F-35D0145075EA}", "01/02/2017"); map.put("{38600D63-1DE0-4910-8ED6-A38DC28A9DAA}", "19/02/2018"); //THE SPITALFIELDS PRACTICE (CDB 16);F84081 map.put("{B3ECA2DE-D926-4594-B0EA-CF2F28057CE1}", "19/10/2017"); map.put("{18F7C28B-2A54-4F82-924B-38C60631FFFA}", "04/02/2018"); //Rowans Surgery (CDB 18174);H85035 map.put("{16FB5EE8-5039-4068-BC42-1DB56DC2A530}", "08/06/2017"); map.put("{4BA4A5AC-7B25-40B2-B0EA-135702A72F9D}", "15/10/2017"); map.put("{01B8341F-BC8F-450E-8AFA-4CDA344A5009}", "15/10/2017"); map.put("{E6FBEA1C-BDA2-40B7-A461-C262103F08D7}", "08/06/2017"); map.put("{141C68EB-1BC8-4E99-A9D9-0E63A8944CA9}", "15/10/2017"); map.put("{A3EA804D-E7EB-43EE-8F1F-E860F6337FF7}", "15/10/2017"); map.put("{771B42CC-9C0C-46E2-8143-76F04AF91AD5}", "13/11/2017"); //cranwich road map.put("{16EA8D5C-C667-4818-B629-5D6F4300FEEF}", "11/05/2017"); map.put("{29E51964-C94D-4CB4-894E-EB18E27DEFC1}", "15/10/2017"); 
map.put("{3646CCA5-7FE4-4DFE-87CD-DA3CE1BA885D}", "27/09/2017"); map.put("{3EC82820-702F-4218-853B-D3E5053646A8}", "05/05/2017"); map.put("{37F3E676-B203-4329-97F8-2AF5BFEAEE5A}", "19/10/2017"); map.put("{A0E3208B-95E9-4284-9B5A-D4D387CCC9F9}", "07/06/2017"); map.put("{0BEAF1F0-9507-4AC2-8997-EC0BA1D0247E}", "19/10/2017"); map.put("{071A50E7-1764-4210-94EF-6A4BF96CF753}", "21/02/2017"); map.put("{0C1983D8-FB7D-4563-84D0-1F8F6933E786}", "20/07/2017"); map.put("{871FEEB2-CE30-4603-B9A3-6FA6CC47B5D4}", "15/10/2017"); map.put("{42906EBE-8628-486D-A52F-27B935C9937A}", "01/02/2017"); map.put("{1AB7ABF3-2572-4D07-B719-CFB2FE3AAC80}", "15/10/2017"); map.put("{E312A5B7-13E7-4E43-BE35-ED29F6216D3C}", "20/04/2017"); map.put("{55E60891-8827-40CD-8011-B0223D5C8970}", "15/10/2017"); map.put("{03A63F52-7FEE-4592-9B54-83CEBCF67B5D}", "26/04/2017"); map.put("{DB39B649-B48D-4AC2-BAB1-AC807AABFAC4}", "15/10/2017"); map.put("{0AF9B2AF-A0FB-40B0-BA05-743BA6845DB1}", "26/08/2017"); map.put("{A7600092-319C-4213-92C2-738BEEFC1609}", "31/01/2017"); map.put("{5A1AABA9-7E96-41E7-AF18-E02F4CF1DFB6}", "15/10/2017"); map.put("{7D8CE31D-66AA-4D6A-9EFD-313646BD1D73}", "15/10/2017"); map.put("{03EA4A79-B6F1-4524-9D15-992B47BCEC9A}", "15/10/2017"); map.put("{4588C493-2EA3-429A-8428-E610AE6A6D76}", "28/09/2017"); //Barts community map.put("{B13F3CC9-C317-4E0D-9C57-C545E4A53CAF}", "15/10/2017"); map.put("{463DA820-6EC4-48CB-B915-81B31AFBD121}", "13/10/2017"); map.put("{16F0D65C-B2A8-4186-B4E7-BBAF4390EC55}", "13/10/2017"); map.put("{0039EF15-2DCF-4F70-B371-014C807210FD}", "24/05/2017"); map.put("{E132BF05-78D9-4E4B-B875-53237E76A0FA}", "19/10/2017"); map.put("{3DFC2DA6-AD8C-4836-945D-A6F8DB22AA49}", "15/10/2017"); map.put("{BCB43B1D-2857-4186-918B-460620F98F81}", "13/10/2017"); map.put("{E134C74E-FA3E-4E14-A4BB-314EA3D3AC16}", "15/10/2017"); map.put("{C0F40044-C2CA-4D1D-95D3-553B29992385}", "26/08/2017"); map.put("{B174A018-538D-4065-838C-023A245B53DA}", "14/02/2017"); map.put("{43380A69-AE7D-4ED7-B014-0708675D0C02}", "08/06/2017"); map.put("{E503F0E0-FE56-4CEF-BAB5-0D25B834D9BD}", "13/10/2017"); map.put("{08946F29-1A53-4AF2-814B-0B8758112F21}", "07/02/2018"); //NEWHAM MEDICAL CENTRE (CDB 3461);F84669 map.put("{09857684-535C-4ED6-8007-F91F366611C6}", "19/10/2017"); map.put("{C409A597-009A-4E11-B828-A595755DE0EA}", "17/10/2017"); map.put("{58945A1C-2628-4595-8F8C-F75D93045949}", "15/10/2017"); map.put("{16FF2874-20B0-4188-B1AF-69C97055AA60}", "17/10/2017"); map.put("{2C91E9DA-3F92-464E-B6E6-61D3DE52E62F}", "15/10/2017"); map.put("{16E7AD27-2AD9-43C0-A473-1F39DF93E981}", "10/06/2017"); map.put("{A528478D-65DB-435C-9E98-F8BDB49C9279}", "20/04/2017"); map.put("{A2BDB192-E79C-44C5-97A2-1FD4517C456F}", "21/08/2017"); map.put("{73DFF193-E917-4DBC-B5CF-DD2797B29377}", "15/10/2017"); map.put("{62825316-9107-4E2C-A22C-86211B4760DA}", "13/10/2017"); map.put("{006E8A30-2A45-4DBE-91D7-1C53FADF38B1}", "28/01/2018"); //The Lawson Practice (CDB 4334);F84096 map.put("{E32AA6A6-46B1-4198-AA13-058038AB8746}", "13/10/2017"); map.put("{B51160F1-79E3-4BA7-AA3D-1112AB341146}", "30/09/2017"); map.put("{234503E5-56B4-45A0-99DA-39854FBE78E9}", "01/02/2017"); map.put("{7D1852DA-E264-4599-B9B4-8F40207F967D}", "09/10/2017"); map.put("{44716213-7FEE-4247-A09E-7285BD6B69C6}", "13/10/2017"); map.put("{19BCC870-2704-4D21-BA7B-56F2F472AF35}", "15/10/2017"); map.put("{FEF842DA-FD7C-480F-945A-D097910A81EB}", "13/10/2017"); map.put("{1C980E19-4A39-4ACD-BA8A-925D3E525765}", "13/10/2017"); map.put("{AABDDC3A-93A4-4A87-9506-AAF52E74012B}", "07/02/2018"); //DR N 
DRIVER AND PARTNERS (CDB 4419);F84086 map.put("{90C2959C-0C2D-43DC-A81B-4AD594C17999}", "20/04/2017"); map.put("{1F1669CF-1BB0-47A7-8FBF-BE65651644C1}", "15/10/2017"); map.put("{C1800BE8-4C1D-4340-B0F2-7ED208586ED3}", "15/10/2017"); map.put("{55A94703-4582-46FB-808A-1990E9CBCB6F}", "19/02/2018"); //Stamford Hill Group Practice (CDB 56);F84013 map.put("{D4996E62-268F-4759-83A6-7A68D0B38CEC}", "27/04/2017"); map.put("{3C843BBA-C507-4A95-9934-1A85B977C7B8}", "01/02/2017"); map.put("{2216253B-705D-4C46-ADB3-ED48493D6A39}", "03/02/2018"); //RIVERSIDE MEDICAL PRACTICE (CDB 14675);Y01962 map.put("{00123F97-4557-44AD-81B5-D9902DD72EE9}", "28/04/2017"); map.put("{E35D4D12-E7D2-484B-BFF6-4653B3FED228}", "15/10/2017"); map.put("{6D8B4D28-838B-4915-A148-6FEC2CEBCE77}", "05/07/2017"); map.put("{188D5B4D-4BF6-46E3-AF11-3AD32C68D251}", "19/10/2017"); map.put("{16F7DDE1-3763-4D3A-A58D-F12F967718CF}", "02/11/2017"); map.put("{03148933-6E1C-4A8A-A6D2-A3D488E14DDD}", "30/12/2017"); map.put("{16DE1A3C-875B-4AB2-B227-8A42604E029C}", "05/11/2017"); map.put("{D628D1BC-D02E-4101-B8CD-5B3DB2D06FC1}", "05/05/2017"); map.put("{1EA6259A-6A49-46DB-991D-D604675F87E2}", "15/10/2017"); map.put("{817F9B46-AEE0-45D5-95E3-989F75C4844E}", "20/04/2017"); map.put("{1C422471-F52A-4C30-8D23-140BEB7AAEFC}", "15/08/2017"); map.put("{A6467E73-0F15-49D6-AFAB-4DFB487E7963}", "10/05/2017"); map.put("{CC7D1781-1B85-4AD6-A5DD-9AD5E092E8DB}", "13/10/2017"); map.put("{167CD5C8-148F-4D78-8997-3B22EC0AF6B6}", "13/10/2017"); map.put("{9DD5D2CE-2585-49D8-AF04-2CB1BD137594}", "15/10/2017"); map.put("{D6696BB5-DE69-49D1-BC5E-C56799E42640}", "07/02/2018"); //BOLEYN MEDICAL CENTRE (CDB 4841);F84050 map.put("{169375A9-C3AB-4C5E-82B0-DFF7656AD1FA}", "20/04/2017"); map.put("{0A8ECFDE-95EE-4811-BC05-668D49F5C799}", "19/11/2017"); map.put("{79C898A1-BB92-48F9-B0C3-6725370132B5}", "20/10/2017"); map.put("{472AC9BA-AFFE-4E81-81CA-40DD8389784D}", "27/04/2017"); map.put("{00121CB7-76A6-4D57-8260-E9CA62FFCD77}", "13/10/2017"); map.put("{0FCBA0A7-7CAB-4E75-AC81-5041CD869CA1}", "15/10/2017"); map.put("{00A9C32D-2BB2-4A20-842A-381B3F2031C0}", "19/10/2017"); map.put("{26597C5A-3E29-4960-BE11-AC75D0430615}", "03/05/2017"); map.put("{D945FEF7-F5EF-422B-AB35-6937F9792B54}", "15/10/2017"); map.put("{16D685C6-130A-4B19-BCA9-90AC7DC16346}", "08/07/2017"); map.put("{F09E9CEF-2615-4C9D-AA3D-79E0AB10D0B3}", "13/10/2017"); map.put("{CD7EF748-DB88-49CF-AA6E-24F65029391F}", "15/10/2017"); map.put("{B22018CF-2B52-4A1A-9F6A-CEA13276DB2E}", "19/10/2017"); map.put("{0DF8CFC7-5DE6-4DDB-846A-7F28A2740A00}", "02/12/2017"); map.put("{50F439E5-DB18-43A0-9F25-825957013A07}", "11/01/2018"); //DR PI ABIOLA (CDB 5681);F84631 map.put("{00A3BA25-21C6-42DE-82AA-55FF0D85A6C3}", "31/10/2018"); //MARKET STREET HEALTH GROUP (CDB 381);F84004 map.put("{77B59D29-0FD9-4737-964F-5DBA49D94AB6}", "31/10/2018"); //Star Lane Medical Centre (CDB 40);F84017 map.put("{91239362-A105-4DEA-8E8E-239C3BCEDFD2}", "11/01/2018"); //BEECHWOOD MEDICAL CENTRE (CDB 5661);F84038 map.put("{53A113F5-6E3B-410F-A473-53E38A79335B}", "01/06/2018"); //ELFT Community RWKGY CDB 25362 map.put("{164BE8EC-E2D5-40DE-A5FC-25E058A5C47E}", "17/10/2018"); //Haiderian Medical Centre F82002 map.put("{164CE1B0-F7B3-44AF-B1E4-3DA6C64DEA4C}", "26/11/2018"); //THE GREEN WOOD PRACTICE F82007 map.put("{A30A4BB7-B17B-11D9-AD5F-00D0B77FCBFC}", "26/11/2018"); //Tulasi Medical Practice F82660 LOG.debug("Starting with map size " + map.size()); Map<String, String> hmGuidToOdsMap = new HashMap<>(); UUID systemId = 
UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { UUID serviceId = service.getId(); String ods = service.getLocalId(); String orgGuid = null; List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 5); for (Exchange exchange: exchanges) { String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (!files.isEmpty()) { ExchangePayloadFile first = files.get(0); String path = first.getPath(); if (path.indexOf("EMIS_CUSTOM") > -1) { continue; } File f = new File(path); f = f.getParentFile(); //org GUID orgGuid = f.getName(); break; } } if (orgGuid == null) { LOG.error("Failed to find OrgGuid for " + service.getName() + " " + ods); } else { hmGuidToOdsMap.put(orgGuid, ods); } } //create new code for (String orgGuid: map.keySet()) { String dateStr = map.get(orgGuid); String odsCode = hmGuidToOdsMap.get(orgGuid); if (Strings.isNullOrEmpty(odsCode)) { LOG.error("Missing ODS code for " + orgGuid); } else { System.out.println("map.put(\"" + odsCode + "\", \"" + dateStr + "\");"); } } LOG.debug("Finished Converting Emis Guid"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) { LOG.debug("Testing S3 vs MySQL for service " + serviceUuid); try { //retrieve some audit JSON from the DB EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); String sql = "select resource_id, resource_type, version, mappings_json" + " from resource_field_mappings" + " where mappings_json != '[]'"; if (count > -1) { sql += "limit " + count + ";"; } Statement statement = connection.createStatement(); statement.setFetchSize(1000); ResultSet rs = statement.executeQuery(sql); List<ResourceFieldMapping> list = new ArrayList<>(); while (rs.next()) { int col = 1; String resourceId = rs.getString(col++); String resourceType = rs.getString(col++); String version = rs.getString(col++); String json = rs.getString(col++); ResourceFieldMapping obj = new ResourceFieldMapping(); obj.setResourceId(UUID.fromString(resourceId)); obj.setResourceType(resourceType); obj.setVersion(UUID.fromString(version)); obj.setResourceField(json); list.add(obj); } rs.close(); statement.close(); entityManager.close(); int done = 0; //test writing to S3 long s3Start = System.currentTimeMillis(); LOG.debug("Doing S3 test"); for (int i=0; i<list.size(); i++) { ResourceFieldMapping mapping = list.get(i); String entryName = mapping.getVersion().toString() + ".json"; String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip"; String jsonStr = mapping.getResourceField(); //may as well zip the data, since it will compress well ByteArrayOutputStream baos = new ByteArrayOutputStream(); ZipOutputStream zos = new ZipOutputStream(baos); zos.putNextEntry(new ZipEntry(entryName)); zos.write(jsonStr.getBytes()); zos.flush(); zos.close(); byte[] bytes = baos.toByteArray(); ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes); //ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider(); 
DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance(); AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder .standard() .withCredentials(credentialsProvider) .withRegion(Regions.EU_WEST_2); AmazonS3 s3Client = clientBuilder.build(); ObjectMetadata objectMetadata = new ObjectMetadata(); objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); objectMetadata.setContentLength(bytes.length); PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata); s3Client.putObject(putRequest); done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + list.size()); } } long s3End = System.currentTimeMillis(); LOG.debug("S3 took " + (s3End - s3Start) + " ms"); //test inserting into a DB long sqlStart = System.currentTimeMillis(); LOG.debug("Doing SQL test"); sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)"; entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); PreparedStatement ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); done = 0; int currentBatchSize = 0; for (int i=0; i<list.size(); i++) { ResourceFieldMapping mapping = list.get(i); int col = 1; ps.setString(col++, mapping.getResourceId().toString()); ps.setString(col++, mapping.getResourceType()); ps.setDate(col++, new java.sql.Date(System.currentTimeMillis())); ps.setString(col++, mapping.getVersion().toString()); ps.setString(col++, mapping.getResourceField()); ps.addBatch(); currentBatchSize ++; if (currentBatchSize >= sqlBatchSize || i+1 == list.size()) { ps.executeBatch(); entityManager.getTransaction().commit(); //mirror what would happen normally ps.close(); entityManager.close(); if (i+1 < list.size()) { entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + list.size()); } } long sqlEnd = System.currentTimeMillis(); LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms"); LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid); } catch (Throwable t) { LOG.error("", t); } }*/ private static void loadEmisData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) { LOG.debug("Loading Emis data from into " + dbUrl); try { //hash file type of every file ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword); SimpleDateFormat sdfStart = new SimpleDateFormat("yyyy-MM-dd"); Date startDate = sdfStart.parse("2000-01-01"); for (int i = exchanges.size() - 1; i >= 0; i--) { Exchange exchange = exchanges.get(i); String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (files.isEmpty()) { continue; } for (ExchangePayloadFile file : files) { String type = file.getType(); String path = file.getPath(); //if only doing a 
                // specific file type, skip all others
                if (onlyThisFileType != null && !type.equals(onlyThisFileType)) {
                    continue;
                }
                String name = FilenameUtils.getBaseName(path);
                String[] toks = name.split("_");
                if (toks.length != 5) {
                    throw new TransformException("Failed to find extract date in filename " + path);
                }
                String dateStr = toks[3];
                SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
                Date extractDate = sdf.parse(dateStr);
                boolean processFile = false;
                if (type.equalsIgnoreCase("OriginalTerms") || type.equalsIgnoreCase("RegistrationStatus")) {
                    //can't process these custom files in this routine
                } else if (type.equalsIgnoreCase("Coding_ClinicalCode") || type.equalsIgnoreCase("Coding_DrugCode")) {
                    processFile = true;
                } else {
                    if (!extractDate.before(startDate)) {
                        processFile = true;
                    }
                }
                if (processFile) {
                    loadEmisDataFromFile(conn, path, type, extractDate);
                }
            }
        }
        conn.close();
        LOG.debug("Finished loading Emis data into " + dbUrl);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
private static ParserI createParserForEmisFileType(String fileType, String filePath) {
    String[] toks = fileType.split("_");
    String domain = toks[0];
    String name = toks[1];
    String first = domain.substring(0, 1);
    String last = domain.substring(1);
    domain = first.toLowerCase() + last;
    try {
        String clsName = "org.endeavourhealth.transform.emis.csv.schema." + domain + "." + name;
        Class cls = Class.forName(clsName);
        //now construct an instance of the parser for the file we've found
        Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
        return constructor.newInstance(null, null, null, EmisCsvToFhirTransformer.VERSION_5_4, filePath);
    } catch (Exception ex) {
        LOG.error("No parser for file type [" + fileType + "]");
        LOG.error("", ex);
        return null;
    }
}
private static void loadEmisDataFromFile(Connection conn, String filePath, String fileType, Date extractDate) throws Exception {
    LOG.debug("Loading " + fileType + ": " + filePath);
    String fileName = FilenameUtils.getName(filePath);
    ParserI parser = createParserForEmisFileType(fileType, filePath);
    if (parser == null) {
        return;
    }
    String table = fileType.replace(" ", "_");
    //check table is there
    String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
    Statement statement = conn.createStatement();
    ResultSet rs = statement.executeQuery(sql);
    boolean tableExists = rs.next();
    rs.close();
    statement.close();
    if (!tableExists) {
        LOG.error("No table exists for " + table);
        return;
    }
    //create insert statement
    sql = "INSERT INTO `" + table + "` (";
    sql += "file_name, extract_date";
    List<String> cols = parser.getColumnHeaders();
    for (String col : cols) {
        sql += ", ";
        sql += col.replace(" ", "_").replace("#", "").replace("/", "");
    }
    sql += ") VALUES (";
    sql += "?, ?";
    for (String col : cols) {
        sql += ", ";
        sql += "?";
    }
    sql += ")";
    PreparedStatement ps = conn.prepareStatement(sql);
    List<String> currentBatchStrs = new ArrayList<>();
    //load table
    try {
        int done = 0;
        int currentBatchSize = 0;
        while (parser.nextRecord()) {
            int col = 1;
            //file name is always first
            ps.setString(col++, fileName);
            ps.setDate(col++, new java.sql.Date(extractDate.getTime()));
            for (String colName : cols) {
                CsvCell cell = parser.getCell(colName);
                if (cell == null) {
                    ps.setNull(col++, Types.VARCHAR);
                } else {
                    ps.setString(col++, cell.getString());
                }
            }
            ps.addBatch();
            currentBatchSize++;
            currentBatchStrs.add((ps.toString())); //for error handling
            if (currentBatchSize >= 5) {
                //flush the batch to the database every few rows
                ps.executeBatch();
                currentBatchSize = 0;
                currentBatchStrs.clear();
            }
            done++;
            if (done % 5000 == 0) {
                LOG.debug("Done " + done);
            }
        }
        if (currentBatchSize > 0) {
            ps.executeBatch();
        }
        ps.close();
    } catch (Throwable t) {
        LOG.error("Failed on batch with statements:");
        for (String currentBatchStr : currentBatchStrs) {
            LOG.error(currentBatchStr);
        }
        throw t;
    }
    LOG.debug("Finished " + fileType + ": " + filePath);
}
private static void createBartsDataTables() {
    LOG.debug("Creating Barts data tables");
    try {
        List<String> fileTypes = new ArrayList<>();
        fileTypes.add("AEATT");
        fileTypes.add("Birth");
        //fileTypes.add("BulkDiagnosis");
        //fileTypes.add("BulkProblem");
        //fileTypes.add("BulkProcedure");
        fileTypes.add("CLEVE");
        fileTypes.add("CVREF");
        fileTypes.add("DIAGN");
        fileTypes.add("Diagnosis");
        fileTypes.add("ENCINF");
        fileTypes.add("ENCNT");
        fileTypes.add("FamilyHistory");
        fileTypes.add("IPEPI");
        fileTypes.add("IPWDS");
        fileTypes.add("LOREF");
        fileTypes.add("NOMREF");
        fileTypes.add("OPATT");
        fileTypes.add("ORDER");
        fileTypes.add("ORGREF");
        fileTypes.add("PPADD");
        fileTypes.add("PPAGP");
        fileTypes.add("PPALI");
        fileTypes.add("PPATI");
        fileTypes.add("PPINF");
        fileTypes.add("PPNAM");
        fileTypes.add("PPPHO");
        fileTypes.add("PPREL");
        fileTypes.add("Pregnancy");
        fileTypes.add("Problem");
        fileTypes.add("PROCE");
        fileTypes.add("Procedure");
        fileTypes.add("PRSNLREF");
        fileTypes.add("SusEmergency");
        fileTypes.add("SusInpatient");
        fileTypes.add("SusOutpatient");
        fileTypes.add("EventCode");
        fileTypes.add("EventSetCanon");
        fileTypes.add("EventSet");
        fileTypes.add("EventSetExplode");
        fileTypes.add("BlobContent");
        fileTypes.add("SusInpatientTail");
        fileTypes.add("SusOutpatientTail");
        fileTypes.add("SusEmergencyTail");
        fileTypes.add("AEINV");
        fileTypes.add("AETRE");
        fileTypes.add("OPREF");
        fileTypes.add("STATREF");
        fileTypes.add("RTTPE");
        fileTypes.add("PPATH");
        fileTypes.add("DOCRP");
        fileTypes.add("SCHAC");
        fileTypes.add("EALEN");
        fileTypes.add("DELIV");
        fileTypes.add("EALOF");
        fileTypes.add("SusEmergencyCareDataSet");
        fileTypes.add("SusEmergencyCareDataSetTail");
        for (String fileType : fileTypes) {
            createBartsDataTable(fileType);
        }
        LOG.debug("Finished Creating Barts data tables");
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
private static void createBartsDataTable(String fileType) throws Exception {
    ParserI parser = null;
    try {
        String clsName = "org.endeavourhealth.transform.barts.schema." + fileType;
        Class cls = Class.forName(clsName);
        //now construct an instance of the parser for the file we've found
        Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
        parser = constructor.newInstance(null, null, null, null, null);
    } catch (ClassNotFoundException cnfe) {
        System.out.println("-- No parser for file type [" + fileType + "]");
        return;
    }
    System.out.println("-- " + fileType);
    String table = fileType.replace(" ", "_");
    String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
    System.out.println(dropSql);
    String sql = "CREATE TABLE `" + table + "` (";
    sql += "file_name varchar(100)";
    if (parser instanceof AbstractFixedParser) {
        AbstractFixedParser fixedParser = (AbstractFixedParser) parser;
        List<FixedParserField> fields = fixedParser.getFieldList();
        for (FixedParserField field : fields) {
            String col = field.getName();
            int len = field.getFieldlength();
            sql += ", ";
            sql += col.replace(" ", "_").replace("#", "").replace("/", "");
            sql += " varchar(";
            sql += len;
            sql += ")";
        }
    } else {
        List<String> cols = parser.getColumnHeaders();
        for (String col : cols) {
            sql += ", ";
            sql += col.replace(" ", "_").replace("#", "").replace("/", "");
            if (col.equals("BLOB_CONTENTS")
                    || col.equals("VALUE_LONG_TXT")
                    || col.equals("COMMENT_TXT")
                    || col.equals("NONPREG_REL_PROBLM_SCT_CD")
                    || col.equals("ORDER_COMMENTS_TXT")) {
                sql += " mediumtext";
            } else if (col.indexOf("Date") > -1 || col.indexOf("Time") > -1) {
                sql += " varchar(10)";
            } else {
                sql += " varchar(255)";
            }
        }
    }
    sql += ");";
    /*LOG.debug("-- fileType"); LOG.debug(sql);*/
    System.out.println(sql);
}
private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String startDateStr, String onlyThisFileType) {
    LOG.debug("Loading Barts data into " + dbUrl);
    try {
        //hash file type of every file
        ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
        List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);
        //open connection
        Class.forName("com.mysql.cj.jdbc.Driver");
        Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
        Date startDate = sdf.parse(startDateStr);
        for (int i = exchanges.size() - 1; i >= 0; i--) {
            Exchange exchange = exchanges.get(i);
            String exchangeBody = exchange.getBody();
            List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
            if (files.isEmpty()) {
                continue;
            }
            for (ExchangePayloadFile file : files) {
                String type = file.getType();
                String path = file.getPath();
                //if only doing a specific file type, skip all others
                if (onlyThisFileType != null && !type.equals(onlyThisFileType)) {
                    continue;
                }
                boolean processFile = false;
                if (type.equalsIgnoreCase("CVREF")
                        || type.equalsIgnoreCase("LOREF")
                        || type.equalsIgnoreCase("ORGREF")
                        || type.equalsIgnoreCase("PRSNLREF")
                        || type.equalsIgnoreCase("NOMREF")) {
                    //reference files are always loaded
                    processFile = true;
                } else {
                    File f = new File(path);
                    File parentFile = f.getParentFile();
                    String parentDir = parentFile.getName();
                    Date extractDate = sdf.parse(parentDir);
                    if (!extractDate.before(startDate)) {
                        processFile = true;
                    }
                    /*if (!extractDate.before(startDate) && !extractDate.after(endDate)) { processFile = true; }*/
                }
                if (processFile) {
                    loadBartsDataFromFile(conn, path, type);
                }
            }
        }
        conn.close();
        LOG.debug("Finished loading Barts data into " + dbUrl);
    } catch (Throwable t) {
        LOG.error("", t);
    }
}
private
static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception { LOG.debug("Loading " + fileType + ": " + filePath); String fileName = FilenameUtils.getName(filePath); ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." + fileType; Class cls = Class.forName(clsName); //now construct an instance of the parser for the file we've found Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class); parser = constructor.newInstance(null, null, null, null, filePath); } catch (ClassNotFoundException cnfe) { LOG.error("No parser for file type [" + fileType + "]"); return; } String table = fileType.replace(" ", "_"); //check table is there String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1"; Statement statement = conn.createStatement(); ResultSet rs = statement.executeQuery(sql); boolean tableExists = rs.next(); rs.close(); statement.close(); if (!tableExists) { LOG.error("No table exists for " + table); return; } //create insert statement sql = "INSERT INTO `" + table + "` ("; sql += "file_name"; List<String> cols = parser.getColumnHeaders(); for (String col : cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); } sql += ") VALUES ("; sql += "?"; for (String col : cols) { sql += ", "; sql += "?"; } sql += ")"; PreparedStatement ps = conn.prepareStatement(sql); List<String> currentBatchStrs = new ArrayList<>(); //load table try { int done = 0; int currentBatchSize = 0; while (parser.nextRecord()) { int col = 1; //file name is always first ps.setString(col++, fileName); for (String colName : cols) { CsvCell cell = parser.getCell(colName); if (cell == null) { ps.setNull(col++, Types.VARCHAR); } else { ps.setString(col++, cell.getString()); } } ps.addBatch(); currentBatchSize++; currentBatchStrs.add((ps.toString())); //for error handling if (currentBatchSize >= 5) { ps.executeBatch(); currentBatchSize = 0; currentBatchStrs.clear(); } done++; if (done % 5000 == 0) { LOG.debug("Done " + done); } } if (currentBatchSize >= 0) { ps.executeBatch(); } ps.close(); } catch (Throwable t) { LOG.error("Failed on batch with statements:"); for (String currentBatchStr : currentBatchStrs) { LOG.error(currentBatchStr); } throw t; } LOG.debug("Finished " + fileType + ": " + filePath); } /*private static void fixPseudoIds(String subscriberConfig, int threads) { LOG.debug("Fixing Pseudo IDs for " + subscriberConfig); try { //update psuedo ID on patient table //update psuedo ID on person table //update pseudo ID on subscriber_transform mapping table JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } Connection subscriberConnection = 
EnterpriseFiler.openConnection(config); List<Long> patientIds = new ArrayList<>(); Map<Long, Long> hmOrgIds = new HashMap<>(); Map<Long, Long> hmPersonIds = new HashMap<>(); String sql = "SELECT id, organization_id, person_id FROM patient"; Statement statement = subscriberConnection.createStatement(); statement.setFetchSize(10000); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); long personId = rs.getLong(3); patientIds.add(new Long(patientId)); hmOrgIds.put(new Long(patientId), new Long(orgId)); hmPersonIds.put(new Long(patientId), new Long(personId)); } rs.close(); subscriberConnection.close(); LOG.debug("Found " + patientIds.size() + " patients"); AtomicInteger done = new AtomicInteger(); int pos = 0; List<Thread> threadList = new ArrayList<>(); for (int i=0; i<threads; i++) { List<Long> patientSubset = new ArrayList<>(); int count = patientIds.size() / threads; if (i+1 == threads) { count = patientIds.size() - pos; } for (int j=0; j<count; j++) { Long patientId = patientIds.get(pos); patientSubset.add(patientId); pos ++; } FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done); Thread t = new Thread(runnable); t.start(); threadList.add(t); } while (true) { Thread.sleep(5000); boolean allDone = true; for (Thread t: threadList) { if (t.getState() != Thread.State.TERMINATED) { //if (!t.isAlive()) { allDone = false; break; } } if (allDone) { break; } } LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } } static class FixPseudoIdRunnable implements Runnable { private String subscriberConfig = null; private List<Long> patientIds = null; private Map<Long, Long> hmOrgIds = null; private Map<Long, Long> hmPersonIds = null; private AtomicInteger done = null; public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) { this.subscriberConfig = subscriberConfig; this.patientIds = patientIds; this.hmOrgIds = hmOrgIds; this.hmPersonIds = hmPersonIds; this.done = done; } @Override public void run() { try { doRun(); } catch (Throwable t) { LOG.error("", t); } } private void doRun() throws Exception { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Statement statement = subscriberConnection.createStatement(); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } //PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection 
subscriberTransformConnection = session.connection(); Statement subscriberTransformStatement = subscriberTransformConnection.createStatement(); String sql = null; ResultSet rs = null; for (Long patientId: patientIds) { Long orgId = hmOrgIds.get(patientId); Long personId = hmPersonIds.get(patientId); //find service ID sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); //find patient ID sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); if (!resourceType.equals("Patient")) { throw new Exception("Not a patient resource type for enterprise ID " + patientId); } //get patient Resource resource = null; try { resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId); } catch (Exception ex) { throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex); } if (resource == null) { LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; //generate new pseudo ID String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt); //save to person if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE person" + " SET pseudo_id = null" + " WHERE id = " + personId; statement.executeUpdate(sql); } else { sql = "UPDATE person" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + personId; statement.executeUpdate(sql); } //save to patient if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE patient" + " SET pseudo_id = null" + " WHERE id = " + patientId; statement.executeUpdate(sql); } else { sql = "UPDATE patient" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + patientId; statement.executeUpdate(sql); } //linked distributers if (arr != null) { for (LinkDistributorConfig linked: arr) { String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked); sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')" + " ON DUPLICATE KEY UPDATE" + " target_salt_key_name = VALUES(target_salt_key_name)," + " target_skid = VALUES(target_skid)"; statement.executeUpdate(sql); } } //save to subscriber transform sql = "DELETE FROM pseudo_id_map WHERE patient_id = '" + resourceId + "'"; subscriberTransformStatement.executeUpdate(sql); if (!Strings.isNullOrEmpty(pseudoId)) { sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')"; subscriberTransformStatement.executeUpdate(sql); } subscriberConnection.commit(); subscriberTransformConnection.commit(); int doneLocal = done.incrementAndGet(); if (doneLocal % 1000 == 0) { LOG.debug("Done " + doneLocal); } } statement.close(); 
subscriberTransformStatement.close(); subscriberConnection.close(); subscriberTransformConnection.close(); } }*/ /*private static void fixDeceasedPatients(String subscriberConfig) { LOG.debug("Fixing Deceased Patients for " + subscriberConfig); try { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Map<Long, Long> patientIds = new HashMap<>(); String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL"; Statement statement = subscriberConnection.createStatement(); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); patientIds.put(new Long(patientId), new Long(orgId)); } rs.close(); statement.close(); EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection subscriberTransformConnection = session.connection(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); for (Long patientId: patientIds.keySet()) { Long orgId = patientIds.get(patientId); statement = subscriberTransformConnection.createStatement(); sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); statement.close(); Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId); if (resource == null) { LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; Date dob = patient.getBirthDate(); Date dod = patient.getDeceasedDateTimeType().getValue(); Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod); updateEnterprisePatient(patientId, ages, subscriberConnection); updateEnterprisePerson(patientId, ages, subscriberConnection); } subscriberConnection.close(); subscriberTransformConnection.close(); LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void updateEnterprisePatient(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception { //the enterprise patient database isn't managed using hibernate, so we need to simply write a simple update statement StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient SET "); sb.append("age_years = ?, "); sb.append("age_months = ?, "); sb.append("age_weeks = ? 
"); sb.append("WHERE id = ?"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks"); } private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception { //update the age fields on the person table where the person is for our patient and their pseudo IDs match StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient, person SET "); sb.append("person.age_years = ?, "); sb.append("person.age_months = ?, "); sb.append("person.age_weeks = ? "); sb.append("WHERE patient.id = ? "); sb.append("AND patient.person_id = person.id "); sb.append("AND patient.pseudo_id = person.pseudo_id"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); }*/ /*private static void testS3Read(String s3BucketName, String keyName, String start, String len) { LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); try { AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder .standard() .withCredentials(DefaultAWSCredentialsProviderChain.getInstance()) .withRegion(Regions.EU_WEST_2); AmazonS3 s3Client = clientBuilder.build(); GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName); long startInt = Long.parseLong(start); long lenInt = Long.parseLong(len); long endInt = startInt + lenInt; request.setRange(startInt, endInt); long startMs = System.currentTimeMillis(); S3Object object = s3Client.getObject(request); InputStream inputStream = object.getObjectContent(); InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset()); StringBuilder sb = new StringBuilder(); char[] buf = new char[100]; while (true) { int read = reader.read(buf); if (read == -1 || sb.length() >= lenInt) { break; } sb.append(buf, 0, read); } reader.close(); long endMs = System.currentTimeMillis(); LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms"); LOG.debug("Finished Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void createTransforMap(UUID serviceId, String table, String outputFile) { LOG.debug("Creating transform map for " + serviceId + " from " + table); try { //retrieve from table 
EntityManager transformEntityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session2 = (SessionImpl)transformEntityManager.getDelegate(); Connection mappingConnection = session2.connection(); EntityManager ehrEntityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session3 = (SessionImpl)ehrEntityManager.getDelegate(); Connection ehrConnection = session3.connection(); String sql = "SELECT resource_type, resource_id, version FROM " + table; Statement statement = mappingConnection.createStatement(); statement.setFetchSize(1000); ResultSet rs = statement.executeQuery(sql); LOG.debug("Got resource IDs from DB"); Map<String, Map<String, List<String>>> hm = new HashMap<>(); int count = 0; //build map up per resource while (rs.next()) { String resourceType = rs.getString("resource_type"); String resourceId = rs.getString("resource_id"); String resourceVersion = rs.getString("version"); *//*sql = "SELECT * FROM resource_field_mappings WHERE version = 'a905db26-1357-4710-90ef-474f256567ed';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*//* *//*sql = "SELECT * FROM resource_field_mappings WHERE version = ?"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*//* sql = "SELECT * FROM resource_field_mappings WHERE resource_type = '" + resourceType + "' AND resource_id = '" + resourceId + "' AND version = '" + resourceVersion + "';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql); //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //statement1.setString(1, resourceVersion); *//*statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion);*//* ResultSet rs1 = null; try { rs1 = statement1.executeQuery(sql); } catch (Exception ex) { LOG.error("" + statement1); throw ex; } rs1.next(); String jsonStr = rs1.getString("mappings_json"); rs1.close(); statement1.close(); sql = "SELECT * FROM resource_history WHERE resource_type = ? AND resource_id = ? 
AND version = ?"; statement1 = ehrConnection.prepareStatement(sql); statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion); rs1 = statement1.executeQuery(); if (!rs1.next()) { throw new Exception("Failed to find resource_history for " + statement1.toString()); } String s = rs1.getString("resource_data"); rs1.close(); statement1.close(); if (Strings.isNullOrEmpty(s)) { continue; } JsonNode resourceJson = ObjectMapperPool.getInstance().readTree(s); Map<String, List<String>> hmResourceType = hm.get(resourceType); if (hmResourceType == null) { hmResourceType = new HashMap<>(); hm.put(resourceType, hmResourceType); } JsonNode json = ObjectMapperPool.getInstance().readTree(jsonStr); for (int i=0; i<json.size(); i++) { JsonNode child = json.get(i); JsonNode idNode = child.get("auditId"); JsonNode colsNode = child.get("cols"); if (idNode == null) { throw new Exception("No ID node in " + jsonStr); } if (colsNode == null) { throw new Exception("No cols node in " + jsonStr); } long id = idNode.asLong(); //get source file ID sql = "SELECT * FROM source_file_record WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, id); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileId = rs1.getLong("source_file_id"); rs1.close(); statement1.close(); //get source file type sql = "SELECT * FROM source_file WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileId); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileType = rs1.getLong("source_file_type_id"); rs1.close(); statement1.close(); //get the type desc sql = "SELECT * FROM source_file_type WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); rs1.next(); String fileTypeDesc = rs1.getString("description"); rs1.close(); statement1.close(); //get the cols Map<Integer, String> hmCols = new HashMap<>(); sql = "SELECT * FROM source_file_type_column WHERE source_file_type_id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); while (rs1.next()) { int index = rs1.getInt("column_index"); String name = rs1.getString("column_name"); hmCols.put(new Integer(index), name); } rs1.close(); statement1.close(); for (int j=0; j<colsNode.size(); j++) { JsonNode colNode = colsNode.get(j); int col = colNode.get("col").asInt(); String jsonField = colNode.get("field").asText(); int index = jsonField.indexOf("["); while (index > -1) { int endIndex = jsonField.indexOf("]", index); String prefix = jsonField.substring(0, index + 1); String suffix = jsonField.substring(endIndex); if (prefix.equals("extension[")) { String val = jsonField.substring(index+1, endIndex); int extensionIndex = Integer.parseInt(val); JsonNode extensionArray = resourceJson.get("extension"); JsonNode extensionRoot = extensionArray.get(extensionIndex); String extensionUrl = extensionRoot.get("url").asText(); extensionUrl = extensionUrl.replace("http://endeavourhealth.org/fhir/StructureDefinition/", ""); extensionUrl = extensionUrl.replace("http://hl7.org/fhir/StructureDefinition/", ""); jsonField = prefix + extensionUrl + suffix; } else { jsonField = prefix + "n" + suffix; } index = jsonField.indexOf("[", endIndex); } String colName = hmCols.get(new Integer(col)); String fileTypeAndCol = fileTypeDesc + ":" + colName; List<String> fieldNameMappings = hmResourceType.get(jsonField); if 
(fieldNameMappings == null) { fieldNameMappings = new ArrayList<>(); hmResourceType.put(jsonField, fieldNameMappings); } if (!fieldNameMappings.contains(fileTypeAndCol)) { fieldNameMappings.add(fileTypeAndCol); } } } count ++; if (count % 500 == 0) { LOG.debug("Done " + count); } } LOG.debug("Done " + count); rs.close(); ehrEntityManager.close(); //create output file List<String> lines = new ArrayList<>(); List<String> resourceTypes = new ArrayList<>(hm.keySet()); Collections.sort(resourceTypes, String.CASE_INSENSITIVE_ORDER); for (String resourceType: resourceTypes) { lines.add("============================================================"); lines.add(resourceType); lines.add("============================================================"); Map<String, List<String>> hmResourceType = hm.get(resourceType); List<String> fields = new ArrayList<>(hmResourceType.keySet()); Collections.sort(fields, String.CASE_INSENSITIVE_ORDER); for (String field: fields) { String linePrefix = field + " = "; List<String> sourceRecords = hmResourceType.get(field); for (String sourceRecord: sourceRecords) { lines.add(linePrefix + sourceRecord); linePrefix = Strings.repeat(" ", linePrefix.length()); } lines.add(""); } lines.add(""); } File f = new File(outputFile); Path p = f.toPath(); Files.write(p, lines, Charset.defaultCharset(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); LOG.debug("Finished creating transform map from " + table); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixBartsPatients(UUID serviceId) { LOG.debug("Fixing Barts patients at service " + serviceId); try { EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); int checked = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';"; Statement s = edsConnection.createStatement(); s.setFetchSize(10000); //don't get all rows at once ResultSet rs = s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { String patientId = rs.getString(1); ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId)); if (wrapper == null) { LOG.error("Failed to get recource current for ID " + patientId); continue; } String oldJson = wrapper.getResourceData(); Patient patient = (Patient)FhirSerializationHelper.deserializeResource(oldJson); PatientBuilder patientBuilder = new PatientBuilder(patient); List<String> numbersFromCsv = new ArrayList<>(); if (patient.hasTelecom()) { for (ContactPoint contactPoint: patient.getTelecom()) { if (contactPoint.hasId()) { numbersFromCsv.add(contactPoint.getValue()); } } for (String numberFromCsv: numbersFromCsv) { PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv); } } List<HumanName> namesFromCsv = new ArrayList<>(); if (patient.hasName()) { for (HumanName name: patient.getName()) { if (name.hasId()) { namesFromCsv.add(name); } } for (HumanName name: namesFromCsv) { PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name); } } List<Address> addressesFromCsv = new ArrayList<>(); if (patient.hasAddress()) { for (Address address: patient.getAddress()) { if (address.hasId()) { addressesFromCsv.add(address); } } for (Address address: addressesFromCsv) { 
PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address); } } String newJson = FhirSerializationHelper.serializeResource(patient); if (!newJson.equals(oldJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } checked ++; if (checked % 1000 == 0) { LOG.debug("Checked " + checked + " fixed " + fixed); } } LOG.debug("Checked " + checked + " fixed " + fixed); rs.close(); s.close(); edsEntityManager.close(); LOG.debug("Finish Fixing Barts patients at service " + serviceId); } catch (Throwable t) { LOG.error("", t); } }*/ private static void postToRabbit(String exchangeName, String srcFile, Integer throttle) { LOG.info("Posting to " + exchangeName + " from " + srcFile); if (throttle != null) { LOG.info("Throttled to " + throttle + " messages/second"); } try { File src = new File(srcFile); //create file of ones done File dir = src.getParentFile(); String name = "DONE" + src.getName(); File dst = new File(dir, name); Set<UUID> hsAlreadyDone = new HashSet<>(); if (dst.exists()) { List<String> lines = Files.readAllLines(dst.toPath()); for (String line : lines) { if (!Strings.isNullOrEmpty(line)) { try { UUID uuid = UUID.fromString(line); hsAlreadyDone.add(uuid); } catch (Exception ex) { LOG.error("Skipping line " + line); } } } LOG.info("Already done " + hsAlreadyDone.size()); } List<UUID> exchangeIds = new ArrayList<>(); int countTotal = 0; List<String> lines = Files.readAllLines(src.toPath()); for (String line : lines) { if (!Strings.isNullOrEmpty(line)) { try { UUID uuid = UUID.fromString(line); countTotal++; if (!hsAlreadyDone.contains(uuid)) { exchangeIds.add(uuid); } } catch (Exception ex) { LOG.error("Skipping line " + line); } } } LOG.info("Found " + countTotal + " down to " + exchangeIds.size() + " skipping ones already done, to post to " + exchangeName); continueOrQuit(); FileWriter fileWriter = new FileWriter(dst, true); PrintWriter printWriter = new PrintWriter(fileWriter); long startMs = System.currentTimeMillis(); int doneThisSecond = 0; LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName); for (int i = 0; i < exchangeIds.size(); i++) { UUID exchangeId = exchangeIds.get(i); List<UUID> tmp = new ArrayList<>(); tmp.add(exchangeId); QueueHelper.postToExchange(tmp, exchangeName, null, true); printWriter.println(exchangeId.toString()); printWriter.flush(); if (i % 5000 == 0) { LOG.debug("Done " + i + " / " + exchangeIds.size()); } if (throttle != null) { doneThisSecond++; if (doneThisSecond > throttle.intValue()) { long now = System.currentTimeMillis(); long sleep = 1000 - (now - startMs); if (sleep > 0) { Thread.sleep(sleep); } startMs = System.currentTimeMillis(); doneThisSecond = 0; } } } printWriter.close(); LOG.info("Finished Posting to " + exchangeName + " from " + srcFile); } catch (Throwable t) { LOG.error("", t); } } private static void postExchangesToProtocol(String srcFile) { LOG.info("Posting to protocol from " + srcFile); try { List<UUID> exchangeIds = new ArrayList<>(); List<String> lines = Files.readAllLines(new File(srcFile).toPath()); for (String line: lines) { if (!Strings.isNullOrEmpty(line)) { UUID uuid = UUID.fromString(line); exchangeIds.add(uuid); } } LOG.info("Posting " + exchangeIds.size() + " to Protocol queue"); QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false); LOG.info("Finished Posting to protocol from " + srcFile); } catch (Throwable t) { LOG.error("", t); } } /* create table uprn_pseudo_map ( uprn bigint, pseudo_uprn varchar(255), property_class varchar(10) ); */ 
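    //NOTE: illustrative sketch only. The pseudonymisation routines below delegate to the Crypto helper
    //(SetEncryptedSalt + GetDigest over a TreeMap of key/value pairs), whose internals aren't shown in
    //this class. The helper below assumes an HMAC-SHA256 keyed digest over the "UPRN" key/value pair,
    //purely to show the shape of a salted one-way digest; it is not guaranteed to produce the same
    //output as Crypto.GetDigest() and is not called by any of the routines in this class.
    private static String sketchPseudonymiseUprn(long uprn, byte[] saltBytes) throws Exception {
        javax.crypto.Mac mac = javax.crypto.Mac.getInstance("HmacSHA256");
        mac.init(new javax.crypto.spec.SecretKeySpec(saltBytes, "HmacSHA256"));
        byte[] digest = mac.doFinal(("UPRN=" + uprn).getBytes(java.nio.charset.StandardCharsets.UTF_8));
        return Base64.getEncoder().encodeToString(digest);
    }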
    private static void calculateUprnPseudoIds(String subscriberConfigName, String targetTable) throws Exception {
        LOG.info("Calculating UPRN Pseudo IDs " + subscriberConfigName);
        try {
            JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");
            JsonNode pseudoNode = config.get("pseudonymisation");
            if (pseudoNode == null) {
                LOG.error("No salt key found!");
                return;
            }
            JsonNode saltNode = pseudoNode.get("salt");
            String base64Salt = saltNode.asText();
            byte[] saltBytes = Base64.getDecoder().decode(base64Salt);

            EntityManager subscriberEntityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfigName);
            SessionImpl session = (SessionImpl) subscriberEntityManager.getDelegate();
            Connection subscriberConnection = session.connection();

            String upsertSql = "INSERT INTO " + targetTable + " (uprn, pseudo_uprn, property_class) VALUES (?, ?, ?)";
            PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql);
            int inBatch = 0;
            int done = 0;

            EntityManager referenceEntityManager = ConnectionManager.getReferenceEntityManager();
            session = (SessionImpl) referenceEntityManager.getDelegate();
            Connection referenceConnection = session.connection();

            String selectSql = "SELECT uprn, property_class FROM uprn_property_class";
            PreparedStatement psSelect = referenceConnection.prepareStatement(selectSql);
            psSelect.setFetchSize(2000);

            LOG.info("Starting query on reference database");
            ResultSet rs = psSelect.executeQuery();
            LOG.info("Got raw results back");

            while (rs.next()) {
                long uprn = rs.getLong(1);
                String cls = rs.getString(2);

                TreeMap<String, String> keys = new TreeMap<>();
                keys.put("UPRN", "" + uprn);

                Crypto crypto = new Crypto();
                crypto.SetEncryptedSalt(saltBytes);
                String pseudoUprn = crypto.GetDigest(keys);

                psUpsert.setLong(1, uprn);
                psUpsert.setString(2, pseudoUprn);
                psUpsert.setString(3, cls);
                psUpsert.addBatch();
                inBatch++;
                done++;

                if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) {
                    psUpsert.executeBatch();
                    subscriberConnection.commit();
                    inBatch = 0;
                }

                if (done % 5000 == 0) {
                    LOG.debug("Done " + done);
                }
            }

            if (inBatch > 0) {
                psUpsert.executeBatch();
                subscriberConnection.commit();
            }
            LOG.debug("Done " + done);

            psUpsert.close();
            subscriberEntityManager.close();
            psSelect.close();
            referenceEntityManager.close();

            LOG.info("Finished Calculating UPRN Pseudo IDs " + subscriberConfigName);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private static void populateSubscriberUprnTable(String subscriberConfigName, Integer overrideBatchSize, String specificPatientId) throws Exception {
        LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName);
        try {
            int saveBatchSize = TransformConfig.instance().getResourceSaveBatchSize();
            if (overrideBatchSize != null) {
                saveBatchSize = overrideBatchSize.intValue();
            }

            JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");

            //changed the format of the JSON
            JsonNode pseudoNode = config.get("pseudonymisation");
            boolean pseudonymised = pseudoNode != null;
            byte[] saltBytes = null;
            if (pseudonymised) {
                JsonNode saltNode = pseudoNode.get("salt");
                String base64Salt = saltNode.asText();
                saltBytes = Base64.getDecoder().decode(base64Salt);
            }
            /*boolean pseudonymised = config.get("pseudonymised").asBoolean();
            byte[] saltBytes = null;
            if (pseudonymised) {
                JsonNode saltNode = config.get("salt");
                String base64Salt = saltNode.asText();
                saltBytes = Base64.getDecoder().decode(base64Salt);
            }*/

            List<EnterpriseConnector.ConnectionWrapper> connectionWrappers =
EnterpriseConnector.openConnection(subscriberConfigName); for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) { Connection subscriberConnection = connectionWrapper.getConnection(); LOG.info("Populating " + connectionWrapper); String upsertSql; if (pseudonymised) { upsertSql = "INSERT INTO patient_uprn" + " (patient_id, organization_id, person_id, lsoa_code, pseudo_uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode, property_class)" + " VALUES" + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + " ON DUPLICATE KEY UPDATE" + " organization_id = VALUES(organization_id)," + " person_id = VALUES(person_id)," + " lsoa_code = VALUES(lsoa_code)," + " pseudo_uprn = VALUES(pseudo_uprn)," + " qualifier = VALUES(qualifier)," + " `algorithm` = VALUES(`algorithm`)," + " `match` = VALUES(`match`)," + " no_address = VALUES(no_address)," + " invalid_address = VALUES(invalid_address)," + " missing_postcode = VALUES(missing_postcode)," + " invalid_postcode = VALUES(invalid_postcode)," + " property_class = VALUES(property_class)"; } else { upsertSql = "INSERT INTO patient_uprn" + " (patient_id, organization_id, person_id, lsoa_code, uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode, property_class)" + " VALUES" + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)" + " ON DUPLICATE KEY UPDATE" + " organization_id = VALUES(organization_id)," + " person_id = VALUES(person_id)," + " lsoa_code = VALUES(lsoa_code)," + " uprn = VALUES(uprn)," + " qualifier = VALUES(qualifier)," + " `algorithm` = VALUES(`algorithm`)," + " `match` = VALUES(`match`)," + " no_address = VALUES(no_address)," + " invalid_address = VALUES(invalid_address)," + " missing_postcode = VALUES(missing_postcode)," + " invalid_postcode = VALUES(invalid_postcode)," + " property_class = VALUES(property_class)"; } PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql); int inBatch = 0; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); SubscriberResourceMappingDalI enterpriseIdDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal(); int checked = 0; int saved = 0; Map<String, Boolean> hmPermittedPublishers = new HashMap<>(); //join to the property class table - this isn't the best way of doing it as it will only work while //the reference and eds databases are on the same server //String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn"; String sql = "SELECT a.service_id, a.patient_id, a.uprn, a.qualifier, a.abp_address, a.`algorithm`," + " a.`match`, a.no_address, a.invalid_address, a.missing_postcode, a.invalid_postcode, c.property_class" + " FROM patient_address_uprn a" + " LEFT OUTER JOIN reference.uprn_property_class c" + " ON c.uprn = a.uprn"; //support one patient at a time for debugging if (specificPatientId != null) { sql += " WHERE a.patient_id = '" + specificPatientId + "'"; LOG.debug("Restricting to patient " + specificPatientId); } Statement s = edsConnection.createStatement(); s.setFetchSize(2000); //don't get all rows at once LOG.info("Starting query on EDS database"); ResultSet rs 
= s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { int col = 1; String serviceId = rs.getString(col++); String patientId = rs.getString(col++); Long uprn = rs.getLong(col++); if (rs.wasNull()) { uprn = null; } String qualifier = rs.getString(col++); String abpAddress = rs.getString(col++); String algorithm = rs.getString(col++); String match = rs.getString(col++); boolean noAddress = rs.getBoolean(col++); boolean invalidAddress = rs.getBoolean(col++); boolean missingPostcode = rs.getBoolean(col++); boolean invalidPostcode = rs.getBoolean(col++); String propertyClass = rs.getString(col++); //because of past mistakes, we have Discovery->Enterprise mappings for patients that //shouldn't, so we also need to check that the service ID is definitely a publisher to this subscriber Boolean isPublisher = hmPermittedPublishers.get(serviceId); if (isPublisher == null) { List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceId, null); //passing null means don't filter on system ID for (LibraryItem libraryItem : libraryItems) { Protocol protocol = libraryItem.getProtocol(); if (protocol.getEnabled() != ProtocolEnabled.TRUE) { continue; } //check to make sure that this service is actually a PUBLISHER to this protocol boolean isProtocolPublisher = false; for (ServiceContract serviceContract : protocol.getServiceContract()) { if (serviceContract.getType().equals(ServiceContractType.PUBLISHER) && serviceContract.getService().getUuid().equals(serviceId) && serviceContract.getActive() == ServiceContractActive.TRUE) { isProtocolPublisher = true; break; } } if (!isProtocolPublisher) { continue; } //check to see if this subscriber config is a subscriber to this DB for (ServiceContract serviceContract : protocol.getServiceContract()) { if (serviceContract.getType().equals(ServiceContractType.SUBSCRIBER) && serviceContract.getActive() == ServiceContractActive.TRUE) { ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); UUID subscriberServiceId = UUID.fromString(serviceContract.getService().getUuid()); UUID subscriberTechnicalInterfaceId = UUID.fromString(serviceContract.getTechnicalInterface().getUuid()); Service subscriberService = serviceRepository.getById(subscriberServiceId); List<ServiceInterfaceEndpoint> serviceEndpoints = subscriberService.getEndpointsList(); for (ServiceInterfaceEndpoint serviceEndpoint : serviceEndpoints) { if (serviceEndpoint.getTechnicalInterfaceUuid().equals(subscriberTechnicalInterfaceId)) { String protocolSubscriberConfigName = serviceEndpoint.getEndpoint(); if (protocolSubscriberConfigName.equals(subscriberConfigName)) { isPublisher = new Boolean(true); break; } } } } } } if (isPublisher == null) { isPublisher = new Boolean(false); } hmPermittedPublishers.put(serviceId, isPublisher); } if (specificPatientId != null) { LOG.debug("Org is publisher = " + isPublisher); } if (!isPublisher.booleanValue()) { continue; } //check if patient ID already exists in the subscriber DB Long subscriberPatientId = enterpriseIdDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), patientId); if (specificPatientId != null) { LOG.debug("Got patient " + patientId + " with UPRN " + uprn + " and property class " + propertyClass + " and subscriber patient ID " + subscriberPatientId); } //if the patient doesn't exist on this subscriber DB, then don't transform this record if (subscriberPatientId == null) { continue; } //see if the patient actually exists in the subscriber DB (might not if the patient is deleted or confidential) 
                    String checkSql = "SELECT id FROM patient WHERE id = ?";
                    Connection subscriberConnection2 = connectionWrapper.getConnection();
                    PreparedStatement psCheck = subscriberConnection2.prepareStatement(checkSql);
                    psCheck.setLong(1, subscriberPatientId);
                    ResultSet checkRs = psCheck.executeQuery();
                    boolean inSubscriberDb = checkRs.next();
                    psCheck.close();
                    subscriberConnection2.close();

                    if (!inSubscriberDb) {
                        LOG.info("Skipping patient " + patientId + " -> " + subscriberPatientId + " as not found in enterprise DB");
                        continue;
                    }

                    SubscriberOrgMappingDalI orgMappingDal = DalProvider.factorySubscriberOrgMappingDal(subscriberConfigName);
                    Long subscriberOrgId = orgMappingDal.findEnterpriseOrganisationId(serviceId);

                    String discoveryPersonId = patientLinkDal.getPersonId(patientId);
                    SubscriberPersonMappingDalI personMappingDal = DalProvider.factorySubscriberPersonMappingDal(subscriberConfigName);
                    Long subscriberPersonId = personMappingDal.findOrCreateEnterprisePersonId(discoveryPersonId);

                    //derive the LSOA code from the postcode at the end of the ABP address
                    String lsoaCode = null;
                    if (!Strings.isNullOrEmpty(abpAddress)) {
                        String[] toks = abpAddress.split(" ");
                        String postcode = toks[toks.length - 1];
                        PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode);
                        if (postcodeReference != null) {
                            lsoaCode = postcodeReference.getLsoaCode();
                        }
                    }

                    col = 1;
                    psUpsert.setLong(col++, subscriberPatientId);
                    psUpsert.setLong(col++, subscriberOrgId);
                    psUpsert.setLong(col++, subscriberPersonId);
                    psUpsert.setString(col++, lsoaCode);

                    if (pseudonymised) {
                        String pseudoUprn = null;
                        if (uprn != null) {
                            TreeMap<String, String> keys = new TreeMap<>();
                            keys.put("UPRN", "" + uprn);

                            Crypto crypto = new Crypto();
                            crypto.SetEncryptedSalt(saltBytes);
                            pseudoUprn = crypto.GetDigest(keys);
                        }
                        psUpsert.setString(col++, pseudoUprn);

                    } else {
                        if (uprn != null) {
                            psUpsert.setLong(col++, uprn.longValue());
                        } else {
                            psUpsert.setNull(col++, Types.BIGINT);
                        }
                    }

                    psUpsert.setString(col++, qualifier);
                    psUpsert.setString(col++, algorithm);
                    psUpsert.setString(col++, match);
                    psUpsert.setBoolean(col++, noAddress);
                    psUpsert.setBoolean(col++, invalidAddress);
                    psUpsert.setBoolean(col++, missingPostcode);
                    psUpsert.setBoolean(col++, invalidPostcode);
                    psUpsert.setString(col++, propertyClass);

                    if (specificPatientId != null) {
                        LOG.debug("" + psUpsert);
                    }

                    psUpsert.addBatch();
                    inBatch++;
                    saved++;

                    if (inBatch >= saveBatchSize) {
                        try {
                            psUpsert.executeBatch();
                            subscriberConnection.commit();
                            inBatch = 0;
                        } catch (Exception ex) {
                            LOG.error("Error saving UPRN for " + patientId + " -> " + subscriberPatientId + " for org " + subscriberOrgId);
                            LOG.error("" + psUpsert);
                            throw ex;
                        }
                    }

                    checked++;
                    if (checked % 1000 == 0) {
                        LOG.info("Checked " + checked + " Saved " + saved);
                    }
                }

                if (inBatch > 0) {
                    psUpsert.executeBatch();
                    subscriberConnection.commit();
                }

                LOG.info("Checked " + checked + " Saved " + saved);

                psUpsert.close();
                subscriberConnection.close();
                edsEntityManager.close();
            }

            LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName);

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    /*private static void fixPersonsNoNhsNumber() {
        LOG.info("Fixing persons with no NHS number");
        try {
            ServiceDalI serviceDal = DalProvider.factoryServiceDal();
            List<Service> services = serviceDal.getAll();

            EntityManager entityManager = ConnectionManager.getEdsEntityManager();
            SessionImpl session = (SessionImpl)entityManager.getDelegate();
            Connection patientSearchConnection = session.connection();
            Statement patientSearchStatement = patientSearchConnection.createStatement();

            for (Service service: services) 
{ LOG.info("Doing " + service.getName() + " " + service.getId()); int checked = 0; int fixedPersons = 0; int fixedSearches = 0; String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)"; ResultSet rs = patientSearchStatement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String nhsNumber = rs.getString(2); //find matched person ID String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'"; Statement s = patientSearchConnection.createStatement(); ResultSet rsPersonId = s.executeQuery(personIdSql); String personId = null; if (rsPersonId.next()) { personId = rsPersonId.getString(1); } rsPersonId.close(); s.close(); if (Strings.isNullOrEmpty(personId)) { LOG.error("Patient " + patientId + " has no person ID"); continue; } //see whether person ID used NHS number to match String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'"; s = patientSearchConnection.createStatement(); ResultSet rsPatientLink = s.executeQuery(patientLinkSql); String matchingNhsNumber = null; if (rsPatientLink.next()) { matchingNhsNumber = rsPatientLink.getString(1); } rsPatientLink.close(); s.close(); //if patient link person has a record for this nhs number, update the person link if (!Strings.isNullOrEmpty(matchingNhsNumber)) { String newPersonId = UUID.randomUUID().toString(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String createdAtStr = sdf.format(new Date()); s = patientSearchConnection.createStatement(); //new record in patient link history String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')"; //LOG.debug(patientHistorySql); s.execute(patientHistorySql); //update patient link String patientLinkUpdateSql = "UPDATE patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'"; s.execute(patientLinkUpdateSql); patientSearchConnection.commit(); s.close(); fixedPersons ++; } //if patient search has an invalid NHS number, update it if (!Strings.isNullOrEmpty(nhsNumber)) { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); patientSearchDal.update(service.getId(), patient); fixedSearches ++; } checked ++; if (checked % 50 == 0) { LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); } } LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); rs.close(); } patientSearchStatement.close(); entityManager.close(); LOG.info("Finished fixing persons with no NHS number"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void checkDeletedObs(UUID serviceId, UUID systemId) { LOG.info("Checking Observations for " + serviceId); try { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); 
potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); List<String> subscriberConfigs = new ArrayList<>(); subscriberConfigs.add("ceg_data_checking"); subscriberConfigs.add("ceg_enterprise"); subscriberConfigs.add("hurley_data_checking"); subscriberConfigs.add("hurley_deidentified"); Set<String> observationsNotDeleted = new HashSet<>(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); ExchangePayloadFile firstItem = payload.get(0); //String version = EmisCsvToFhirTransformer.determineVersion(payload); //if we've reached the point before we process data for this practice, break out try { if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) { break; } } catch (TransformException e) { LOG.info("Skipping exchange containing " + firstItem.getPath()); continue; } String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId()); for (ExchangeBatch batch : batches) { if (batch.getEdsPatientId() != null) { hmBatchesByPatient.put(batch.getEdsPatientId(), batch); } } for (ExchangePayloadFile item : payload) { String type = item.getType(); if (type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String deleted = record.get("Deleted"); String observationId = record.get("ObservationGuid"); if (deleted.equalsIgnoreCase("true")) { //if observation was reinstated at some point, skip it if (observationsNotDeleted.contains(observationId)) { continue; } String patientId = record.get("PatientGuid"); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId); CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId); Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell); for (ResourceType resourceType: resourceTypes) { //will already have been done OK if (resourceType == ResourceType.Observation) { continue; } String sourceId = patientId + ":" + observationId; UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId); } LOG.debug("Fixing " + resourceType + " " + uuid); //create file of IDs to delete for each subscriber DB for (String subscriberConfig : subscriberConfigs) { EnterpriseIdDalI subscriberDal = 
DalProvider.factoryEnterpriseIdDal(subscriberConfig); Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString()); if (enterpriseId == null) { continue; } String sql = null; if (resourceType == ResourceType.AllergyIntolerance) { sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId; } else if (resourceType == ResourceType.ReferralRequest) { sql = "DELETE FROM referral_request WHERE id = " + enterpriseId; } else { sql = "DELETE FROM observation WHERE id = " + enterpriseId; } sql += "\n"; File f = new File(subscriberConfig + ".sql"); Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); } //delete resource if not already done ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (resourceWrapper != null && !resourceWrapper.isDeleted()) { ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId()); resourceWrapper.setDeleted(true); resourceWrapper.setResourceData(null); resourceWrapper.setResourceMetadata(""); resourceWrapper.setExchangeBatchId(batch.getBatchId()); resourceWrapper.setVersion(UUID.randomUUID()); resourceWrapper.setCreatedAt(new Date()); resourceWrapper.setExchangeId(exchange.getId()); resourceDal.delete(resourceWrapper); } } } else { observationsNotDeleted.add(observationId); } } parser.close(); } } } LOG.info("Finished Checking Observations for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) { LOG.info("Testing Batch Inserts"); try { int inserts = Integer.parseInt(num); int batchSize = Integer.parseInt(batchSizeStr); LOG.info("Openning Connection"); Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); //String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);"; String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?)"; PreparedStatement ps = conn.prepareStatement(sql); if (batchSize == 1) { LOG.info("Testing non-batched inserts"); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.execute(); } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } else { LOG.info("Testing batched inserts with batch size " + batchSize); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.addBatch(); if ((i + 1) % batchSize == 0 || i + 1 >= inserts) { ps.executeBatch(); } } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } ps.close(); conn.close(); LOG.info("Finished Testing Batch Inserts"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static String randomStr() { StringBuffer sb = new StringBuffer(); Random r = new Random(System.currentTimeMillis()); while (sb.length() < 1100) { sb.append(r.nextLong()); } return sb.toString(); } /*private static void fixEmisProblems(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems for " + serviceId); try { Map<String, List<String>> hmReferences = new 
HashMap<>(); Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Caching problem links"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); //String version = EmisCsvToFhirTransformer.determineVersion(payload); ExchangePayloadFile firstItem = payload.get(0); String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("ObservationGuid"); String localId = patientId + ":" + observationId; ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId)); Reference localReference = ReferenceHelper.createReference(resourceType, localId); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_DrugRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("DrugRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String 
localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_IssueRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("IssueRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else { //no problem link } } } LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString()); for (ResourceWrapper wrapper: wrappers) { if (wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = 
IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } //sort out the contained list of children ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder); //remove any existing children listBuilder.removeContainedList(); //add all the new ones we've found List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString()); if (localChildReferences != null) { for (String localChildReference: localChildReferences) { Reference reference = ReferenceHelper.createReference(localChildReference); listBuilder.addContainedListItem(reference); } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(condition); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) { try { LOG.info("Doing fix for " + publisher); String[] done = new String[]{ "01fcfe94-5dfd-4951-b74d-129f874209b0", "07a267d3-189b-4968-b9b0-547de28edef5", "0b9601d1-f7ab-4f5d-9f77-1841050f75ab", "0fd2ff5d-2c25-4707-afe8-707e81a250b8", "14276da8-c344-4841-a36d-aa38940e78e7", "158251ca-0e1d-4471-8fae-250b875911e1", "160131e2-a5ff-49c8-b62e-ae499a096193", "16490f2b-62ce-44c6-9816-528146272340", "18fa1bed-b9a0-4d55-a0cc-dfc31831259a", "19cba169-d41e-424a-812f-575625c72305", "19ff6a03-25df-4e61-9ab1-4573cfd24729", "1b3d1627-f49e-4103-92d6-af6016476da3", "1e198fbb-c9cd-429a-9b50-0f124d0d825c", "20444fbe-0802-46fc-8203-339a36f52215", "21e27bf3-8071-48dd-924f-1d8d21f9216f", "23203e72-a3b0-4577-9942-30f7cdff358e", "23be1f4a-68ec-4a49-b2ec-aa9109c99dcd", "2b56033f-a9b4-4bab-bb53-c619bdb38895", "2ba26f2d-8068-4b77-8e62-431edfc2c2e2", "2ed89931-0ce7-49ea-88ac-7266b6c03be0", "3abf8ded-f1b1-495b-9a2d-5d0223e33fa7", "3b0f6720-2ffd-4f8a-afcd-7e3bb311212d", "415b509a-cf39-45bc-9acf-7f982a00e159", "4221276f-a3b0-4992-b426-ec2d8c7347f2", "49868211-d868-4b55-a201-5acac0be0cc0", "55fdcbd0-9b2d-493a-b874-865ccc93a156", "56124545-d266-4da9-ba1f-b3a16edc7f31", "6c11453b-dbf8-4749-a0ec-ab705920e316" }; ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> all = dal.getAll(); for (Service service: all) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { boolean alreadyDone = false; String idStr = service.getId().toString(); for (String doneId: done) { if (idStr.equalsIgnoreCase(doneId)) { alreadyDone = true; break; } } if (alreadyDone) { continue; } fixEmisProblems3(service.getId(), systemId); } } LOG.info("Done fix for " + publisher); } catch (Throwable t) { LOG.error("", t); } } private static void fixEmisProblems3(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems 3 for " + serviceId); try { Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Finding patients"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = 
exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("Admin_Patient")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientId = record.get("PatientGuid"); patientIds.add(patientId); } parser.close(); } } } LOG.info("Finished checking files, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); for (ResourceType resourceType: potentialResourceTypes) { List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString()); for (ResourceWrapper wrapper : wrappers) { if (wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson); //Also go through all observation records and any that have parent observations - these need fixing too??? 
Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE); if (extension != null) { Reference reference = (Reference)extension.getValue(); fixReference(serviceId, filer, reference, potentialResourceTypes); } if (resource instanceof Observation) { Observation obs = (Observation)resource; if (obs.hasRelated()) { for (Observation.ObservationRelatedComponent related: obs.getRelated()) { if (related.hasTarget()) { Reference reference = related.getTarget(); fixReference(serviceId, filer, reference, potentialResourceTypes); } } } } if (resource instanceof DiagnosticReport) { DiagnosticReport diag = (DiagnosticReport)resource; if (diag.hasResult()) { for (Reference reference: diag.getResult()) { fixReference(serviceId, filer, reference, potentialResourceTypes); } } } //Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save if (resource instanceof Condition) { if (resource.hasContained()) { for (Resource contained: resource.getContained()) { if (contained.getId().equals("Items")) { List_ containedList = (List_)contained; if (containedList.hasEntry()) { for (List_.ListEntryComponent entry: containedList.getEntry()) { Reference reference = entry.getItem(); fixReference(serviceId, filer, reference, potentialResourceTypes); } } } } } //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(resource); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed++; } } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems 3 for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception { //if it's already something other than observation, we're OK ReferenceComponents comps = ReferenceHelper.getReferenceComponents(reference); if (comps.getResourceType() != ResourceType.Observation) { return false; } Reference sourceReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference); String sourceId = ReferenceHelper.getReferenceId(sourceReference); String newReferenceValue = findTrueResourceType(serviceId, potentialResourceTypes, sourceId); if (newReferenceValue == null) { return false; } reference.setReference(newReferenceValue); return true; } private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception { ResourceDalI dal = DalProvider.factoryResourceDal(); for (ResourceType resourceType: potentials) { UUID uuid = 
IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { continue; } ResourceWrapper wrapper = dal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (wrapper != null) { return ReferenceHelper.createResourceReference(resourceType, uuid.toString()); } } return null; }*/ /*private static void convertExchangeBody(UUID systemUuid) { try { LOG.info("Converting exchange bodies for system " + systemUuid); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE); if (exchanges.isEmpty()) { continue; } LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges"); for (Exchange exchange: exchanges) { String exchangeBody = exchange.getBody(); try { //already done ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class); continue; } catch (JsonSyntaxException ex) { //if the JSON can't be parsed, then it'll be the old format of body that isn't JSON } List<ExchangePayloadFile> newFiles = new ArrayList<>(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (String file: files) { ExchangePayloadFile fileObj = new ExchangePayloadFile(); String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1); fileObj.setPath(fileWithoutSharedStorage); //size List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file); for (FileInfo info: fileInfos) { if (info.getFilePath().equals(file)) { long size = info.getSize(); fileObj.setSize(new Long(size)); } } //type if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live || systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev //emis String name = FilenameUtils.getName(file); String[] toks = name.split("_"); String first = toks[1]; String second = toks[2]; fileObj.setType(first + "_" + second); *//* } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095") || systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev //cerner String name = FilenameUtils.getName(file); if (Strings.isNullOrEmpty(name)) { continue; } try { String type = BartsCsvToFhirTransformer.identifyFileType(name); fileObj.setType(type); } catch (Exception ex2) { throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId()); }*//* } else { throw new Exception("Unknown system ID " + systemUuid); } newFiles.add(fileObj); } String json = JsonSerializer.serialize(newFiles); exchange.setBody(json); exchangeDal.save(exchange); } } LOG.info("Finished Converting exchange bodies for system " + systemUuid); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixBartsOrgs(String serviceId) { try { LOG.info("Fixing Barts orgs"); ResourceDalI dal = DalProvider.factoryResourceDal(); List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString()); LOG.debug("Found " + wrappers.size() + " resources"); int done = 0; int fixed = 0; for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId()); 
ResourceWrapper mostRecent = history.get(0); String json = mostRecent.getResourceData(); Organization org = (Organization)FhirSerializationHelper.deserializeResource(json); String odsCode = IdentifierHelper.findOdsCode(org); if (Strings.isNullOrEmpty(odsCode) && org.hasIdentifier()) { boolean hasBeenFixed = false; for (Identifier identifier: org.getIdentifier()) { if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE) && identifier.hasId()) { odsCode = identifier.getId(); identifier.setValue(odsCode); identifier.setId(null); hasBeenFixed = true; } } if (hasBeenFixed) { String newJson = FhirSerializationHelper.serializeResource(org); mostRecent.setResourceData(newJson); LOG.debug("Fixed Organization " + org.getId()); *//*LOG.debug(json); LOG.debug(newJson);*//* saveResourceWrapper(UUID.fromString(serviceId), mostRecent); fixed ++; } } } done ++; if (done % 100 == 0) { LOG.debug("Done " + done + ", Fixed " + fixed); } } LOG.debug("Done " + done + ", Fixed " + fixed); LOG.info("Finished Barts orgs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testPreparedStatements(String url, String user, String pass, String serviceId) { try { LOG.info("Testing Prepared Statements"); LOG.info("Url: " + url); LOG.info("user: " + user); LOG.info("pass: " + pass); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? AND source_id = ?"; long start = System.currentTimeMillis(); for (int i=0; i<10000; i++) { PreparedStatement ps = null; try { ps = conn.prepareStatement(sql); ps.setString(1, serviceId); ps.setString(2, "MILLPERSIDtoMRN"); ps.setString(3, UUID.randomUUID().toString()); ResultSet rs = ps.executeQuery(); while (rs.next()) { //do nothing } } finally { if (ps != null) { ps.close(); } } } long end = System.currentTimeMillis(); LOG.info("Took " + (end-start) + " ms"); //close connection conn.close(); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEncounters(String table) { LOG.info("Fixing encounters from " + table); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); Date cutoff = sdf.parse("2018-03-14 11:42"); EntityManager entityManager = ConnectionManager.getAdminEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); List<UUID> serviceIds = new ArrayList<>(); Map<UUID, UUID> hmSystems = new HashMap<>(); String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { UUID serviceId = UUID.fromString(rs.getString(1)); UUID systemId = UUID.fromString(rs.getString(2)); serviceIds.add(serviceId); hmSystems.put(serviceId, systemId); } rs.close(); statement.close(); entityManager.close(); for (UUID serviceId: serviceIds) { UUID systemId = hmSystems.get(serviceId); LOG.info("Doing service " + serviceId + " and system " + systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId); List<UUID> exchangeIdsToProcess = new ArrayList<>(); for (UUID exchangeId: exchangeIds) { List<ExchangeTransformAudit> audits = 
exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit: audits) { Date d = audit.getStarted(); if (d.after(cutoff)) { exchangeIdsToProcess.add(exchangeId); break; } } } Map<String, ReferenceList> consultationNewChildMap = new HashMap<>(); Map<String, ReferenceList> observationChildMap = new HashMap<>(); Map<String, ReferenceList> newProblemChildren = new HashMap<>(); for (UUID exchangeId: exchangeIdsToProcess) { Exchange exchange = exchangeDal.getExchange(exchangeId); String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody()); String version = EmisCsvToFhirTransformer.determineVersion(files); List<String> interestingFiles = new ArrayList<>(); for (String file: files) { if (file.indexOf("CareRecord_Consultation") > -1 || file.indexOf("CareRecord_Observation") > -1 || file.indexOf("CareRecord_Diary") > -1 || file.indexOf("Prescribing_DrugRecord") > -1 || file.indexOf("Prescribing_IssueRecord") > -1 || file.indexOf("CareRecord_Problem") > -1) { interestingFiles.add(file); } } files = interestingFiles.toArray(new String[0]); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers); String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true); Consultation consultationParser = (Consultation)parsers.get(Consultation.class); while (consultationParser.nextRecord()) { CsvCell consultationGuid = consultationParser.getConsultationGuid(); CsvCell patientGuid = consultationParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); consultationNewChildMap.put(sourceId, new ReferenceList()); } Problem problemParser = (Problem)parsers.get(Problem.class); while (problemParser.nextRecord()) { CsvCell problemGuid = problemParser.getObservationGuid(); CsvCell patientGuid = problemParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); newProblemChildren.put(sourceId, new ReferenceList()); } //run this pre-transformer to pre-cache some stuff in the csv helper, which //is needed when working out the resource type that each observation would be saved as ObservationPreTransformer.transform(version, parsers, null, csvHelper); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { CsvCell observationGuid = observationParser.getObservationGuid(); CsvCell patientGuid = observationParser.getPatientGuid(); String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid); CsvCell codeId = observationParser.getCodeId(); if (codeId.isEmpty()) { continue; } ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId); if (obUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId); //resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); } Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString()); CsvCell consultationGuid = 
observationParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell problemGuid = observationParser.getProblemGuid(); if (!problemGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell parentObGuid = observationParser.getParentObservationGuid(); if (!parentObGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid); ReferenceList referenceList = observationChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); observationChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } } Diary diaryParser = (Diary)parsers.get(Diary.class); while (diaryParser.nextRecord()) { CsvCell consultationGuid = diaryParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { CsvCell diaryGuid = diaryParser.getDiaryGuid(); CsvCell patientGuid = diaryParser.getPatientGuid(); String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid); UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId); if (diaryUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId); } Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(diaryReference); } } IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class); while (issueRecordParser.nextRecord()) { CsvCell problemGuid = issueRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid(); CsvCell patientGuid = issueRecordParser.getPatientGuid(); String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid); UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId); if (issueRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId); } Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(issueRecordReference); } } DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class); while (drugRecordParser.nextRecord()) { CsvCell problemGuid = drugRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { 
CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid(); CsvCell patientGuid = drugRecordParser.getPatientGuid(); String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid); UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId); if (drugRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId); } Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(drugRecordReference); } } for (AbstractCsvParser parser : parsers.values()) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix"); for (String encounterSourceId: consultationNewChildMap.keySet()) { ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId); //map to UUID UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId); if (encounterId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId); if (history.isEmpty()) { continue; //throw new Exception("Empty history for Encounter " + encounterId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Encounter encounter = (Encounter)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); containedListBuilder.addReferences(childReferences); String newJson = FhirSerializationHelper.serializeResource(encounter); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix"); 
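//for each parent observation source ID, work out whether it was saved as an Observation or a
//DiagnosticReport, pull the child references linked in the last version stored before the cutoff
//into the collected list, then re-serialise the current version and save it only if the JSON changed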
for (String sourceId: observationChildMap.keySet()) { ReferenceList childReferences = observationChildMap.get(sourceId); //map to UUID ResourceType resourceType = null; UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId); if (resourceId != null) { resourceType = ResourceType.Observation; } else { resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId); if (resourceId != null) { resourceType = ResourceType.DiagnosticReport; } else { continue; } } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId); if (history.isEmpty()) { //throw new Exception("Empty history for " + resourceType + " " + resourceId); continue; } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (resourceType == ResourceType.Observation) { if (wrapper.getResourceData() != null) { Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (observation.hasRelated()) { for (Observation.ObservationRelatedComponent related : observation.getRelated()) { Reference reference = related.getTarget(); childReferences.add(reference); } } } } else { if (wrapper.getResourceData() != null) { DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (report.hasResult()) { for (Reference reference : report.getResult()) { childReferences.add(reference); } } } } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Resource resource = FhirSerializationHelper.deserializeResource(currentState.getResourceData()); boolean changed = false; if (resourceType == ResourceType.Observation) { ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addChildObservation(reference)) { changed = true; } } } else { DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addResult(reference)) { changed = true; } } } if (changed) { String newJson = FhirSerializationHelper.serializeResource(resource); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); }*//* } LOG.info("Found " + newProblemChildren.size() + " Problems to fix"); for (String sourceId: newProblemChildren.keySet()) { ReferenceList childReferences = newProblemChildren.get(sourceId); //map to UUID UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId); if (conditionId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = 
resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId); if (history.isEmpty()) { continue; //throw new Exception("Empty history for Condition " + conditionId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Condition condition = (Condition)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); containedListBuilder.addReferences(childReferences); String newJson = FhirSerializationHelper.serializeResource(condition); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } //mark as done String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';"; entityManager = ConnectionManager.getAdminEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); } */ /** * For each practice: * Go through all files processed since 14 March * Cache all links as above * Cache all Encounters saved too * <p> * For each Encounter referenced at all: * Retrieve latest version from resource current * Retrieve version prior to 14 March * Update current version with old references plus new ones * <p> * For each parent observation: * Retrieve latest version (could be observation or diagnostic report) * <p> * For each problem: * Retrieve latest version from resource current * Check if still a problem: * Retrieve version prior to 14 March * Update current version with old references plus new ones *//* LOG.info("Finished Fixing encounters from " + table); } catch (Throwable t) { LOG.error("", t); } }*/ private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception { if (wrapper.getVersion() == null) { throw new Exception("Can't update resource history without version UUID"); } if (wrapper.getResourceData() != null) { long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData()); wrapper.setResourceChecksum(new Long(checksum)); } EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl) 
entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); entityManager.getTransaction().begin(); String json = wrapper.getResourceData(); json = json.replace("'", "''"); json = json.replace("\\", "\\\\"); String patientId = ""; if (wrapper.getPatientId() != null) { patientId = wrapper.getPatientId().toString(); } String updateSql = "UPDATE resource_current" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE service_id = '" + wrapper.getServiceId() + "'" + " AND patient_id = '" + patientId + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" + " AND resource_id = '" + wrapper.getResourceId() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); //SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS"); //String createdAtStr = sdf.format(wrapper.getCreatedAt()); updateSql = "UPDATE resource_history" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE resource_id = '" + wrapper.getResourceId() + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" //+ " AND created_at = '" + createdAtStr + "'" + " AND version = '" + wrapper.getVersion() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); entityManager.getTransaction().commit(); } /*private static void populateNewSearchTable(String table) { LOG.info("Populating New Search Table"); try { EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); List<String> patientIds = new ArrayList<>(); Map<String, String> serviceIds = new HashMap<>(); String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String serviceId = rs.getString(2); patientIds.add(patientId); serviceIds.put(patientId, serviceId); } rs.close(); statement.close(); entityManager.close(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); LOG.info("Found " + patientIds.size() + " to do"); for (int i=0; i<patientIds.size(); i++) { String patientIdStr = patientIds.get(i); UUID patientId = UUID.fromString(patientIdStr); String serviceIdStr = serviceIds.get(patientIdStr); UUID serviceId = UUID.fromString(serviceIdStr); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr); if (patient != null) { LOG.debug("Updating for patient " + patientIdStr); patientSearchDal.update(serviceId, patient); LOG.debug("Done"); } else { List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientId); if (history.isEmpty()) { LOG.debug("No history found for patient " + patientIdStr); } else { ResourceWrapper first = history.get(0); if (!first.isDeleted()) { throw new Exception("Resource current null for " + ResourceType.Patient + " " + patientIdStr + " but not deleted in resource_history"); } //find first non-deleted instance and update for it, then delete for (ResourceWrapper historyItem: history) { if (!historyItem.isDeleted()) { patient = (Patient)FhirSerializationHelper.deserializeResource(historyItem.getResourceData()); LOG.debug("Patient is deleted, so updating for 
deleted patient " + patientIdStr); patientSearchDal.update(serviceId, patient); patientSearchDal.deletePatient(serviceId, patient); LOG.debug("Done"); break; } } } } //find episode of care //note, we don't have any current way to retrieve deleted episodes of care for a patient, so can only do this for non-deleted ones List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, patientId, ResourceType.EpisodeOfCare.toString()); for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { LOG.debug("Updating for episodeOfCare resource " + wrapper.getResourceId()); EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); patientSearchDal.update(serviceId, episodeOfCare); LOG.debug("Done"); } else { LOG.debug("EpisodeOfCare " + wrapper.getResourceId() + " is deleted"); } } String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';"; entityManager = ConnectionManager.getEdsEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); if (i % 5000 == 0) { LOG.info("Done " + (i+1) + " of " + patientIds.size()); } } entityManager.close(); LOG.info("Finished Populating New Search Table"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) { LOG.info("Creating Barts Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds); LOG.info("Finished Creating Barts Subset"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { for (File sourceFile: sourceDir.listFiles()) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } LOG.info("Doing dir " + sourceFile); createBartsSubsetForFile(sourceFile, destFile, personIds); } else { //we have some bad partial files in, so ignore them String ext = FilenameUtils.getExtension(name); if (ext.equalsIgnoreCase("filepart")) { continue; } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String baseName = FilenameUtils.getBaseName(name); String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex ++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding 
personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i=0; i<expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if (personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } }*/ /*private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception { ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile fileObj : files) { String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length() + 1); String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage); File sourceFile = new File(sourceFilePath); String destFilePath = fileObj.getPath(); File destFile = new File(destFilePath); File destDir = destFile.getParentFile(); if (!destDir.exists()) { destDir.mkdirs(); } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String fileType = fileObj.getType(); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i = 0; i < expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if 
(personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } }*/ private static void copyFile(File src, File dst) throws Exception { FileInputStream fis = new FileInputStream(src); BufferedInputStream bis = new BufferedInputStream(fis); Files.copy(bis, dst.toPath()); bis.close(); } private static boolean isCerner22File(String fileType) throws Exception { if (fileType.equalsIgnoreCase("PPATI") || fileType.equalsIgnoreCase("PPREL") || fileType.equalsIgnoreCase("CDSEV") || fileType.equalsIgnoreCase("PPATH") || fileType.equalsIgnoreCase("RTTPE") || fileType.equalsIgnoreCase("AEATT") || fileType.equalsIgnoreCase("AEINV") || fileType.equalsIgnoreCase("AETRE") || fileType.equalsIgnoreCase("OPREF") || fileType.equalsIgnoreCase("OPATT") || fileType.equalsIgnoreCase("EALEN") || fileType.equalsIgnoreCase("EALSU") || fileType.equalsIgnoreCase("EALOF") || fileType.equalsIgnoreCase("HPSSP") || fileType.equalsIgnoreCase("IPEPI") || fileType.equalsIgnoreCase("IPWDS") || fileType.equalsIgnoreCase("DELIV") || fileType.equalsIgnoreCase("BIRTH") || fileType.equalsIgnoreCase("SCHAC") || fileType.equalsIgnoreCase("APPSL") || fileType.equalsIgnoreCase("DIAGN") || fileType.equalsIgnoreCase("PROCE") || fileType.equalsIgnoreCase("ORDER") || fileType.equalsIgnoreCase("DOCRP") || fileType.equalsIgnoreCase("DOCREF") || fileType.equalsIgnoreCase("CNTRQ") || fileType.equalsIgnoreCase("LETRS") || fileType.equalsIgnoreCase("LOREF") || fileType.equalsIgnoreCase("ORGREF") || fileType.equalsIgnoreCase("PRSNLREF") || fileType.equalsIgnoreCase("CVREF") || fileType.equalsIgnoreCase("NOMREF") || fileType.equalsIgnoreCase("EALIP") || fileType.equalsIgnoreCase("CLEVE") || fileType.equalsIgnoreCase("ENCNT") || fileType.equalsIgnoreCase("RESREF") || fileType.equalsIgnoreCase("PPNAM") || fileType.equalsIgnoreCase("PPADD") || fileType.equalsIgnoreCase("PPPHO") || fileType.equalsIgnoreCase("PPALI") || fileType.equalsIgnoreCase("PPINF") || fileType.equalsIgnoreCase("PPAGP") || fileType.equalsIgnoreCase("SURCC") || fileType.equalsIgnoreCase("SURCP") || fileType.equalsIgnoreCase("SURCA") || fileType.equalsIgnoreCase("SURCD") || fileType.equalsIgnoreCase("PDRES") || fileType.equalsIgnoreCase("PDREF") || fileType.equalsIgnoreCase("ABREF") || fileType.equalsIgnoreCase("CEPRS") || fileType.equalsIgnoreCase("ORDDT") || fileType.equalsIgnoreCase("STATREF") || fileType.equalsIgnoreCase("STATA") || fileType.equalsIgnoreCase("ENCINF") || fileType.equalsIgnoreCase("SCHDETAIL") || 
fileType.equalsIgnoreCase("SCHOFFER") || fileType.equalsIgnoreCase("PPGPORG") || fileType.equalsIgnoreCase("FAMILYHISTORY")) { return true; } else { return false; } } /*private static void fixSubscriberDbs() { LOG.info("Fixing Subscriber DBs"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches"); List<UUID> batchIds = new ArrayList<>(); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); batchIds.add(batchId); } String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } LOG.info("Finished Fixing Subscriber DBs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixReferralRequests() { LOG.info("Fixing Referral Requests"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24"); List<Service> services = 
serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; Set<UUID> patientIdsToPost = new HashSet<>(); for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper wrapper: wrappers) { String resourceType = wrapper.getResourceType(); if (!resourceType.equals(ResourceType.ReferralRequest.toString()) || wrapper.isDeleted()) { continue; } String json = wrapper.getResourceData(); ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json); *//*if (!referral.hasServiceRequested()) { continue; } CodeableConcept reason = referral.getServiceRequested().get(0); referral.setReason(reason); referral.getServiceRequested().clear();*//* if (!referral.hasReason()) { continue; } CodeableConcept reason = referral.getReason(); referral.setReason(null); referral.addServiceRequested(reason); json = FhirSerializationHelper.serializeResource(referral); wrapper.setResourceData(json); saveResourceWrapper(serviceId, wrapper); //add to the set of patients we know need sending on to the protocol queue patientIdsToPost.add(patientId); LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId); } //if our patient has just been fixed or was fixed before, post onto the protocol queue if (patientIdsToPost.contains(patientId)) { List<UUID> batchIds = new ArrayList<>(); batchIds.add(batchId); String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } } } LOG.info("Finished Fixing Referral Requests"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void applyEmisAdminCaches() { LOG.info("Applying Emis Admin Caches"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); UUID emisSystem = 
UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) { LOG.info(" Service not started, so skipping"); continue; } //get exchanges List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); if (exchangeIds.isEmpty()) { LOG.info(" No exchanges found, so skipping"); continue; } UUID firstExchangeId = exchangeIds.get(0); List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId); boolean appliedAdminCache = false; for (ExchangeEvent event: events) { if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) { appliedAdminCache = true; } } if (appliedAdminCache) { LOG.info(" Have already applied admin cache, so skipping"); continue; } Exchange exchange = exchangeDal.getExchange(firstExchangeId); String body = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(body); if (files.length == 0) { LOG.info(" No files in exchange " + firstExchangeId + " so skipping"); continue; } String firstFilePath = files[0]; String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath); } String sharingAgreementGuid = toks[4]; List<UUID> batchIds = new ArrayList<>(); TransformError transformError = new TransformError(); FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds); EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(), fhirResourceFiler.getExchangeId(), sharingAgreementGuid, true); ExchangeTransformAudit transformAudit = new ExchangeTransformAudit(); transformAudit.setServiceId(serviceId); transformAudit.setSystemId(endpointSystemId); transformAudit.setExchangeId(firstExchangeId); transformAudit.setId(UUID.randomUUID()); transformAudit.setStarted(new Date()); LOG.info(" Going to apply admin resource cache"); csvHelper.applyAdminResourceCache(fhirResourceFiler); fhirResourceFiler.waitToFinish(); for (UUID batchId: batchIds) { LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId); } transformAudit.setEnded(new Date()); transformAudit.setNumberBatchesCreated(new Integer(batchIds.size())); boolean hadError = false; if (transformError.getError().size() > 0) { transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError)); hadError = true; } exchangeDal.save(transformAudit); //clear down the cache of reference mappings since they won't be of much use for the next Exchange IdHelper.clearCache(); if (hadError) { 
LOG.error(" <<<<<<Error applying resource cache!"); continue; } //add the event to say we've applied the cache AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache"); //post that ONE new batch ID onto the protocol queue String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished Applying Emis Admin Caches"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixBartsEscapedFiles(String filePath) { LOG.info("Fixing Barts Escaped Files in " + filePath); try { fixBartsEscapedFilesInDir(new File(filePath)); LOG.info("Finished fixing Barts Escaped Files in " + filePath); } catch (Throwable t) { LOG.error("", t); } } /** * fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by * replacing the "delete" extracts with newly generated deltas that can be processed * before the re-bulk is done */ /*private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) { LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(serviceOdsCode); LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId()); *//*File tempDirLast = new File(tempDir, "last"); if (!tempDirLast.exists()) { if (!tempDirLast.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirLast); } tempDirLast.mkdirs(); } File tempDirEmpty = new File(tempDir, "empty"); if (!tempDirEmpty.exists()) { if (!tempDirEmpty.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirEmpty); } tempDirEmpty.mkdirs(); }*//* String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode); File f = new File(tempDir); if (f.exists()) { FileUtils.deleteDirectory(f); } UUID serviceUuid = service.getId(); UUID systemUuid = UUID.fromString(systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); //get all the exchanges, which are returned in reverse order, most recent first List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>(); Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>(); //reverse the exchange list and cache the files for each one List<Exchange> exchanges = new ArrayList<>(); for (int i = exchangesDesc.size() - 1; i >= 0; i--) { Exchange exchange = exchangesDesc.get(i); String exchangeBody = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); //drop out and ignore any exchanges containing the singular bespoke reg status files if (files.length <= 1) { continue; } //drop out and ignore any exchanges for the left and dead extracts, since we don't //expect to receive re-bulked data for the dead patients String firstFile = files[0]; if (firstFile.indexOf("LEFT_AND_DEAD") > -1) { continue; } exchanges.add(exchange); //populate the map of the files with the shared storage prefix List<String> fileList = Lists.newArrayList(files); hmExchangeFiles.put(exchange, fileList); //populate a map of the same files 
without the prefix files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (int j = 0; j < files.length; j++) { String file = files[j].substring(sharedStoragePath.length() + 1); files[j] = file; } fileList = Lists.newArrayList(files); hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList); } *//*exchanges.sort((o1, o2) -> { Date d1 = o1.getTimestamp(); Date d2 = o2.getTimestamp(); return d1.compareTo(d2); });*//* LOG.info("Found " + exchanges.size() + " exchanges and cached their files"); int indexDisabled = -1; int indexRebulked = -1; int indexOriginallyBulked = -1; //go back through them to find the extract where the re-bulk is and when it was disabled (the list is in date order, so we're iterating most-recent first) for (int i = exchanges.size() - 1; i >= 0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { indexDisabled = i; } else { if (indexDisabled == -1) { indexRebulked = i; } else { //if we've found a non-disabled extract older than the disabled ones, //then we've gone far enough back break; } } } //go back from when disabled to find the previous bulk load (i.e. the first one or one after it was previously not disabled) for (int i = indexDisabled - 1; i >= 0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { break; } indexOriginallyBulked = i; } if (indexOriginallyBulked > -1) { Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked); LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId()); } if (indexDisabled > -1) { Exchange exchangeDisabled = exchanges.get(indexDisabled); LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId()); } if (indexRebulked > -1) { Exchange exchangeRebulked = exchanges.get(indexRebulked); LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId()); } if (indexDisabled == -1 || indexRebulked == -1 || indexOriginallyBulked == -1) { throw new Exception("Failed to find exchanges for original bulk (" + indexOriginallyBulked + ") disabling (" + indexDisabled + ") or re-bulking (" + indexRebulked + ")"); } //continueOrQuit(); Exchange exchangeRebulked = exchanges.get(indexRebulked); List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked); List<String> tempFilesCreated = new ArrayList<>(); Set<String> patientGuidsDeletedOrTooOld = new HashSet<>(); for (String rebulkFile : rebulkFiles) { String fileType = findFileType(rebulkFile); if (!isPatientFile(fileType)) { continue; } LOG.info("Doing " + fileType); String guidColumnName = getGuidColumnName(fileType); //find all the guids in the re-bulk Set<String> idsInRebulk = new HashSet<>(); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); String[] headers = null; try { headers = CsvHelper.getHeaderMapAsArray(csvParser); Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); //get the patient and row guid out of the file and cache in our set String id = record.get("PatientGuid"); if (!Strings.isNullOrEmpty(guidColumnName)) { id += "//" + record.get(guidColumnName); } 
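//cache the composite PatientGuid (plus record GUID where the file type has one) so that rows
//already present in the re-bulk can be recognised and skipped when replaying the older extracts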
idsInRebulk.add(id); } } finally { csvParser.close(); } LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile); //create a replacement file for the exchange the service was disabled String replacementDisabledFile = null; Exchange exchangeDisabled = exchanges.get(indexDisabled); List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled); for (String s : disabledFiles) { String disabledFileType = findFileType(s); if (disabledFileType.equals(fileType)) { replacementDisabledFile = FilenameUtils.concat(tempDir, s); File dir = new File(replacementDisabledFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } tempFilesCreated.add(s); LOG.info("Created replacement file " + replacementDisabledFile); } } FileWriter fileWriter = new FileWriter(replacementDisabledFile); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); Set<String> pastIdsProcessed = new HashSet<>(); //now go through all files of the same type PRIOR to the service was disabled //to find any rows that we'll need to explicitly delete because they were deleted while //the extract was disabled for (int i = indexDisabled - 1; i >= indexOriginallyBulked; i--) { Exchange exchange = exchanges.get(i); String originalFile = null; List<String> files = hmExchangeFiles.get(exchange); for (String s : files) { String originalFileType = findFileType(s); if (originalFileType.equals(fileType)) { originalFile = s; break; } } if (originalFile == null) { continue; } LOG.info(" Reading " + originalFile); reader = FileHelper.readFileReaderFromSharedStorage(originalFile); csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientGuid = record.get("PatientGuid"); //get the patient and row guid out of the file and cache in our set String uniqueId = patientGuid; if (!Strings.isNullOrEmpty(guidColumnName)) { uniqueId += "//" + record.get(guidColumnName); } //if we're already handled this record in a more recent extract, then skip it if (pastIdsProcessed.contains(uniqueId)) { continue; } pastIdsProcessed.add(uniqueId); //if this ID isn't deleted and isn't in the re-bulk then it means //it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted //from Emis Web while the extract feed was disabled //if the record is deleted, then we won't expect it in the re-bulk boolean deleted = Boolean.parseBoolean(record.get("Deleted")); if (deleted) { //if it's the Patient file, stick the patient GUID in a set so we know full patient record deletes if (fileType.equals("Admin_Patient")) { patientGuidsDeletedOrTooOld.add(patientGuid); } continue; } //if it's not the patient file and we refer to a patient that we know //has been deleted, then skip this row, since we know we're deleting the entire patient record if (patientGuidsDeletedOrTooOld.contains(patientGuid)) { continue; } //if the re-bulk contains a record matching this one, then it's OK if (idsInRebulk.contains(uniqueId)) { continue; } //the rebulk won't contain any data for patients that are now too old (i.e. 
deducted or deceased > 2 yrs ago), //so any patient ID in the original files but not in the rebulk can be treated like this and any data for them can be skipped if (fileType.equals("Admin_Patient")) { //retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid); if (patientUuid == null) { throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]"); } Patient patientResource = (Patient) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString()); if (patientResource.hasDeceased()) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too EpisodeOfCare episodeResource = (EpisodeOfCare) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString()); if (episodeResource.hasPeriod() && !PeriodHelper.isActive(episodeResource.getPeriod())) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } } //create a new CSV record, carrying over the GUIDs from the original but marking as deleted String[] newRecord = new String[headers.length]; for (int j = 0; j < newRecord.length; j++) { String header = headers[j]; if (header.equals("PatientGuid") || header.equals("OrganisationGuid") || (!Strings.isNullOrEmpty(guidColumnName) && header.equals(guidColumnName))) { String val = record.get(header); newRecord[j] = val; } else if (header.equals("Deleted")) { newRecord[j] = "true"; } else { newRecord[j] = ""; } } csvPrinter.printRecord((Object[]) newRecord); csvPrinter.flush(); //log out the raw record that's missing from the original StringBuffer sb = new StringBuffer(); sb.append("Record not in re-bulk: "); for (int j = 0; j < record.size(); j++) { if (j > 0) { sb.append(","); } sb.append(record.get(j)); } LOG.info(sb.toString()); } } finally { csvParser.close(); } } csvPrinter.flush(); csvPrinter.close(); //also create a version of the CSV file with just the header and nothing else in for (int i = indexDisabled + 1; i < indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s : exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals(fileType)) { String emptyTempFile = FilenameUtils.concat(tempDir, s); File dir = new File(emptyTempFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } fileWriter = new FileWriter(emptyTempFile); bufferedWriter = new BufferedWriter(fileWriter); csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); csvPrinter.close(); tempFilesCreated.add(s); LOG.info("Created empty file " + emptyTempFile); } } } } //we also need to copy the restored sharing agreement file to replace all the period it was disabled String rebulkedSharingAgreementFile = null; for (String s : rebulkFiles) { String fileType = findFileType(s); if (fileType.equals("Agreements_SharingOrganisation")) { rebulkedSharingAgreementFile = s; } } for (int i = indexDisabled; i < indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s : 
exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals("Agreements_SharingOrganisation")) { String replacementFile = FilenameUtils.concat(tempDir, s); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile); File replacementFileObj = new File(replacementFile); Files.copy(inputStream, replacementFileObj.toPath()); inputStream.close(); tempFilesCreated.add(s); } } } //create a script to copy the files into S3 List<String> copyScript = new ArrayList<>(); copyScript.add("#!/bin/bash"); copyScript.add(""); for (String s : tempFilesCreated) { String localFile = FilenameUtils.concat(tempDir, s); copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s); } String scriptFile = FilenameUtils.concat(tempDir, "copy.sh"); FileUtils.writeLines(new File(scriptFile), copyScript); LOG.info("Finished - written files to " + tempDir); dumpFileSizes(new File(tempDir)); *//*continueOrQuit(); //back up every file where the service was disabled for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { //first download from S3 to the local temp dir InputStream inputStream = FileHelper.readFileFromSharedStorage(file); String fileName = FilenameUtils.getName(file); String tempPath = FilenameUtils.concat(tempDir, fileName); File downloadDestination = new File(tempPath); Files.copy(inputStream, downloadDestination.toPath()); //then write back to S3 in a sub-dir of the original file String backupPath = FilenameUtils.getPath(file); backupPath = FilenameUtils.concat(backupPath, "Original"); backupPath = FilenameUtils.concat(backupPath, fileName); FileHelper.writeFileToSharedStorage(backupPath, downloadDestination); LOG.info("Backed up " + file + " -> " + backupPath); //delete from temp dir downloadDestination.delete(); } } continueOrQuit(); //copy the new CSV files into the dir where it was disabled List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled); for (String disabledFile: disabledFiles) { String fileType = findFileType(disabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected temp file " + f); } FileHelper.writeFileToSharedStorage(disabledFile, f); LOG.info("Copied " + tempFile + " -> " + disabledFile); } continueOrQuit(); //empty the patient files for any extracts while the service was disabled for (int i=indexDisabled+1; i<indexRebulked; i++) { Exchange otherExchangeDisabled = exchanges.get(i); List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled); for (String otherDisabledFile: otherDisabledFiles) { String fileType = findFileType(otherDisabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(otherDisabledFile, f); LOG.info("Copied " + tempFile + " -> " + otherDisabledFile); } } continueOrQuit(); //copy the content of the sharing agreement file from when it was re-bulked for (String rebulkFile: rebulkFiles) { String fileType = findFileType(rebulkFile); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = 
FilenameUtils.concat(tempDir, fileType + ".csv"); File downloadDestination = new File(tempFile); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile); Files.copy(inputStream, downloadDestination.toPath()); tempFilesCreated.add(tempFile); } } //replace the sharing agreement file for all disabled extracts with the non-disabled one for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { String fileType = findFileType(file); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(file, f); LOG.info("Copied " + tempFile + " -> " + file); } } } LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId); continueOrQuit(); for (String tempFileCreated: tempFilesCreated) { File f = new File(tempFileCreated); if (f.exists()) { f.delete(); } }*//* } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void dumpFileSizes(File f) { if (f.isDirectory()) { for (File child : f.listFiles()) { dumpFileSizes(child); } } else { String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length()); LOG.info("" + f + " = " + totalSizeReadable); } }*/ /*private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception { List<String> files = fileMap.get(exchange); String file = findSharingAgreementFile(files); String name = FilenameUtils.getBaseName(file); String[] toks = name.split("_"); return toks[3]; }*/ private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception { String file = findSharingAgreementFile(files); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); CSVRecord record = iterator.next(); String s = record.get("Disabled"); boolean disabled = Boolean.parseBoolean(s); return disabled; } finally { csvParser.close(); } } private static void continueOrQuit() throws Exception { LOG.info("Enter y to continue, anything else to quit"); byte[] bytes = new byte[10]; System.in.read(bytes); char c = (char) bytes[0]; if (c != 'y' && c != 'Y') { System.out.println("Read " + c); System.exit(1); } } private static String getGuidColumnName(String fileType) { if (fileType.equals("Admin_Patient")) { //patient file just has patient GUID, nothing extra return null; } else if (fileType.equals("CareRecord_Consultation")) { return "ConsultationGuid"; } else if (fileType.equals("CareRecord_Diary")) { return "DiaryGuid"; } else if (fileType.equals("CareRecord_Observation")) { return "ObservationGuid"; } else if (fileType.equals("CareRecord_Problem")) { //there is no separate problem GUID, as it's just a modified observation return "ObservationGuid"; } else if (fileType.equals("Prescribing_DrugRecord")) { return "DrugRecordGuid"; } else if (fileType.equals("Prescribing_IssueRecord")) { return "IssueRecordGuid"; } else { throw new IllegalArgumentException(fileType); } } private static String findFileType(String filePath) { String fileName = FilenameUtils.getName(filePath); String[] toks = fileName.split("_"); String domain = toks[1]; String name = toks[2]; return domain + "_" + name; } private 
static boolean isPatientFile(String fileType) { if (fileType.equals("Admin_Patient") || fileType.equals("CareRecord_Consultation") || fileType.equals("CareRecord_Diary") || fileType.equals("CareRecord_Observation") || fileType.equals("CareRecord_Problem") || fileType.equals("Prescribing_DrugRecord") || fileType.equals("Prescribing_IssueRecord")) { //note the referral file doesn't have a Deleted column, so isn't in this list return true; } else { return false; } } private static String findSharingAgreementFile(List<String> files) throws Exception { for (String file : files) { String fileType = findFileType(file); if (fileType.equals("Agreements_SharingOrganisation")) { return file; } } throw new Exception("Failed to find sharing agreement file in " + files.get(0)); } /*private static void testSlack() { LOG.info("Testing slack"); try { SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader"); LOG.info("Finished testing slack"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) { try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); Service service = serviceDalI.getById(serviceId); LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath); FileReader fr = new FileReader(filePath); BufferedReader br = new BufferedReader(fr); int count = 0; List<UUID> exchangeIdBatch = new ArrayList<>(); while (true) { String line = br.readLine(); if (line == null) { break; } UUID exchangeId = UUID.fromString(line); //update the transform audit, so EDS UI knows we've re-queued this exchange ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId); if (audit != null && !audit.isResubmitted()) { audit.setResubmitted(true); auditRepository.save(audit); } count ++; exchangeIdBatch.add(exchangeId); if (exchangeIdBatch.size() >= 1000) { QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false); exchangeIdBatch = new ArrayList<>(); LOG.info("Done " + count); } } if (!exchangeIdBatch.isEmpty()) { QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false); LOG.info("Done " + count); } br.close(); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Posting to inbound for " + serviceId); }*/ /*private static void postToInbound(UUID serviceId, boolean all) { LOG.info("Posting to inbound for " + serviceId); try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); Service service = serviceDalI.getById(serviceId); List<UUID> systemIds = findSystemIds(service); UUID systemId = systemIds.get(0); ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId); for (UUID exchangeId: errorState.getExchangeIdsInError()) { //update the transform audit, so EDS UI knows we've re-queued this exchange ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId); //skip any exchange IDs we've already re-queued up to be processed again if (audit.isResubmitted()) { LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted"); continue; } LOG.debug("Re-posting " + audit.getExchangeId()); audit.setResubmitted(true); auditRepository.save(audit); //then re-submit the exchange to Rabbit MQ for the queue reader to pick up 
QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false); if (!all) { LOG.info("Posted first exchange, so stopping"); break; } } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Posting to inbound for " + serviceId); }*/ /*private static void fixPatientSearchAllServices(String filterSystemId) { LOG.info("Fixing patient search for all services and system " + filterSystemId); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { fixPatientSearch(service.getId().toString(), filterSystemId); } LOG.info("Finished Fixing patient search for all services and system " + filterSystemId); } catch (Throwable t) { LOG.error("", t); } } private static void fixPatientSearch(String serviceId, String filterSystemId) { LOG.info("Fixing patient search for service " + serviceId); try { UUID serviceUuid = UUID.fromString(serviceId); UUID filterSystemUuid = null; if (!Strings.isNullOrEmpty(filterSystemId)) { filterSystemUuid = UUID.fromString(filterSystemId); } ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Set<UUID> patientsDone = new HashSet<>(); Service service = serviceDal.getById(serviceUuid); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { if (filterSystemUuid != null && !filterSystemUuid.equals(systemId)) { continue; } List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId); LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId); for (UUID exchangeId : exchanges) { List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } if (patientsDone.contains(patientId)) { continue; } patientsDone.add(patientId); ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId); if (wrapper != null) { String json = wrapper.getResourceData(); if (!Strings.isNullOrEmpty(json)) { Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json); patientSearchDal.update(serviceUuid, fhirPatient); } } if (patientsDone.size() % 1000 == 0) { LOG.info("Done " + patientsDone.size()); } } } } LOG.info("Done " + patientsDone.size()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished fixing patient search for " + serviceId); }*/ private static void runSql(String host, String username, String password, String sqlFile) { LOG.info("Running SQL on " + host + " from " + sqlFile); Connection conn = null; Statement statement = null; try { File f = new File(sqlFile); if (!f.exists()) { LOG.error("" + f + " doesn't exist"); return; } List<String> lines = FileUtils.readLines(f); /*String combined = String.join("\n", lines); LOG.info("Going to run SQL"); LOG.info(combined);*/ //load driver Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", username); props.setProperty("password", password); conn = DriverManager.getConnection(host, props); LOG.info("Opened connection"); statement = 
conn.createStatement(); long totalStart = System.currentTimeMillis(); for (String sql : lines) { sql = sql.trim(); if (sql.startsWith("--") || sql.startsWith("/*") || Strings.isNullOrEmpty(sql)) { continue; } LOG.info(""); LOG.info(sql); long start = System.currentTimeMillis(); boolean hasResultSet = statement.execute(sql); long end = System.currentTimeMillis(); LOG.info("SQL took " + (end - start) + "ms"); if (hasResultSet) { while (true) { ResultSet rs = statement.getResultSet(); int cols = rs.getMetaData().getColumnCount(); List<String> colHeaders = new ArrayList<>(); for (int i = 0; i < cols; i++) { String header = rs.getMetaData().getColumnName(i + 1); colHeaders.add(header); } String colHeaderStr = String.join(", ", colHeaders); LOG.info(colHeaderStr); while (rs.next()) { List<String> row = new ArrayList<>(); for (int i = 0; i < cols; i++) { Object o = rs.getObject(i + 1); if (rs.wasNull()) { row.add("<null>"); } else { row.add(o.toString()); } } String rowStr = String.join(", ", row); LOG.info(rowStr); } if (!statement.getMoreResults()) { break; } } } else { int updateCount = statement.getUpdateCount(); LOG.info("Updated " + updateCount + " Row(s)"); } } long totalEnd = System.currentTimeMillis(); LOG.info(""); LOG.info("Total time taken " + (totalEnd - totalStart) + "ms"); } catch (Throwable t) { LOG.error("", t); } finally { if (statement != null) { try { statement.close(); } catch (Exception ex) { } } if (conn != null) { try { conn.close(); } catch (Exception ex) { } } LOG.info("Closed connection"); } LOG.info("Finished running SQL on " + host + " from " + sqlFile); } /*private static void fixExchangeBatches() { LOG.info("Starting Fixing Exchange Batches"); try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); List<Service> services = serviceDalI.getAll(); for (Service service: services) { LOG.info("Doing " + service.getName()); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId()); for (UUID exchangeId: exchangeIds) { LOG.info(" Exchange " + exchangeId); List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId()); if (resources.isEmpty()) { continue; } ResourceWrapper first = resources.get(0); UUID patientId = first.getPatientId(); if (patientId != null) { exchangeBatch.setEdsPatientId(patientId); exchangeBatchDalI.save(exchangeBatch); LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId()); } } } } LOG.info("Finished Fixing Exchange Batches"); } catch (Exception ex) { LOG.error("", ex); } }*/ /** * exports ADT Encounters for patients based on a CSV file produced using the below SQL * --USE EDS DATABASE * <p> * -- barts b5a08769-cbbe-4093-93d6-b696cd1da483 * -- homerton 962d6a9a-5950-47ac-9e16-ebee56f9507a * <p> * create table adt_patients ( * service_id character(36), * system_id character(36), * nhs_number character varying(10), * patient_id character(36) * ); * <p> * -- delete from adt_patients; * <p> * select * from patient_search limit 10; * select * from patient_link limit 10; * <p> * insert into adt_patients * select distinct ps.service_id, ps.system_id, ps.nhs_number, ps.patient_id * 
from patient_search ps * join patient_link pl * on pl.patient_id = ps.patient_id * join patient_link pl2 * on pl.person_id = pl2.person_id * join patient_search ps2 * on ps2.patient_id = pl2.patient_id * where * ps.service_id IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a') * and ps2.service_id NOT IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a'); * <p> * <p> * select count(1) from adt_patients limit 100; * select * from adt_patients limit 100; * <p> * <p> * <p> * <p> * ---MOVE TABLE TO HL7 RECEIVER DB * <p> * select count(1) from adt_patients; * <p> * -- top 1000 patients with messages * <p> * select * from mapping.resource_uuid where resource_type = 'Patient' limit 10; * <p> * select * from log.message limit 10; * <p> * create table adt_patient_counts ( * nhs_number character varying(100), * count int * ); * <p> * insert into adt_patient_counts * select pid1, count(1) * from log.message * where pid1 is not null * and pid1 <> '' * group by pid1; * <p> * select * from adt_patient_counts order by count desc limit 100; * <p> * alter table adt_patients * add count int; * <p> * update adt_patients * set count = adt_patient_counts.count * from adt_patient_counts * where adt_patients.nhs_number = adt_patient_counts.nhs_number; * <p> * select count(1) from adt_patients where nhs_number is null; * <p> * select * from adt_patients * where nhs_number is not null * and count is not null * order by count desc limit 1000; */ /*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) { LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath); try { File sourceFile = new File(sourceCsvPath); CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); //"service_id","system_id","nhs_number","patient_id","count" int count = 0; HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>(); HashMap<UUID, Integer> patientIds = new HashMap<>(); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); count ++; String serviceId = csvRecord.get("service_id"); String systemId = csvRecord.get("system_id"); String patientId = csvRecord.get("patient_id"); UUID serviceUuid = UUID.fromString(serviceId); List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid); if (systemIds == null) { systemIds = new ArrayList<>(); serviceAndSystemIds.put(serviceUuid, systemIds); } systemIds.add(UUID.fromString(systemId)); patientIds.put(UUID.fromString(patientId), new Integer(count)); } csvParser.close(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ParserPool parser = new ParserPool(); Map<Integer, List<Object[]>> patientRows = new HashMap<>(); SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); for (UUID serviceId: serviceAndSystemIds.keySet()) { //List<UUID> systemIds = serviceAndSystemIds.get(serviceId); Service service = serviceDalI.getById(serviceId); String serviceName = service.getName(); LOG.info("Doing service " + serviceId + " " + serviceName); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId); LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan"); int exchangeCount = 0; for (UUID exchangeId: exchangeIds) { 
exchangeCount ++; if (exchangeCount % 1000 == 0) { LOG.info("Done " + exchangeCount + " exchanges"); } List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { UUID patientId = exchangeBatch.getEdsPatientId(); if (patientId != null && !patientIds.containsKey(patientId)) { continue; } Integer patientIdInt = patientIds.get(patientId); //get encounters for exchange batch UUID batchId = exchangeBatch.getBatchId(); List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper resourceWrapper: resourceWrappers) { if (resourceWrapper.isDeleted()) { continue; } String resourceType = resourceWrapper.getResourceType(); if (!resourceType.equals(ResourceType.Encounter.toString())) { continue; } LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId()); String json = resourceWrapper.getResourceData(); Encounter fhirEncounter = (Encounter)parser.parse(json); Date date = null; if (fhirEncounter.hasPeriod()) { Period period = fhirEncounter.getPeriod(); if (period.hasStart()) { date = period.getStart(); } } String episodeId = null; if (fhirEncounter.hasEpisodeOfCare()) { Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference); EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirEpisode != null) { if (fhirEpisode.hasIdentifier()) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID); if (Strings.isNullOrEmpty(episodeId)) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID); } } } } String adtType = null; String adtCode = null; Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE); if (extension != null) { CodeableConcept codeableConcept = (CodeableConcept) extension.getValue(); Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE); if (hl7MessageTypeCoding != null) { adtType = hl7MessageTypeCoding.getDisplay(); adtCode = hl7MessageTypeCoding.getCode(); } } else { //for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body try { Exchange exchange = exchangeDalI.getExchange(exchangeId); String exchangeBody = exchange.getBody(); Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody); for (Bundle.BundleEntryComponent entry: bundle.getEntry()) { if (entry.getResource() != null && entry.getResource() instanceof MessageHeader) { MessageHeader header = (MessageHeader)entry.getResource(); if (header.hasEvent()) { Coding coding = header.getEvent(); adtType = coding.getDisplay(); adtCode = coding.getCode(); } } } } catch (Exception ex) { //if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them } } String cls = null; if (fhirEncounter.hasClass_()) { Encounter.EncounterClass encounterClass = fhirEncounter.getClass_(); if (encounterClass == Encounter.EncounterClass.OTHER && fhirEncounter.hasClass_Element() && fhirEncounter.getClass_Element().hasExtension()) { for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) { if 
(classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) { //not 100% of the type of the value, so just append to a String cls = "" + classExtension.getValue(); } } } if (Strings.isNullOrEmpty(cls)) { cls = encounterClass.toCode(); } } String type = null; if (fhirEncounter.hasType()) { //only seem to ever have one type CodeableConcept codeableConcept = fhirEncounter.getType().get(0); type = codeableConcept.getText(); } String status = null; if (fhirEncounter.hasStatus()) { Encounter.EncounterState encounterState = fhirEncounter.getStatus(); status = encounterState.toCode(); } String location = null; String locationType = null; if (fhirEncounter.hasLocation()) { //first location is always the current location Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0); if (encounterLocation.hasLocation()) { Reference locationReference = encounterLocation.getLocation(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference); Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirLocation != null) { if (fhirLocation.hasName()) { location = fhirLocation.getName(); } if (fhirLocation.hasType()) { CodeableConcept typeCodeableConcept = fhirLocation.getType(); if (typeCodeableConcept.hasCoding()) { Coding coding = typeCodeableConcept.getCoding().get(0); locationType = coding.getDisplay(); } } } } } String clinician = null; if (fhirEncounter.hasParticipant()) { //first participant seems to be the interesting one Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0); if (encounterParticipant.hasIndividual()) { Reference practitionerReference = encounterParticipant.getIndividual(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference); Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirPractitioner != null) { if (fhirPractitioner.hasName()) { HumanName name = fhirPractitioner.getName(); clinician = name.getText(); if (Strings.isNullOrEmpty(clinician)) { clinician = ""; for (StringType s: name.getPrefix()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getGiven()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getFamily()) { clinician += s.getValueNotNull(); clinician += " "; } clinician = clinician.trim(); } } } } } Object[] row = new Object[12]; row[0] = serviceName; row[1] = patientIdInt.toString(); row[2] = sdfOutput.format(date); row[3] = episodeId; row[4] = adtCode; row[5] = adtType; row[6] = cls; row[7] = type; row[8] = status; row[9] = location; row[10] = locationType; row[11] = clinician; List<Object[]> rows = patientRows.get(patientIdInt); if (rows == null) { rows = new ArrayList<>(); patientRows.put(patientIdInt, rows); } rows.add(row); } } } } String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"}; FileWriter fileWriter = new FileWriter(outputPath); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader(outputColumnHeaders) .withQuote('"'); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format); for (int i=0; i <= count; i++) { Integer patientIdInt = new Integer(i); List<Object[]> rows = 
patientRows.get(patientIdInt); if (rows != null) { for (Object[] row: rows) { csvPrinter.printRecord(row); } } } csvPrinter.close(); bufferedWriter.close(); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath); }*/ /*private static void registerShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { LOG.info(""); try { Thread.sleep(5000); } catch (Throwable ex) { LOG.error("", ex); } LOG.info("Done"); } }); }*/ /*private static void findEmisStartDates(String path, String outputPath) { LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss"); Map<String, Date> startDates = new HashMap<>(); Map<String, String> servers = new HashMap<>(); Map<String, String> names = new HashMap<>(); Map<String, String> odsCodes = new HashMap<>(); Map<String, String> cdbNumbers = new HashMap<>(); Map<String, Set<String>> distinctPatients = new HashMap<>(); File root = new File(path); for (File sftpRoot : root.listFiles()) { LOG.info("Checking " + sftpRoot); Map<Date, File> extracts = new HashMap<>(); List<Date> extractDates = new ArrayList<>(); for (File extractRoot : sftpRoot.listFiles()) { Date d = sdf.parse(extractRoot.getName()); //LOG.info("" + extractRoot.getName() + " -> " + d); extracts.put(d, extractRoot); extractDates.add(d); } Collections.sort(extractDates); for (Date extractDate : extractDates) { File extractRoot = extracts.get(extractDate); LOG.info("Checking " + extractRoot); //read the sharing agreements file //e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv File sharingAgreementsFile = null; for (File f : extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("agreements_sharingorganisation") > -1 && name.endsWith(".csv")) { sharingAgreementsFile = f; break; } } if (sharingAgreementsFile == null) { LOG.info("Null agreements file for " + extractRoot); continue; } CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String activated = csvRecord.get("IsActivated"); String disabled = csvRecord.get("Disabled"); servers.put(orgGuid, sftpRoot.getName()); if (activated.equalsIgnoreCase("true")) { if (disabled.equalsIgnoreCase("false")) { Date d = sdf.parse(extractRoot.getName()); Date existingDate = startDates.get(orgGuid); if (existingDate == null) { startDates.put(orgGuid, d); } } else { if (startDates.containsKey(orgGuid)) { startDates.put(orgGuid, null); } } } } } finally { csvParser.close(); } //go through orgs file to get name, ods and cdb codes File orgsFile = null; for (File f : extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_organisation_") > -1 && name.endsWith(".csv")) { orgsFile = f; break; } } csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String name = csvRecord.get("OrganisationName"); String odsCode = csvRecord.get("ODSCode"); String cdb = csvRecord.get("CDB"); 
names.put(orgGuid, name); odsCodes.put(orgGuid, odsCode); cdbNumbers.put(orgGuid, cdb); } } finally { csvParser.close(); } //go through patients file to get count File patientFile = null; for (File f : extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_patient_") > -1 && name.endsWith(".csv")) { patientFile = f; break; } } csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String patientGuid = csvRecord.get("PatientGuid"); String deleted = csvRecord.get("Deleted"); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); if (distinctPatientSet == null) { distinctPatientSet = new HashSet<>(); distinctPatients.put(orgGuid, distinctPatientSet); } if (deleted.equalsIgnoreCase("true")) { distinctPatientSet.remove(patientGuid); } else { distinctPatientSet.add(patientGuid); } } } finally { csvParser.close(); } } } SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); StringBuilder sb = new StringBuilder(); sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients"); for (String orgGuid : startDates.keySet()) { Date startDate = startDates.get(orgGuid); String server = servers.get(orgGuid); String name = names.get(orgGuid); String odsCode = odsCodes.get(orgGuid); String cdbNumber = cdbNumbers.get(orgGuid); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); String startDateDesc = null; if (startDate != null) { startDateDesc = sdfOutput.format(startDate); } Long countDistinctPatients = null; if (distinctPatientSet != null) { countDistinctPatients = new Long(distinctPatientSet.size()); } sb.append("\n"); sb.append("\"" + name + "\""); sb.append(","); sb.append("\"" + odsCode + "\""); sb.append(","); sb.append("\"" + cdbNumber + "\""); sb.append(","); sb.append("\"" + orgGuid + "\""); sb.append(","); sb.append(startDateDesc); sb.append(","); sb.append("\"" + server + "\""); sb.append(","); sb.append(countDistinctPatients); } LOG.info(sb.toString()); FileUtils.writeStringToFile(new File(outputPath), sb.toString()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath); } private static void findEncounterTerms(String path, String outputPath) { LOG.info("Finding Encounter Terms from " + path); Map<String, Long> hmResults = new HashMap<>(); //source term, source term snomed ID, source term snomed term - count try { File root = new File(path); File[] files = root.listFiles(); for (File readerRoot : files) { //emis001 LOG.info("Finding terms in " + readerRoot); //first read in all the coding files to build up our map of codes Map<String, String> hmCodes = new HashMap<>(); for (File dateFolder : readerRoot.listFiles()) { LOG.info("Looking for codes in " + dateFolder); File f = findFile(dateFolder, "Coding_ClinicalCode"); if (f == null) { LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String codeId = csvRecord.get("CodeId"); String term = csvRecord.get("Term"); String snomed = csvRecord.get("SnomedCTConceptId"); hmCodes.put(codeId, snomed + ",\"" + term + "\""); } 
csvParser.close(); } SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); Date cutoff = dateFormat.parse("2017-01-01"); //now process the consultation files themselves for (File dateFolder : readerRoot.listFiles()) { LOG.info("Looking for consultations in " + dateFolder); File f = findFile(dateFolder, "CareRecord_Consultation"); if (f == null) { LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String term = csvRecord.get("ConsultationSourceTerm"); String codeId = csvRecord.get("ConsultationSourceCodeId"); if (Strings.isNullOrEmpty(term) && Strings.isNullOrEmpty(codeId)) { continue; } String date = csvRecord.get("EffectiveDate"); if (Strings.isNullOrEmpty(date)) { continue; } Date d = dateFormat.parse(date); if (d.before(cutoff)) { continue; } String line = "\"" + term + "\","; if (!Strings.isNullOrEmpty(codeId)) { String codeLookup = hmCodes.get(codeId); if (codeLookup == null) { LOG.error("Failed to find lookup for codeID " + codeId); continue; } line += codeLookup; } else { line += ","; } Long count = hmResults.get(line); if (count == null) { count = new Long(1); } else { count = new Long(count.longValue() + 1); } hmResults.put(line, count); } csvParser.close(); } } //save results to file StringBuilder output = new StringBuilder(); output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\""); output.append("\r\n"); for (String line : hmResults.keySet()) { Long count = hmResults.get(line); String combined = line + "," + count; output.append(combined); output.append("\r\n"); } LOG.info("FInished"); LOG.info(output.toString()); FileUtils.writeStringToFile(new File(outputPath), output.toString()); LOG.info("written output to " + outputPath); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished finding Encounter Terms from " + path); } private static File findFile(File root, String token) throws Exception { for (File f : root.listFiles()) { String s = f.getName(); if (s.indexOf(token) > -1) { return f; } } return null; }*/ /*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) { LOG.info("Starting Populating Protocol Queue for " + serviceIdStr); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); if (serviceIdStr.equalsIgnoreCase("All")) { serviceIdStr = null; } try { List<Service> services = new ArrayList<>(); if (Strings.isNullOrEmpty(serviceIdStr)) { services = serviceRepository.getAll(); } else { UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); services.add(service); } for (Service service: services) { List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId()); LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName()); if (startingExchangeId != null) { UUID startingExchangeUuid = UUID.fromString(startingExchangeId); if (exchangeIds.contains(startingExchangeUuid)) { //if in the list, remove everything up to and including the starting exchange int index = exchangeIds.indexOf(startingExchangeUuid); LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point"); for (int i=index; i>=0; i--) { 
exchangeIds.remove(i); } startingExchangeId = null; } else { //if not in the list, skip all these exchanges LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping"); continue; } } QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true); } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Populating Protocol Queue for " + serviceIdStr); }*/ /*private static void findDeletedOrgs() { LOG.info("Starting finding deleted orgs"); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); List<Service> services = new ArrayList<>(); try { for (Service service: serviceRepository.getAll()) { services.add(service); } } catch (Exception ex) { LOG.error("", ex); } services.sort((o1, o2) -> { String name1 = o1.getName(); String name2 = o2.getName(); return name1.compareToIgnoreCase(name2); }); for (Service service: services) { try { UUID serviceUuid = service.getId(); List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date()); LOG.info("Service: " + service.getName() + " " + service.getLocalId()); if (exchangeByServices.isEmpty()) { LOG.info(" no exchange found!"); continue; } Exchange exchangeByService = exchangeByServices.get(0); UUID exchangeId = exchangeByService.getId(); Exchange exchange = auditRepository.getExchange(exchangeId); Map<String, String> headers = exchange.getHeaders(); String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid); UUID systemUuid = UUID.fromString(systemUuidStr); int batches = countBatches(exchangeId, serviceUuid, systemUuid); LOG.info(" Most recent exchange had " + batches + " batches"); if (batches > 1 && batches < 2000) { continue; } //go back until we find the FIRST exchange where it broke exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date()); for (int i=0; i<exchangeByServices.size(); i++) { exchangeByService = exchangeByServices.get(i); exchangeId = exchangeByService.getId(); batches = countBatches(exchangeId, serviceUuid, systemUuid); exchange = auditRepository.getExchange(exchangeId); Date timestamp = exchange.getTimestamp(); if (batches < 1 || batches > 2000) { LOG.info(" " + timestamp + " had " + batches); } if (batches > 1 && batches < 2000) { LOG.info(" " + timestamp + " had " + batches); break; } } } catch (Exception ex) { LOG.error("", ex); } } LOG.info("Finished finding deleted orgs"); }*/ private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception { int batches = 0; ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit : audits) { if (audit.getNumberBatchesCreated() != null) { batches += audit.getNumberBatchesCreated(); } } return batches; } /*private static void fixExchanges(UUID justThisService) { LOG.info("Fixing exchanges"); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId : exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String 
software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } boolean changed = false; String body = exchange.getBody(); String[] files = body.split("\n"); if (files.length == 0) { continue; } for (int i=0; i<files.length; i++) { String original = files[i]; //remove /r characters String trimmed = original.trim(); //add the new prefix if (!trimmed.startsWith("sftpreader/EMIS001/")) { trimmed = "sftpreader/EMIS001/" + trimmed; } if (!original.equals(trimmed)) { files[i] = trimmed; changed = true; } } if (changed) { LOG.info("Fixed exchange " + exchangeId); LOG.info(body); body = String.join("\n", files); exchange.setBody(body); AuditWriter.writeExchange(exchange); } } } LOG.info("Fixed exchanges"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void deleteDataForService(UUID serviceId) { Service dbService = new ServiceRepository().getById(serviceId); //the delete will take some time, so do the delete in a separate thread LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId()); FhirDeletionService deletor = new FhirDeletionService(dbService); try { deletor.deleteData(); LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId()); } catch (Exception ex) { LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex); } }*/ /*private static void fixProblems(UUID serviceId, String sharedStoragePath, boolean testMode) { LOG.info("Fixing problems for service " + serviceId); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); List<ExchangeByService> exchangeByServiceList = auditRepository.getExchangesByService(serviceId, Integer.MAX_VALUE); //go backwards as the most recent is first for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService exchangeByService = exchangeByServiceList.get(i); UUID exchangeId = exchangeByService.getExchangeId(); LOG.info("Doing exchange " + exchangeId); EmisCsvHelper helper = null; try { Exchange exchange = AuditWriter.readExchange(exchangeId); String exchangeBody = exchange.getBody(); String[] files = exchangeBody.split(java.lang.System.lineSeparator()); File orgDirectory = validateAndFindCommonDirectory(sharedStoragePath, files); Map<Class, AbstractCsvParser> allParsers = new HashMap<>(); String properVersion = null; String[] versions = new String[]{EmisCsvToFhirTransformer.VERSION_5_0, EmisCsvToFhirTransformer.VERSION_5_1, EmisCsvToFhirTransformer.VERSION_5_3, EmisCsvToFhirTransformer.VERSION_5_4}; for (String version: versions) { try { List<AbstractCsvParser> parsers = new ArrayList<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(Observation.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(DrugRecord.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(IssueRecord.class, orgDirectory, version, true, parsers); for (AbstractCsvParser parser: parsers) { Class cls = parser.getClass(); allParsers.put(cls, parser); } properVersion = version; } catch (Exception ex) { //ignore } } if (allParsers.isEmpty()) { throw new Exception("Failed to open parsers for exchange " + exchangeId + " in folder " + orgDirectory); } UUID systemId = exchange.getHeaderAsUuid(HeaderKeys.SenderSystemUuid); //FhirResourceFiler dummyFiler = new 
FhirResourceFiler(exchangeId, serviceId, systemId, null, null, 10); if (helper == null) { helper = new EmisCsvHelper(findDataSharingAgreementGuid(new ArrayList<>(allParsers.values()))); } ObservationPreTransformer.transform(properVersion, allParsers, null, helper); IssueRecordPreTransformer.transform(properVersion, allParsers, null, helper); DrugRecordPreTransformer.transform(properVersion, allParsers, null, helper); Map<String, List<String>> problemChildren = helper.getProblemChildMap(); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (Map.Entry<String, List<String>> entry : problemChildren.entrySet()) { String patientLocallyUniqueId = entry.getKey().split(":")[0]; UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientLocallyUniqueId); if (edsPatientId == null) { throw new Exception("Failed to find edsPatientId for local Patient ID " + patientLocallyUniqueId + " in exchange " + exchangeId); } //find the batch ID for our patient UUID batchId = null; for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null && exchangeBatch.getEdsPatientId().equals(edsPatientId)) { batchId = exchangeBatch.getBatchId(); break; } } if (batchId == null) { throw new Exception("Failed to find batch ID for eds Patient ID " + edsPatientId + " in exchange " + exchangeId); } //find the EDS ID for our problem UUID edsProblemId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Condition, entry.getKey()); if (edsProblemId == null) { LOG.warn("No edsProblemId found for local ID " + entry.getKey() + " - assume bad data referring to non-existing problem?"); //throw new Exception("Failed to find edsProblemId for local Patient ID " + problemLocallyUniqueId + " in exchange " + exchangeId); } //convert our child IDs to EDS references List<Reference> references = new ArrayList<>(); HashSet<String> contentsSet = new HashSet<>(); contentsSet.addAll(entry.getValue()); for (String referenceValue : contentsSet) { Reference reference = ReferenceHelper.createReference(referenceValue); ReferenceComponents components = ReferenceHelper.getReferenceComponents(reference); String locallyUniqueId = components.getId(); ResourceType resourceType = components.getResourceType(); UUID edsResourceId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); Reference globallyUniqueReference = ReferenceHelper.createReference(resourceType, edsResourceId.toString()); references.add(globallyUniqueReference); } //find the resource for the problem itself ResourceByExchangeBatch problemResourceByExchangeBatch = null; List<ResourceByExchangeBatch> resources = resourceRepository.getResourcesForBatch(batchId, ResourceType.Condition.toString()); for (ResourceByExchangeBatch resourceByExchangeBatch: resources) { if (resourceByExchangeBatch.getResourceId().equals(edsProblemId)) { problemResourceByExchangeBatch = resourceByExchangeBatch; break; } } if (problemResourceByExchangeBatch == null) { throw new Exception("Problem not found for edsProblemId " + edsProblemId + " for exchange " + exchangeId); } if (problemResourceByExchangeBatch.getIsDeleted()) { LOG.warn("Problem " + edsProblemId + " is deleted, so not adding to it for exchange " + exchangeId); continue; } String json = problemResourceByExchangeBatch.getResourceData(); Condition fhirProblem = (Condition)PARSER_POOL.parse(json); //update the problems if (fhirProblem.hasContained()) { if (fhirProblem.getContained().size() > 1) { throw new 
Exception("Problem " + edsProblemId + " is has " + fhirProblem.getContained().size() + " contained resources for exchange " + exchangeId); } fhirProblem.getContained().clear(); } List_ list = new List_(); list.setId("Items"); fhirProblem.getContained().add(list); Extension extension = ExtensionConverter.findExtension(fhirProblem, FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE); if (extension == null) { Reference listReference = ReferenceHelper.createInternalReference("Items"); fhirProblem.addExtension(ExtensionConverter.createExtension(FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE, listReference)); } for (Reference reference : references) { list.addEntry().setItem(reference); } String newJson = FhirSerializationHelper.serializeResource(fhirProblem); if (newJson.equals(json)) { LOG.warn("Skipping edsProblemId " + edsProblemId + " as JSON hasn't changed"); continue; } problemResourceByExchangeBatch.setResourceData(newJson); String resourceType = problemResourceByExchangeBatch.getResourceType(); UUID versionUuid = problemResourceByExchangeBatch.getVersion(); ResourceHistory problemResourceHistory = resourceRepository.getResourceHistoryByKey(edsProblemId, resourceType, versionUuid); problemResourceHistory.setResourceData(newJson); problemResourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); ResourceByService problemResourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType, edsProblemId); if (problemResourceByService.getResourceData() == null) { problemResourceByService = null; LOG.warn("Not updating edsProblemId " + edsProblemId + " for exchange " + exchangeId + " as it's been subsequently delrted"); } else { problemResourceByService.setResourceData(newJson); } //save back to THREE tables if (!testMode) { resourceRepository.save(problemResourceByExchangeBatch); resourceRepository.save(problemResourceHistory); if (problemResourceByService != null) { resourceRepository.save(problemResourceByService); } LOG.info("Fixed edsProblemId " + edsProblemId + " for exchange Id " + exchangeId); } else { LOG.info("Would change edsProblemId " + edsProblemId + " to new JSON"); LOG.info(newJson); } } } catch (Exception ex) { LOG.error("Failed on exchange " + exchangeId, ex); break; } } LOG.info("Finished fixing problems for service " + serviceId); } private static String findDataSharingAgreementGuid(List<AbstractCsvParser> parsers) throws Exception { //we need a file name to work out the data sharing agreement ID, so just the first file we can find File f = parsers .iterator() .next() .getFile(); String name = Files.getNameWithoutExtension(f.getName()); String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + f.getName()); } return toks[4]; } private static void closeParsers(Collection<AbstractCsvParser> parsers) { for (AbstractCsvParser parser : parsers) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } private static File validateAndFindCommonDirectory(String sharedStoragePath, String[] files) throws Exception { String organisationDir = null; for (String file: files) { File f = new File(sharedStoragePath, file); if (!f.exists()) { LOG.error("Failed to find file {} in shared storage {}", file, sharedStoragePath); throw new FileNotFoundException("" + f + " doesn't exist"); } //LOG.info("Successfully found file {} in shared storage {}", file, sharedStoragePath); try { File orgDir = 
f.getParentFile(); if (organisationDir == null) { organisationDir = orgDir.getAbsolutePath(); } else { if (!organisationDir.equalsIgnoreCase(orgDir.getAbsolutePath())) { throw new Exception(); } } } catch (Exception ex) { throw new FileNotFoundException("" + f + " isn't in the expected directory structure within " + organisationDir); } } return new File(organisationDir); }*/ /*private static void testLogging() { while (true) { System.out.println("Checking logging at " + System.currentTimeMillis()); try { Thread.sleep(4000); } catch (Exception e) { e.printStackTrace(); } LOG.trace("trace logging"); LOG.debug("debug logging"); LOG.info("info logging"); LOG.warn("warn logging"); LOG.error("error logging"); } } */ /*private static void fixExchangeProtocols() { LOG.info("Fixing exchange protocols"); AuditRepository auditRepository = new AuditRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); LOG.info("Processing exchange " + exchangeId); Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); List<String> newIds = new ArrayList<>(); String protocolJson = headers.get(HeaderKeys.Protocols); if (!headers.containsKey(HeaderKeys.Protocols)) { try { List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr); // Get protocols where service is publisher newIds = libraryItemList.stream() .filter( libraryItem -> libraryItem.getProtocol().getServiceContract().stream() .anyMatch(sc -> sc.getType().equals(ServiceContractType.PUBLISHER) && sc.getService().getUuid().equals(serviceIdStr))) .map(t -> t.getUuid().toString()) .collect(Collectors.toList()); } catch (Exception e) { LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e); continue; } } else { try { JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson); for (int i = 0; i < node.size(); i++) { JsonNode libraryItemNode = node.get(i); JsonNode idNode = libraryItemNode.get("uuid"); String id = idNode.asText(); newIds.add(id); } } catch (Exception e) { LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e); continue; } } try { if (newIds.isEmpty()) { headers.remove(HeaderKeys.Protocols); } else { String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray()); headers.put(HeaderKeys.Protocols, protocolsJson); } } catch (JsonProcessingException e) { LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e); continue; } try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); } catch (JsonProcessingException e) { LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e); 
continue; } auditRepository.save(exchange); } LOG.info("Finished fixing exchange protocols"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } if (headers.containsKey(HeaderKeys.SenderLocalIdentifier) && headers.containsKey(HeaderKeys.SenderOrganisationUuid)) { continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); Map<UUID, String> orgMap = service.getOrganisations(); if (orgMap.size() != 1) { LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId()); continue; } UUID orgId = orgMap .keySet() .stream() .collect(StreamExtension.firstOrNullCollector()); Organisation organisation = organisationRepository.getById(orgId); String odsCode = organisation.getNationalId(); headers.put(HeaderKeys.SenderLocalIdentifier, odsCode); headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString()); try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Creating exchange " + exchange.getExchangeId()); } LOG.info("Finished fixing exchange headers"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); LibraryRepository libraryRepository = new LibraryRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } boolean changed = false; UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); try { List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint : endpoints) { UUID 
endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid())) { if (!headers.containsKey(HeaderKeys.SourceSystem)) { headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat()); changed = true; } if (!headers.containsKey(HeaderKeys.SystemVersion)) { headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion()); changed = true; } if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) { headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString()); changed = true; } } } } } catch (Exception e) { LOG.error("Failed to find endpoint details for " + exchange.getExchangeId()); continue; } if (changed) { try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Fixed exchange " + exchange.getExchangeId()); } } LOG.info("Finished fixing exchange headers"); }*/ /*private static void testConnection(String configName) { try { JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise"); String driverClass = config.get("driverClass").asText(); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName(driverClass); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void testConnection() { try { JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise"); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName("org.postgresql.Driver"); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception { LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom); LOG.info("Testing database connection"); testConnection(configName); Service service = new ServiceRepository().getById(serviceId); List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet()); UUID orgId = orgIds.get(0); List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE); for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService 
exchangeByService = exchangeByServiceList.get(i); //for (ExchangeByService exchangeByService: exchangeByServiceList) { UUID exchangeId = exchangeByService.getExchangeId(); if (exchangeIdStartFrom != null) { if (!exchangeIdStartFrom.equals(exchangeId)) { continue; } else { //once we have a match, set to null so we don't skip any subsequent ones exchangeIdStartFrom = null; } } Exchange exchange = AuditWriter.readExchange(exchangeId); String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid); UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr); //this one had 90,000 batches and doesn't need doing again *//*if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) { LOG.info("Skipping exchange " + exchangeId); continue; }*//* List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId); LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches"); for (int j=0; j<exchangeBatches.size(); j++) { ExchangeBatch exchangeBatch = exchangeBatches.get(j); UUID batchId = exchangeBatch.getBatchId(); if (batchIdStartFrom != null) { if (!batchIdStartFrom.equals(batchId)) { continue; } else { batchIdStartFrom = null; } } LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size()); try { String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null); if (!Strings.isNullOrEmpty(outbound)) { EnterpriseFiler.file(outbound, configName); } } catch (Exception ex) { throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex); } } } }*/ /*private static void fixMissingExchanges() { LOG.info("Fixing missing exchanges"); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;"); stmt.setFetchSize(100); Set<UUID> exchangeIdsDone = new HashSet<>(); AuditRepository auditRepository = new AuditRepository(); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); UUID batchId = row.get(1, UUID.class); Date date = row.getTimestamp(2); //LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date); if (exchangeIdsDone.contains(exchangeId)) { continue; } if (auditRepository.getExchange(exchangeId) != null) { continue; } UUID serviceId = findServiceId(batchId, session); if (serviceId == null) { continue; } Exchange exchange = new Exchange(); ExchangeByService exchangeByService = new ExchangeByService(); ExchangeEvent exchangeEvent = new ExchangeEvent(); Map<String, String> headers = new HashMap<>(); headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString()); String headersJson = null; try { headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setBody("Body not available, as exchange re-created"); exchange.setExchangeId(exchangeId); exchange.setHeaders(headersJson); exchange.setTimestamp(date); exchangeByService.setExchangeId(exchangeId); exchangeByService.setServiceId(serviceId); exchangeByService.setTimestamp(date); exchangeEvent.setEventDesc("Created_By_Conversion"); 
exchangeEvent.setExchangeId(exchangeId); exchangeEvent.setTimestamp(new Date()); auditRepository.save(exchange); auditRepository.save(exchangeEvent); auditRepository.save(exchangeByService); exchangeIdsDone.add(exchangeId); LOG.info("Creating exchange " + exchangeId); } LOG.info("Finished exchange fix"); } private static UUID findServiceId(UUID batchId, Session session) { Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;"); ResultSet rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId); return null; } Row row = rs.one(); String resourceType = row.getString(0); UUID resourceId = row.get(1, UUID.class); stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;"); rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId); return null; } row = rs.one(); UUID serviceId = row.get(0, UUID.class); return serviceId; }*/ /*private static void fixExchangeEvents() { List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents(); for (ExchangeEvent event: events) { if (event.getEventDesc() != null) { continue; } String eventDesc = ""; int eventType = event.getEvent().intValue(); switch (eventType) { case 1: eventDesc = "Receive"; break; case 2: eventDesc = "Validate"; break; case 3: eventDesc = "Transform_Start"; break; case 4: eventDesc = "Transform_End"; break; case 5: eventDesc = "Send"; break; default: eventDesc = "??? " + eventType; } event.setEventDesc(eventDesc); new AuditRepository().save(null, event); } }*/ /*private static void fixExchanges() { AuditRepository auditRepository = new AuditRepository(); Map<UUID, Set<UUID>> existingOnes = new HashMap(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); List<Exchange> exchanges = auditRepository.getAllExchanges(); for (Exchange exchange: exchanges) { UUID exchangeUuid = exchange.getExchangeId(); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson); continue; } *//*String serviceId = headers.get(HeaderKeys.SenderServiceUuid); if (serviceId == null) { LOG.warn("No service ID found for exchange " + exchange.getExchangeId()); continue; } UUID serviceUuid = UUID.fromString(serviceId); Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid); if (exchangeIdsDone == null) { exchangeIdsDone = new HashSet<>(); List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE); for (ExchangeByService exchangeByService: exchangeByServices) { exchangeIdsDone.add(exchangeByService.getExchangeId()); } existingOnes.put(serviceUuid, exchangeIdsDone); } //create the exchange by service entity if (!exchangeIdsDone.contains(exchangeUuid)) { Date timestamp = exchange.getTimestamp(); ExchangeByService newOne = new ExchangeByService(); newOne.setExchangeId(exchangeUuid); newOne.setServiceId(serviceUuid); newOne.setTimestamp(timestamp); auditRepository.save(newOne); }*//* try { headers.remove(HeaderKeys.BatchIdsJson); String newHeaderJson = 
ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } if (!headers.containsKey(HeaderKeys.BatchIdsJson)) { //fix the batch IDs not being in the exchange List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid); if (!batches.isEmpty()) { List<UUID> batchUuids = batches .stream() .map(t -> t.getBatchId()) .collect(Collectors.toList()); try { String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray()); headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr); String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange, null); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } } //} } }*/ /*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException { List<JsonServiceInterfaceEndpoint> endpoints = null; try { endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); LibraryRepository libraryRepository = new LibraryRepository(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid()) && technicalInterface.getMessageFormat().equalsIgnoreCase(software) && technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) { return endpointSystemId; } } } } catch (Exception e) { throw new PipelineException("Failed to process endpoints from service " + service.getId()); } return null; } */ /*private static void addSystemIdToExchangeHeaders() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if 
(Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) { LOG.info("Skipping exchange " + exchangeId + " as no service UUID"); continue; } if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " as already got system UUID"); continue; } try { //work out service ID String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); UUID serviceId = UUID.fromString(serviceIdStr); String software = headers.get(HeaderKeys.SourceSystem); String version = headers.get(HeaderKeys.SystemVersion); Service service = serviceRepository.getById(serviceId); UUID systemUuid = findSystemId(service, software, version); headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString()); //work out protocol IDs try { String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr); headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson); } catch (Exception ex) { LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage()); } //save to DB headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); auditRepository.save(exchange); } catch (Exception ex) { LOG.error("Error with exchange " + exchangeId, ex); } } LOG.info("Finished populateExchangeBatchPatients"); }*/ /*private static void populateExchangeBatchPatients() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); //ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid)) || Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " because no service or system in header"); continue; } try { UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid)); UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid)); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch : exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } UUID batchId = exchangeBatch.getBatchId(); List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString()); if (resourceWrappers.isEmpty()) { continue; } List<UUID> patientIds = new ArrayList<>(); for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) { UUID patientId = resourceWrapper.getResourceId(); if (resourceWrapper.getIsDeleted()) { 
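                            //this batch's Patient resource was deleted, so clear down the patient's entire record (see deleteEntirePatientRecord below)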
                            deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId);
                        }
                        if (!patientIds.contains(patientId)) {
                            patientIds.add(patientId);
                        }
                    }

                    if (patientIds.size() != 1) {
                        LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs");
                        continue;
                    }

                    UUID patientId = patientIds.get(0);
                    exchangeBatch.setEdsPatientId(patientId);
                    exchangeBatchRepository.save(exchangeBatch);
                }
            } catch (Exception ex) {
                LOG.error("Error with exchange " + exchangeId, ex);
            }
        }

        LOG.info("Finished populateExchangeBatchPatients");
    }

    private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception {
        FhirStorageService storageService = new FhirStorageService(serviceId, systemId);
        ResourceRepository resourceRepository = new ResourceRepository();

        List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId);
        for (ResourceByPatient resourceWrapper: resourceWrappers) {
            String json = resourceWrapper.getResourceData();
            Resource resource = new JsonParser().parse(json);
            storageService.exchangeBatchDelete(exchangeId, batchId, resource);
        }
    }*/

    /*private static void convertPatientSearch() {
        LOG.info("Converting Patient Search");

        ResourceRepository resourceRepository = new ResourceRepository();

        try {
            Iterable<Service> iterable = new ServiceRepository().getAll();
            for (Service service : iterable) {
                UUID serviceId = service.getId();
                LOG.info("Doing service " + service.getName());

                for (UUID systemId : findSystemIds(service)) {
                    List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString());
                    for (ResourceByService resourceWrapper: resourceWrappers) {
                        if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
                            continue;
                        }

                        try {
                            EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData());
                            String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient());

                            ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId));
                            if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) {
                                continue;
                            }

                            Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData());

                            PatientSearchHelper.update(serviceId, systemId, patient);
                            PatientSearchHelper.update(serviceId, systemId, episodeOfCare);
                        } catch (Exception ex) {
                            LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
                        }
                    }
                }
            }
            LOG.info("Converted Patient Search");

        } catch (Exception ex) {
            LOG.error("", ex);
        }
    }*/

    //returns the system UUIDs configured on the service's published endpoints
    private static List<UUID> findSystemIds(Service service) throws Exception {
        List<UUID> ret = new ArrayList<>();

        List<ServiceInterfaceEndpoint> endpoints = null;
        try {
            endpoints = service.getEndpointsList();
            for (ServiceInterfaceEndpoint endpoint : endpoints) {
                UUID endpointSystemId = endpoint.getSystemUuid();
                ret.add(endpointSystemId);
            }
        } catch (Exception e) {
            throw new Exception("Failed to process endpoints from service " + service.getId());
        }

        return ret;
    }

    /*private static void convertPatientLink() {
        LOG.info("Converting Patient Link");

        ResourceRepository resourceRepository = new ResourceRepository();

        try {
            Iterable<Service> iterable = new ServiceRepository().getAll();
            for (Service service : iterable) {
                UUID serviceId = service.getId();
                LOG.info("Doing service " + service.getName());

                for (UUID systemId :
findSystemIds(service)) { List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString()); for (ResourceByService resourceWrapper: resourceWrappers) { if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) { continue; } try { Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData()); PatientLinkHelper.updatePersonId(patient); } catch (Exception ex) { LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex); } } } } LOG.info("Converted Patient Link"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, ResourceHistory> resourcesFixed = new HashMap<>(); Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>(); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); 
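                    //open the Emis CSV files needed to re-create the resources that were previously skipped because they were flagged as confidential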
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); ProblemPreTransformer.transform(version, parsers, filer, helper); ObservationPreTransformer.transform(version, parsers, filer, helper); DrugRecordPreTransformer.transform(version, parsers, filer, helper); IssueRecordPreTransformer.transform(version, parsers, filer, helper); DiaryPreTransformer.transform(version, parsers, filer, helper); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() && !patientParser.getDeleted()) { PatientTransformer.createResource(patientParser, filer, helper, version); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { ConsultationTransformer.createResource(consultationParser, filer, helper, version); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { ObservationTransformer.createResource(observationParser, filer, helper, version); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { DiaryTransformer.createResource(diaryParser, filer, helper, version); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && 
!drugRecordParser.getDeleted()) { DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version); } } issueRecordParser.close(); filer.waitToFinish(); //just to close the thread pool, even though it's not been used List<Resource> resources = filer.getNewResources(); for (Resource resource: resources) { String patientId = IdHelper.getPatientId(resource); UUID edsPatientId = UUID.fromString(patientId); ResourceType resourceType = resource.getResourceType(); UUID resourceId = UUID.fromString(resource.getId()); boolean foundResourceInDbBatch = false; List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds != null) { for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; } foundResourceInDbBatch = true; for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (!Strings.isNullOrEmpty(json)) { LOG.warn("JSON already in resource " + resourceType + " " + resourceId); } else { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid); } resourceHistory.setIsDeleted(false); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceHistory.setSchemaVersion("0.1"); resourceRepository.save(resourceByExchangeBatch); resourceRepository.save(resourceHistory); batchIdsToPutInProtocolQueue.add(batchId); String key = resourceType.toString() + ":" + resourceId; resourcesFixed.put(key, resourceHistory); } //if a patient became confidential, we will have deleted all resources for that //patient, so we need to undo that too //to undelete WHOLE patient record //1. if THIS resource is a patient //2. get all other deletes from the same exchange batch //3. delete those from resource_by_exchange_batch (the deleted ones only) //4. delete same ones from resource_history //5. retrieve most recent resource_history //6. 
if not deleted, add to resources fixed if (resourceType == ResourceType.Patient) { List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId); LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId); for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) { if (!resourceInSameBatch.getIsDeleted()) { continue; } //patient and episode resources will be restored by the above stuff, so don't try //to do it again if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString()) || resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion()); mapperResourceByExchangeBatch.delete(resourceInSameBatch); mapperResourceHistory.delete(deletedResourceHistory); batchIdsToPutInProtocolQueue.add(batchId); //check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId()); if (mostRecentDeletedResourceHistory != null && !mostRecentDeletedResourceHistory.getIsDeleted()) { String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId(); resourcesFixed.put(key2, mostRecentDeletedResourceHistory); } } } } } } //if we didn't find records in the DB to update, then if (!foundResourceInDbBatch) { //we can't generate a back-dated time UUID, but we need one so the resource_history //table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange, //and the batch ID is actually a time UUID that was allocated around the right time ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId); //if there was no batch for the exchange, then the exchange wasn't processed at all. 
So skip this exchange //and we'll pick up the same patient data in a following exchange if (firstBatch == null) { continue; } UUID versionUuid = firstBatch.getBatchId(); //find suitable batch ID UUID batchId = null; if (batchIds != null && batchIds.size() > 0) { batchId = batchIds.get(batchIds.size()-1); } else { //create new batch ID if not found ExchangeBatch exchangeBatch = new ExchangeBatch(); exchangeBatch.setBatchId(UUIDs.timeBased()); exchangeBatch.setExchangeId(exchangeId); exchangeBatch.setInsertedAt(new Date()); exchangeBatch.setEdsPatientId(edsPatientId); exchangeBatchRepository.save(exchangeBatch); batchId = exchangeBatch.getBatchId(); //add to map for next resource if (batchIds == null) { batchIds = new ArrayList<>(); } batchIds.add(batchId); batchesPerPatient.put(edsPatientId, batchIds); } String json = parserPool.composeString(resource); ResourceHistory resourceHistory = new ResourceHistory(); resourceHistory.setResourceId(resourceId); resourceHistory.setResourceType(resourceType.toString()); resourceHistory.setVersion(versionUuid); resourceHistory.setCreatedAt(new Date()); resourceHistory.setServiceId(serviceId); resourceHistory.setSystemId(systemId); resourceHistory.setIsDeleted(false); resourceHistory.setSchemaVersion("0.1"); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch(); resourceByExchangeBatch.setBatchId(batchId); resourceByExchangeBatch.setExchangeId(exchangeId); resourceByExchangeBatch.setResourceType(resourceType.toString()); resourceByExchangeBatch.setResourceId(resourceId); resourceByExchangeBatch.setVersion(versionUuid); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); resourceByExchangeBatch.setResourceData(json); resourceRepository.save(resourceHistory); resourceRepository.save(resourceByExchangeBatch); batchIdsToPutInProtocolQueue.add(batchId); } } if (!batchIdsToPutInProtocolQueue.isEmpty()) { exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue); } } //update the resource_by_service table (and the resource_by_patient view) for (ResourceHistory resourceHistory: resourcesFixed.values()) { UUID latestVersionUpdatedUuid = resourceHistory.getVersion(); ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId()); UUID latestVersionUuid = latestVersion.getVersion(); //if there have been subsequent updates to the resource, then skip it if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) { continue; } Resource resource = parserPool.parse(resourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment)metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(resourceHistory.getServiceId()); resourceByService.setSystemId(resourceHistory.getSystemId()); resourceByService.setResourceType(resourceHistory.getResourceType()); resourceByService.setResourceId(resourceHistory.getResourceId()); resourceByService.setCurrentVersion(resourceHistory.getVersion()); resourceByService.setUpdatedAt(resourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); 
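                //copy the resource JSON across and write the rebuilt resource_by_service row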
resourceByService.setResourceData(resourceHistory.getResourceData()); resourceRepository.save(resourceByService); //call out to our patient search and person matching services if (resource instanceof Patient) { PatientLinkHelper.updatePersonId((Patient)resource); PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource); } else if (resource instanceof EpisodeOfCare) { PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource); } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Fixing Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) { LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); 
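                    //group this exchange's batches by EDS patient ID, so deleted patients can be matched back to their batches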
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers); //find any deleted patients List<UUID> deletedPatientUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getDeleted()) { //find the EDS patient ID for this local guid String patientGuid = patientParser.getPatientGuid(); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } deletedPatientUuids.add(edsPatientId); } } patientParser.close(); //go through the appts file to find properly deleted appt GUIDS List<UUID> deletedApptUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class); while (apptParser.nextRecord()) { if (apptParser.getDeleted()) { String patientGuid = apptParser.getPatientGuid(); String slotGuid = apptParser.getSlotGuid(); if (!Strings.isNullOrEmpty(patientGuid)) { String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid); UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId); deletedApptUuids.add(edsApptId); } } } apptParser.close(); for (UUID edsPatientId : deletedPatientUuids) { List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; } for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString()); for (ResourceByExchangeBatch apptWrapper : apptWrappers) { //ignore non-deleted appts if (!apptWrapper.getIsDeleted()) { continue; } //if the appt was deleted legitamately, then skip it UUID apptId = apptWrapper.getResourceId(); if (deletedApptUuids.contains(apptId)) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion()); if (saveChanges) { mapperResourceByExchangeBatch.delete(apptWrapper); mapperResourceHistory.delete(deletedResourceHistory); } LOG.info("Un-deleted 
" + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId); //now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId()); if (mostRecentResourceHistory != null && !mostRecentResourceHistory.getIsDeleted()) { Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment) metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(mostRecentResourceHistory.getServiceId()); resourceByService.setSystemId(mostRecentResourceHistory.getSystemId()); resourceByService.setResourceType(mostRecentResourceHistory.getResourceType()); resourceByService.setResourceId(mostRecentResourceHistory.getResourceId()); resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion()); resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); resourceByService.setResourceData(mostRecentResourceHistory.getResourceData()); if (saveChanges) { resourceRepository.save(resourceByService); } LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table"); } } } } } } LOG.info("Finished Deleted Appointments Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixSlotReferencesForPublisher(String publisher) { try { ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> services = dal.getAll(); for (Service service: services) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { fixSlotReferences(service.getId()); } } } catch (Exception ex) { LOG.error("", ex); } } private static void fixSlotReferences(UUID serviceId) { LOG.info("Fixing Slot References in Appointments for " + serviceId); try { //get patient IDs from patient search List<UUID> patientIds = new ArrayList<>(); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); String sql = "SELECT eds_id FROM resource_id_map WHERE service_id = '" + serviceId + "' AND resource_type = '" + ResourceType.Patient + "';"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientUuid = rs.getString(1); patientIds.add(UUID.fromString(patientUuid)); } rs.close(); statement.close(); connection.close(); LOG.debug("Found " + patientIds.size() + " patients"); int done = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true, null); //for each patient for (UUID patientUuid: patientIds) { //LOG.debug("Checking patient " + patientUuid); //get all appointment resources List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString()); for (ResourceWrapper apptWrapper: 
appointmentWrappers) { //LOG.debug("Checking appointment " + apptWrapper.getResourceId()); List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId()); //the above returns most recent first, but we want to do them in order historyWrappers = Lists.reverse(historyWrappers); for (ResourceWrapper historyWrapper : historyWrappers) { if (historyWrapper.isDeleted()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " is deleted"); continue; } String json = historyWrapper.getResourceData(); Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json); if (!appt.hasSlot()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has no slot"); continue; } if (appt.getSlot().size() != 1) { throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs"); } Reference slotRef = appt.getSlot().get(0); //test if slot reference exists Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef); String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef); if (slotSourceId.indexOf(":") > -1) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has a valid slot"); continue; } //if not, correct slot reference Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId()); Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference); String sourceId = ReferenceHelper.getReferenceId(apptLocalReference); Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId); Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper); String slotEdsReferenceValue = slotEdsReference.getReference(); String oldSlotRefValue = slotRef.getReference(); slotRef.setReference(slotEdsReferenceValue); //LOG.debug("Appointment " + historyWrapper.getResourceId() + " slot ref changed from " + oldSlotRefValue + " to " + slotEdsReferenceValue); //save appointment json = FhirSerializationHelper.serializeResource(appt); historyWrapper.setResourceData(json); saveResourceWrapper(serviceId, historyWrapper); fixed++; } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); } } LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); LOG.info("Finished Fixing Slot References in Appointments for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixReviews(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, Long> problemCodes = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { 
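                //re-read each exchange from the audit store and skip anything that isn't an Emis CSV extract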
Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (problemParser.nextRecord()) { String patientGuid = problemParser.getPatientGuid(); String observationGuid = problemParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (!problemCodes.containsKey(key)) { problemCodes.put(key, null); } } problemParser.close(); while (observationParser.nextRecord()) { String patientGuid = observationParser.getPatientGuid(); String observationGuid = observationParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (problemCodes.containsKey(key)) { Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } problemCodes.put(key, codeId); } } observationParser.close(); LOG.info("Found " + problemCodes.size() + " problem codes so far"); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); while (observationParser.nextRecord()) { String problemGuid = observationParser.getProblemGuid(); if (!Strings.isNullOrEmpty(problemGuid)) { String patientGuid = observationParser.getPatientGuid(); Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } String key = patientGuid + ":" + problemGuid; Long problemCodeId = problemCodes.get(key); if (problemCodeId == null || problemCodeId.longValue() != codeId.longValue()) { continue; } //if here, our code is the same as the problem, so it's a review String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid(); ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper); for (UUID systemId: systemIds) { UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, 
systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); if (edsObservationId == null) { //try observations as diagnostic reports, because it could be one of those instead if (resourceType == ResourceType.Observation) { resourceType = ResourceType.DiagnosticReport; edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); } if (edsObservationId == null) { throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId); } } List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; //throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId); } for (UUID batchId: batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; //throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId); } for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (Strings.isNullOrEmpty(json)) { throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId); } Resource resource = parserPool.parse(json); if (addReviewExtension((DomainResource)resource)) { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId); resourceRepository.save(resourceByExchangeBatch); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid); } resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceRepository.save(resourceHistory); ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId); if (resourceByService != null) { UUID serviceVersionUuid = resourceByService.getCurrentVersion(); if (serviceVersionUuid.equals(versionUuid)) { resourceByService.setResourceData(json); resourceRepository.save(resourceByService); } } } else { LOG.info("" + resourceType + " " + edsObservationId + " already has extension"); } } } } //1. find out resource type originall saved from //2. retrieve from resource_by_exchange_batch //3. update resource in resource_by_exchange_batch //4. retrieve from resource_history //5. update resource_history //6. retrieve record from resource_by_service //7. 
if resource_by_service version UUID matches the resource_history updated, then update that too } } observationParser.close(); } } LOG.info("Finished Fixing Reviews"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static boolean addReviewExtension(DomainResource resource) { if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) { return false; } Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true)); resource.addExtension(extension); return true; }*/ /*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } //once we match the servce, set this to null to do all other services justThisService = null; LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<String> interestingPatientGuids = new ArrayList<>(); Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient); File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, 
true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() || patientParser.getDeleted()) { interestingPatientGuids.add(patientParser.getPatientGuid()); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { interestingPatientGuids.add(consultationParser.getPatientGuid()); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { interestingPatientGuids.add(observationParser.getPatientGuid()); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { interestingPatientGuids.add(diaryParser.getPatientGuid()); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && !drugRecordParser.getDeleted()) { interestingPatientGuids.add(drugRecordParser.getPatientGuid()); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { interestingPatientGuids.add(issueRecordParser.getPatientGuid()); } } issueRecordParser.close(); } Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); for (String interestingPatientGuid: interestingPatientGuids) { if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid); } for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) { Map<UUID, List<UUID>> 
batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId); List<UUID> batches = batchesPerPatient.get(edsPatientId); if (batches != null) { Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId); if (batchesForExchange == null) { batchesForExchange = new HashSet<>(); exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange); } batchesForExchange.addAll(batches); } } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Running Protocols for Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixOrgs() { LOG.info("Posting orgs to protocol queue"); String[] orgIds = new String[]{ "332f31a2-7b28-47cb-af6f-18f65440d43d", "c893d66b-eb89-4657-9f53-94c5867e7ed9"}; ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>(); for (String orgId: orgIds) { LOG.info("Doing org ID " + orgId); UUID orgUuid = UUID.fromString(orgId); try { //select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING; ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid); UUID batchId = resourceByExchangeBatch.getBatchId(); //select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING; ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId); UUID exchangeId = exchangeBatch.getExchangeId(); Set<UUID> list = exchangeBatches.get(exchangeId); if (list == null) { list = new HashSet<>(); exchangeBatches.put(exchangeId, list); } list.add(batchId); } catch (Exception ex) { LOG.error("", ex); break; } } try { //find the config for our protocol queue (which is in the inbound config) String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline 
= configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatches.keySet()) { Set<UUID> batchIds = exchangeBatches.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } catch (Exception ex) { LOG.error("", ex); return; } LOG.info("Finished posting orgs to protocol queue"); }*/ /*private static void findCodes() { LOG.info("Finding missing codes"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID serviceId = row.get(0, UUID.class); UUID systemId = row.get(1, UUID.class); UUID exchangeId = row.get(2, UUID.class); UUID version = row.get(3, UUID.class); ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version); String xml = audit.getErrorXml(); if (xml == null) { continue; } String codePrefix = "Failed to find clinical code CodeableConcept for codeId "; int codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " clinical code " + code + " from " + audit.getStarted()); continue; } codePrefix = "Failed to find medication CodeableConcept for codeId "; codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " drug code " + code + " from " + audit.getStarted()); continue; } } LOG.info("Finished finding missing codes"); }*/ private static void createEmisSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Emis Subset"); try { Set<String> patientGuids = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } patientGuids.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createEmisSubsetForFile(sourceDir, destDir, patientGuids); LOG.info("Finished Creating Emis Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createEmisSubsetForFile(File sourceDir, 
File destDir, Set<String> patientGuids) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createEmisSubsetForFile(sourceFile, destFile, patientGuids); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } CSVFormat format = CSVFormat.DEFAULT.withHeader(); InputStreamReader reader = new InputStreamReader( new BufferedInputStream( new FileInputStream(sourceFile))); CSVParser parser = new CSVParser(reader, format); String filterColumn = null; Map<String, Integer> headerMap = parser.getHeaderMap(); if (headerMap.containsKey("PatientGuid")) { filterColumn = "PatientGuid"; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFile))); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientGuid = csvRecord.get(filterColumn); if (Strings.isNullOrEmpty(patientGuid) //if empty, carry over this record || patientGuids.contains(patientGuid)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating TPP Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createTppSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating TPP Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } //LOG.info("Doing dir " + sourceFile); createTppSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } Charset encoding = Charset.forName("CP1252"); InputStreamReader reader = new InputStreamReader( 
new BufferedInputStream( new FileInputStream(sourceFile)), encoding); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(); CSVParser parser = new CSVParser(reader, format); String filterColumn = null; Map<String, Integer> headerMap = parser.getHeaderMap(); if (headerMap.containsKey("IDPatient")) { filterColumn = "IDPatient"; } else if (name.equalsIgnoreCase("SRPatient.csv")) { filterColumn = "RowIdentifier"; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFile), encoding)); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); /*} else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); copyFile(sourceFile, destFile); }*/ } } } private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Vision Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createVisionSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating Vision Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createVisionSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; if (name.contains("encounter_data") || name.contains("journal_data") || name.contains("patient_data") || name.contains("referral_data")) { filterColumn = 0; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw 
= new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Homerton Subset"); try { Set<String> PersonIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } PersonIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createHomertonSubsetForFile(sourceDir, destDir, PersonIds); LOG.info("Finished Creating Homerton Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createHomertonSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withHeader(); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //PersonId column at 1 if (name.contains("ENCOUNTER") || name.contains("PATIENT")) { filterColumn = 1; } else if (name.contains("DIAGNOSIS")) { //PersonId column at 13 filterColumn = 13; } else if (name.contains("ALLERGY")) { //PersonId column at 2 filterColumn = 2; } else if (name.contains("PROBLEM")) { //PersonId column at 4 filterColumn = 4; } else { //if no patient column, just copy the file (i.e. 
PROCEDURE) parser.close(); LOG.info("Copying file without PatientId " + sourceFile); copyFile(sourceFile, destFile); continue; } Map<String, Integer> headerMap = parser.getHeaderMap(); String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Adastra Subset"); try { Set<String> caseIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } //adastra extract files are all keyed on caseId caseIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createAdastraSubsetForFile(sourceDir, destDir, caseIds); LOG.info("Finished Creating Adastra Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createAdastraSubsetForFile(sourceFile, destFile, caseIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|'); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //CaseRef column at 0 if (name.contains("NOTES") || name.contains("CASEQUESTIONS") || name.contains("OUTCOMES") || name.contains("CONSULTATION") || name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") || name.contains("PATIENT")) { filterColumn = 0; } else if (name.contains("CASE")) { //CaseRef column at 2 filterColumn = 2; } else if (name.contains("PROVIDER")) { //CaseRef column at 7 filterColumn = 7; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String caseId = csvRecord.get(filterColumn); if 
(caseIds.contains(caseId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } /*private static void exportFhirToCsv(UUID serviceId, String destinationPath) { try { File dir = new File(destinationPath); if (dir.exists()) { dir.mkdirs(); } Map<String, CSVPrinter> hmPrinters = new HashMap<>(); EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current"); LOG.debug("Running query"); ResultSet rs = ps.executeQuery(); LOG.debug("Got result set"); while (rs.next()) { String id = rs.getString(1); String type = rs.getString(2); String json = rs.getString(3); CSVPrinter printer = hmPrinters.get(type); if (printer == null) { String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv"); FileWriter fileWriter = new FileWriter(new File(path)); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader("resource_id", "resource_json") .withDelimiter('\t') .withEscape((Character) null) .withQuote((Character) null) .withQuoteMode(QuoteMode.MINIMAL); printer = new CSVPrinter(bufferedWriter, format); hmPrinters.put(type, printer); } printer.printRecord(id, json); } for (String type : hmPrinters.keySet()) { CSVPrinter printer = hmPrinters.get(type); printer.flush(); printer.close(); } ps.close(); entityManager.close(); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixTPPNullOrgs(String sourceDir, String orgODS) throws Exception { final String COLUMN_ORG = "IDOrganisationVisibleTo"; File[] files = new File(sourceDir).listFiles(); if (files == null) return; LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String sourceFileName = sourceFile.getName(); if (sourceFile.isDirectory()) { fixTPPNullOrgs(sourceFileName, orgODS); } else { LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(sourceFileName); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } Charset encoding = Charset.forName("CP1252"); InputStreamReader reader = new InputStreamReader( new BufferedInputStream( new FileInputStream(sourceFile)), encoding); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(); CSVParser parser = new CSVParser(reader, format); Map<String, Integer> headerMap = parser.getHeaderMap(); if (!headerMap.containsKey(COLUMN_ORG)) { //if no COLUMN_ORG column, ignore LOG.info("Ignoring file with no " + COLUMN_ORG + " column: " + sourceFile); parser.close(); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } String destFileName = sourceFileName.concat(".FIXED"); BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFileName), encoding)); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); //iterate down the file and look at Org Column Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String fileOrgODS = 
csvRecord.get(COLUMN_ORG); //set the empty value to that orgODS value passed in if (Strings.isNullOrEmpty(fileOrgODS)) { Map <String, String> recordMap = csvRecord.toMap(); recordMap.put(COLUMN_ORG, String.valueOf(orgODS)); List<String> alteredCsvRecord = new ArrayList<String>(); for (String key : columnHeaders) { alteredCsvRecord.add(recordMap.get(key)); } printer.printRecord(alteredCsvRecord); printer.flush(); } else { if (!fileOrgODS.equalsIgnoreCase(orgODS)) { parser.close(); printer.flush(); printer.close(); throw new Exception("File contains different ODS codes to parameter value - aborting"); } //write the record back unchanged printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); //Finally, delete source file and rename the fixed destination file back to source sourceFile.delete(); new File (destFileName).renameTo(new File (sourceFileName)); } } }*/ } /*class ResourceFiler extends FhirResourceFiler { public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError, List<UUID> batchIdsCreated, int maxFilingThreads) { super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads); } private List<Resource> newResources = new ArrayList<>(); public List<Resource> getNewResources() { return newResources; } @Override public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling saveAdminResource"); } @Override public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling deleteAdminResource"); } @Override public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception { for (Resource resource: resources) { if (mapIds) { IdHelper.mapIds(getServiceId(), getSystemId(), resource); } newResources.add(resource); } } @Override public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... 
resources) throws Exception { throw new Exception("shouldn't be calling deletePatientResource"); } }*/ /* class MoveToS3Runnable implements Runnable { private static final Logger LOG = LoggerFactory.getLogger(MoveToS3Runnable.class); private List<FileInfo> files = null; private AtomicInteger done = null; public MoveToS3Runnable(List<FileInfo> files, AtomicInteger done) { this.files = files; this.done = done; } @Override public void run() { try { doWork(); } catch (Exception ex) { LOG.error("", ex); } } private void doWork() throws Exception { SourceFileMappingDalI db = DalProvider.factorySourceFileMappingDal(); //write to database //Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>(); for (FileInfo info: files) { String path = info.getFilePath(); InputStream inputStream = FileHelper.readFileFromSharedStorage(path); ZipInputStream zis = new ZipInputStream(inputStream); ZipEntry entry = zis.getNextEntry(); if (entry == null) { throw new Exception("No entry in zip file " + path); } byte[] entryBytes = IOUtils.toByteArray(zis); String json = new String(entryBytes); inputStream.close(); ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(json); ResourceWrapper wrapper = new ResourceWrapper(); String versionStr = FilenameUtils.getBaseName(path); wrapper.setVersion(UUID.fromString(versionStr)); Date d = info.getLastModified(); wrapper.setCreatedAt(d); File f = new File(path); f = f.getParentFile(); String resourceIdStr = f.getName(); wrapper.setResourceId(UUID.fromString(resourceIdStr)); f = f.getParentFile(); String resourceTypeStr = f.getName(); wrapper.setResourceType(resourceTypeStr); f = f.getParentFile(); String serviceIdStr = f.getName(); wrapper.setServiceId(UUID.fromString(serviceIdStr)); Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>(); batch.put(wrapper, audit); try { db.saveResourceMappings(batch); } catch (Exception ex) { String msg = ex.getMessage(); if (msg.indexOf("Duplicate entry") == -1) { throw ex; } } */ /*if (batch.size() > 5) { db.saveResourceMappings(batch); batch.clear(); }*//* int nowDone = done.incrementAndGet(); if (nowDone % 1000 == 0) { LOG.debug("Done " + nowDone + " / " + files.size()); } } */ /*if (!batch.isEmpty()) { db.saveResourceMappings(batch); batch.clear(); }*//* } }*/ class PopulateDataDateCallable implements Callable { private static final Logger LOG = LoggerFactory.getLogger(PopulateDataDateCallable.class); private static ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); private UUID exchangeId = null; private AtomicInteger fixed = null; public PopulateDataDateCallable(UUID exchangeId, AtomicInteger fixed) { this.exchangeId = exchangeId; this.fixed = fixed; } private void doWork() throws Exception { Exchange exchange = exchangeDal.getExchange(exchangeId); //check if already done String existingVal = exchange.getHeader(HeaderKeys.DataDate); String software = exchange.getHeader(HeaderKeys.SourceSystem); String version = exchange.getHeader(HeaderKeys.SystemVersion); if (!Strings.isNullOrEmpty(existingVal)) { LOG.info("Already done exchange " + exchange.getId() + " software " + software + " version " + version); markAsDone(); return; } String body = exchange.getBody(); if (body.equals("[]")) { LOG.error("Empty body found in exchange " + exchange.getId() + " software " + software + " version " + version); markAsDone(); return; } Date lastDataDate = OpenEnvelope.calculateLastDataDate(software, version, body); if (lastDataDate == null) { LOG.error("Failed to calculate data for exchange " 
+ exchange.getId() + " software " + software + " version " + version);
            markAsDone();
            return;
        }

        exchange.setHeaderAsDate(HeaderKeys.DataDate, lastDataDate);
        exchangeDal.save(exchange);

        //mark as done
        markAsDone();
        fixed.incrementAndGet();
    }

    private void markAsDone() throws Exception {
        EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager();
        auditEntityManager.getTransaction().begin();
        SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate();
        Connection auditConnection = auditSession.connection();

        String sql = "UPDATE drewtest.exchange_ids SET done = 1 WHERE id = ?";
        PreparedStatement ps = auditConnection.prepareStatement(sql);
        ps.setString(1, exchangeId.toString());
        ps.executeUpdate();

        auditEntityManager.getTransaction().commit();
        ps.close();
        auditEntityManager.close();
        //LOG.debug("Marked as done using: " + sql);
    }

    @Override
    public Object call() throws Exception {
        try {
            doWork();
        } catch (Throwable ex) {
            LOG.error("Error with " + exchangeId, ex);
        }
        return null;
    }
}

/*
class TestRabbitConsumer extends DefaultConsumer {
    private static final Logger LOG = LoggerFactory.getLogger(TestRabbitConsumer.class);

    public TestRabbitConsumer(Channel channel) {
        super(channel);
    }

    @Override
    public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] bytes) throws IOException {
        long deliveryTag = envelope.getDeliveryTag();
        String bodyStr = new String(bytes, "UTF-8");
        LOG.info("Received exchange body: " + bodyStr);

        try {
            Thread.sleep(1000);
        } catch (Throwable t) {
            LOG.error("", t);
        }

        this.getChannel().basicAck(deliveryTag, false);
    }
}*/
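
// Usage sketch (not part of the original source): PopulateDataDateCallable above is written to be
// submitted to a worker pool, one task per exchange ID, with the shared AtomicInteger counting how
// many exchanges were actually fixed. Assuming a standard java.util.concurrent.ExecutorService
// (which would need importing) and hypothetical threadCount / exchangeIds values, a driver might
// look roughly like this:
//
//     ExecutorService pool = Executors.newFixedThreadPool(threadCount);
//     AtomicInteger fixed = new AtomicInteger();
//     List<Future> futures = new ArrayList<>();
//     for (UUID exchangeId : exchangeIds) {
//         futures.add(pool.submit(new PopulateDataDateCallable(exchangeId, fixed)));
//     }
//     for (Future future : futures) {
//         future.get(); //wait for completion; errors are caught and logged inside call()
//     }
//     pool.shutdown();
//     LOG.info("Fixed " + fixed.get() + " exchanges");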
src/eds-queuereader/src/main/java/org/endeavourhealth/queuereader/Main.java
package org.endeavourhealth.queuereader;

import OpenPseudonymiser.Crypto;
import com.fasterxml.jackson.databind.JsonNode;
import com.google.common.base.Strings;
import org.apache.commons.csv.*;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.endeavourhealth.common.cache.ObjectMapperPool;
import org.endeavourhealth.common.config.ConfigManager;
import org.endeavourhealth.common.security.datasharingmanagermodel.models.database.DataSharingAgreementEntity;
import org.endeavourhealth.common.security.usermanagermodel.models.caching.DataSharingAgreementCache;
import org.endeavourhealth.common.security.usermanagermodel.models.caching.OrganisationCache;
import org.endeavourhealth.common.security.usermanagermodel.models.caching.ProjectCache;
import org.endeavourhealth.common.utility.FileHelper;
import org.endeavourhealth.common.utility.ThreadPool;
import org.endeavourhealth.common.utility.ThreadPoolError;
import org.endeavourhealth.core.configuration.ConfigDeserialiser;
import org.endeavourhealth.core.configuration.PostMessageToExchangeConfig;
import org.endeavourhealth.core.configuration.QueueReaderConfiguration;
import org.endeavourhealth.core.database.dal.DalProvider;
import org.endeavourhealth.core.database.dal.admin.LibraryRepositoryHelper;
import org.endeavourhealth.core.database.dal.admin.ServiceDalI;
import org.endeavourhealth.core.database.dal.admin.models.Service;
import org.endeavourhealth.core.database.dal.audit.ExchangeBatchDalI;
import org.endeavourhealth.core.database.dal.audit.ExchangeDalI;
import org.endeavourhealth.core.database.dal.audit.models.*;
import org.endeavourhealth.core.database.dal.datagenerator.SubscriberZipFileUUIDsDalI;
import org.endeavourhealth.core.database.dal.eds.PatientLinkDalI;
import org.endeavourhealth.core.database.dal.eds.PatientSearchDalI;
import org.endeavourhealth.core.database.dal.ehr.ResourceDalI;
import org.endeavourhealth.core.database.dal.ehr.models.ResourceWrapper;
import org.endeavourhealth.core.database.dal.publisherTransform.models.ResourceFieldMappingAudit;
import org.endeavourhealth.core.database.dal.reference.PostcodeDalI;
import org.endeavourhealth.core.database.dal.reference.models.PostcodeLookup;
import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberOrgMappingDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberPersonMappingDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.SubscriberResourceMappingDalI;
import org.endeavourhealth.core.database.dal.subscriberTransform.models.SubscriberId;
import org.endeavourhealth.core.database.rdbms.ConnectionManager;
import org.endeavourhealth.core.database.rdbms.enterprise.EnterpriseConnector;
import org.endeavourhealth.core.exceptions.TransformException;
import org.endeavourhealth.core.fhirStorage.FhirStorageService;
import org.endeavourhealth.core.fhirStorage.ServiceInterfaceEndpoint;
import org.endeavourhealth.core.messaging.pipeline.components.MessageTransformOutbound;
import org.endeavourhealth.core.messaging.pipeline.components.OpenEnvelope;
import org.endeavourhealth.core.messaging.pipeline.components.PostMessageToExchange;
import org.endeavourhealth.core.queueing.QueueHelper;
import org.endeavourhealth.core.xml.QueryDocument.*;
import org.endeavourhealth.transform.common.*;
import org.endeavourhealth.transform.emis.EmisCsvToFhirTransformer;
import org.endeavourhealth.transform.subscriber.targetTables.OutputContainer;
import
org.endeavourhealth.transform.subscriber.targetTables.SubscriberTableId; import org.hibernate.internal.SessionImpl; import org.hl7.fhir.instance.model.MedicationStatement; import org.hl7.fhir.instance.model.ResourceType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.persistence.EntityManager; import java.io.*; import java.lang.System; import java.lang.reflect.Constructor; import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.StandardOpenOption; import java.sql.*; import java.text.SimpleDateFormat; import java.util.*; import java.util.Date; import java.util.concurrent.Callable; import java.util.concurrent.atomic.AtomicInteger; import java.util.regex.Pattern; public class Main { private static final Logger LOG = LoggerFactory.getLogger(Main.class); public static void main(String[] args) throws Exception { String configId = args[0]; LOG.info("Initialising config manager"); ConfigManager.initialize("queuereader", configId); /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEncounters")) { String table = args[1]; fixEncounters(table); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("DeleteEnterpriseObs")) { String filePath = args[1]; String configName = args[2]; int batchSize = Integer.parseInt(args[3]); deleteEnterpriseObs(filePath, configName, batchSize); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("TestDSM")) { String odsCode = args[1]; String projectId = args[2]; testDsm(odsCode, projectId); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FindMissedExchanges")) { String tableName = args[1]; String odsCodeRegex = null; if (args.length > 2) { odsCodeRegex = args[2]; } findMissedExchanges(tableName, odsCodeRegex); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("SendPatientsToSubscriber")) { String tableName = args[1]; sendPatientsToSubscriber(tableName); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateDeleteZipsForSubscriber")) { int batchSize = Integer.parseInt(args[1]); String sourceTable = args[2]; int subscriberId = Integer.parseInt(args[3]); createDeleteZipsForSubscriber(batchSize, sourceTable, subscriberId); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestJMX")) { testJmx(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestDatabases")) { String serviceIdStr = args[1]; String subscriberConfigName = args[2]; testDatabases(serviceIdStr, subscriberConfigName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulatePatientSearchEpisodeOdsCode")) { populatePatientSearchEpisodeOdsCode(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisSnomedCodes")) { String odsCodeRegex = null; if (args.length > 1) { odsCodeRegex = args[1]; } fixEmisSnomedCodes(odsCodeRegex); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisDrugRecords")) { String odsCodeRegex = null; if (args.length > 1) { odsCodeRegex = args[1]; } fixEmisDrugRecords(odsCodeRegex); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateSubscriberDBPseudoId")) { String subscriberConfigName = args[1]; String saltKeyName = args[2]; populateSubscriberPseudoId(subscriberConfigName, saltKeyName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("InvestigateMissingPatients")) { String nhsNumberFile = args[1]; String protocolName = args[2]; String subscriberConfigName = args[3]; String odsCodeRegex = 
args[4]; investigateMissingPatients(nhsNumberFile, protocolName, subscriberConfigName, odsCodeRegex); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("FixMedicationStatementIsActive")) { String odsCodeRegex = null; if (args.length > 1) { odsCodeRegex = args[1]; } fixMedicationStatementIsActive(odsCodeRegex); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixMissingEmisEthnicities")) { String filePath = args[1]; String odsCodeRegex = null; if (args.length > 2) { odsCodeRegex = args[2]; } fixMissingEmisEthnicities(filePath, odsCodeRegex); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("UpdatePatientSearch")) { String filePath = args[1]; updatePatientSearch(filePath); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("SubscriberFullLoad")) { UUID serviceId = UUID.fromString(args[1]); UUID protocolId = UUID.fromString(args[2]); QueueHelper.queueUpFullServiceForPopulatingSubscriber(serviceId, protocolId); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("RunPersonUpdater")) { String enterpriseConfigName = args[1]; runPersonUpdater(enterpriseConfigName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CountNhsNumberChanges")) { String odsCode = args[1]; countNhsNumberChanges(odsCode); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("TransformPatients")) { String sourceFile = args[1]; transformPatients(sourceFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindPatientsThatNeedTransforming")) { String file = args[1]; String odsCode = null; if (args.length > 2) { odsCode = args[2]; } findPatientsThatNeedTransforming(file, odsCode); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateDigest")) { String url = args[1]; String user = args[2]; String pass = args[3]; String table = args[4]; String columnFrom = args[5]; String columnTo = args[6]; String base64Salt = args[7]; String validNhsNumberCol = null; if (args.length > 8) { validNhsNumberCol = args[8]; } createDigest(url, user, pass, table, columnFrom, columnTo, base64Salt, validNhsNumberCol); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertAudits2")) { String configName = args[1]; String tempTable = args[2]; int threads = Integer.parseInt(args[3]); int batchSize = Integer.parseInt(args[4]); boolean testMode = Boolean.parseBoolean(args[5]); convertFhirAudits2(configName, tempTable, threads, batchSize, testMode); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertAudits")) { String configName = args[2]; int threads = Integer.parseInt(args[3]); int batchSize = Integer.parseInt(args[4]); convertFhirAudits(configName, threads, batchSize); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestRabbit")) { String nodes = args[1]; String username = args[2]; String password = args[3]; String exchangeName = args[4]; String queueName = args[5]; String sslProtocol = null; if (args.length > 6) { sslProtocol = args[6]; } testRabbit(nodes, username, password, sslProtocol, exchangeName, queueName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisEpisodes1")) { String odsCode = args[1]; //fixEmisEpisodes1(odsCode); fixEmisEpisodes2(odsCode); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3Listing")) { String path = args[1]; testS3Listing(path); System.exit(0); }*/ /*if (args.length >= 1 && 
args[0].equalsIgnoreCase("CheckForBartsMissingFiles")) { String sinceDate = args[1]; checkForBartsMissingFiles(sinceDate); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateHomertonSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createHomertonSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateAdastraSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createAdastraSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateVisionSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createVisionSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTppSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createTppSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("CreateBartsSubset")) { String sourceDirPath = args[1]; UUID serviceUuid = UUID.fromString(args[2]); UUID systemUuid = UUID.fromString(args[3]); String samplePatientsFile = args[4]; createBartsSubset(sourceDirPath, serviceUuid, systemUuid, samplePatientsFile); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CreateEmisSubset")) { String sourceDirPath = args[1]; String destDirPath = args[2]; String samplePatientsFile = args[3]; createEmisSubset(sourceDirPath, destDirPath, samplePatientsFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindBartsPersonIds")) { String sourceFile = args[1]; UUID serviceUuid = UUID.fromString(args[2]); UUID systemUuid = UUID.fromString(args[3]); String dateCutoffStr = args[4]; String dstFile = args[5]; findBartsPersonIds(sourceFile, serviceUuid, systemUuid, dateCutoffStr, dstFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixTPPNullOrgs")) { String sourceDirPath = args[1]; String orgODS = args[2]; LOG.info("Fixing TPP Null Organisations"); fixTPPNullOrgs(sourceDirPath, orgODS); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisDeletedPatients")) { String odsCode = args[1]; fixEmisDeletedPatients(odsCode); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostPatientToProtocol")) { String odsCode = args[1]; String patientUuid = args[2]; postPatientToProtocol(odsCode, patientUuid); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("PostPatientsToProtocol")) { UUID serviceId = UUID.fromString(args[1]); UUID systemId = UUID.fromString(args[2]); String sourceFile = args[3]; postPatientsToProtocol(serviceId, systemId, sourceFile); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestMetrics")) { testMetrics(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestXML")) { testXml(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestGraphiteMetrics")) { String host = args[1]; String port = args[2]; testGraphiteMetrics(host, port); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsOrgs")) { String serviceId = args[1]; fixBartsOrgs(serviceId); System.exit(0); }*/ /*if (args.length >= 1 && 
args[0].equalsIgnoreCase("TestPreparedStatements")) { String url = args[1]; String user = args[2]; String pass = args[3]; String serviceId = args[4]; testPreparedStatements(url, user, pass, serviceId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CreateTransformMap")) { UUID serviceId = UUID.fromString(args[1]); String table = args[2]; String dstFile = args[3]; createTransforMap(serviceId, table, dstFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ExportFhirToCsv")) { UUID serviceId = UUID.fromString(args[1]); String path = args[2]; exportFhirToCsv(serviceId, path); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestBatchInserts")) { String url = args[1]; String user = args[2]; String pass = args[3]; String num = args[4]; String batchSize = args[5]; testBatchInserts(url, user, pass, num, batchSize); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ApplyEmisAdminCaches")) { applyEmisAdminCaches(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixSubscribers")) { fixSubscriberDbs(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3Read")) { String s3Bucket = args[1]; String s3Key = args[2]; String start = args[3]; String len = args[4]; testS3Read(s3Bucket, s3Key, start, len); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3ForPublisher")) { String publisherId = args[1]; String systemId = args[2]; fixEmisProblems3ForPublisher(publisherId, UUID.fromString(systemId)); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisProblems3")) { String serviceId = args[1]; String systemId = args[2]; fixEmisProblems3(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("CheckDeletedObs")) { String serviceId = args[1]; String systemId = args[2]; checkDeletedObs(UUID.fromString(serviceId), UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixPersonsNoNhsNumber")) { fixPersonsNoNhsNumber(); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("CalculateUprnPseudoIds")) { String subscriberConfigName = args[1]; String targetTable = args[2]; calculateUprnPseudoIds(subscriberConfigName, targetTable); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateSubscriberUprnTable")) { String subscriberConfigName = args[1]; Integer overrideBatchSize = null; if (args.length > 2) { overrideBatchSize = Integer.valueOf(args[2]); } String patientId = null; if (args.length > 3) { patientId = args[3]; } populateSubscriberUprnTable(subscriberConfigName, overrideBatchSize, patientId); System.exit(0); } /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertEmisGuid")) { convertEmisGuids(); System.exit(0); }*/ if (args.length >= 1 && args[0].equalsIgnoreCase("PostToRabbit")) { String exchangeName = args[1]; String srcFile = args[2]; Integer throttle = null; if (args.length > 3) { throttle = Integer.parseInt(args[3]); } postToRabbit(exchangeName, srcFile, throttle); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("PostExchangesToProtocol")) { String srcFile = args[1]; postExchangesToProtocol(srcFile); System.exit(0); 
} /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsPatients")) { UUID serviceId = UUID.fromString(args[1]); fixBartsPatients(serviceId); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixDeceasedPatients")) { String subscriberConfig = args[1]; fixDeceasedPatients(subscriberConfig); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("FixPseudoIds")) { String subscriberConfig = args[1]; int threads = Integer.parseInt(args[2]); fixPseudoIds(subscriberConfig, threads); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("MoveS3ToAudit")) { int threads = Integer.parseInt(args[1]); moveS3ToAudit(threads); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ConvertExchangeBody")) { String systemId = args[1]; convertExchangeBody(UUID.fromString(systemId)); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixReferrals")) { fixReferralRequests(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateNewSearchTable")) { String table = args[1]; populateNewSearchTable(table); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixBartsEscapes")) { String filePath = args[1]; fixBartsEscapedFiles(filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; String systemId = args[2]; String filePath = args[3]; postToInboundFromFile(UUID.fromString(serviceId), UUID.fromString(systemId), filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixDisabledExtract")) { String sharedStoragePath = args[1]; String tempDir = args[2]; String systemId = args[3]; String serviceOdsCode = args[4]; fixDisabledEmisExtract(serviceOdsCode, systemId, sharedStoragePath, tempDir); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixEmisMissingSlots")) { String serviceOdsCode = args[1]; fixEmisMissingSlots(serviceOdsCode); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateLastDataDate")) { int threads = Integer.parseInt(args[1]); int batchSize = Integer.parseInt(args[2]); populateLastDataDate(threads, batchSize); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestSlack")) { testSlack(); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PostToInbound")) { String serviceId = args[1]; boolean all = Boolean.parseBoolean(args[2]); postToInbound(UUID.fromString(serviceId), all); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixPatientSearch")) { String serviceId = args[1]; String systemId = null; if (args.length > 2) { systemId = args[2]; } if (serviceId.equalsIgnoreCase("All")) { fixPatientSearchAllServices(systemId); } else { fixPatientSearch(serviceId, systemId); } System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixSlotReferences")) { String serviceId = args[1]; try { UUID serviceUuid = UUID.fromString(serviceId); fixSlotReferences(serviceUuid); } catch (Exception ex) { fixSlotReferencesForPublisher(serviceId); } System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("TestAuditingFile")) { UUID serviceId = UUID.fromString(args[1]); UUID systemId = UUID.fromString(args[2]); UUID exchangeId = UUID.fromString(args[3]); String version = args[4]; String filePath = args[5]; testAuditingFile(serviceId, systemId, exchangeId, version, filePath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("TestS3VsMySQL")) 
{ UUID serviceUuid = UUID.fromString(args[1]); int count = Integer.parseInt(args[2]); int sqlBatchSize = Integer.parseInt(args[3]); String bucketName = args[4]; testS3VsMySql(serviceUuid, count, sqlBatchSize, bucketName); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("Exit")) { String exitCode = args[1]; LOG.info("Exiting with error code " + exitCode); int exitCodeInt = Integer.parseInt(exitCode); System.exit(exitCodeInt); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("RunSql")) { String host = args[1]; String username = args[2]; String password = args[3]; String sqlFile = args[4]; runSql(host, username, password, sqlFile); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("PopulateProtocolQueue")) { String serviceId = null; if (args.length > 1) { serviceId = args[1]; } String startingExchangeId = null; if (args.length > 2) { startingExchangeId = args[2]; } populateProtocolQueue(serviceId, startingExchangeId); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEncounterTerms")) { String path = args[1]; String outputPath = args[2]; findEncounterTerms(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FindEmisStartDates")) { String path = args[1]; String outputPath = args[2]; findEmisStartDates(path, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("ExportHl7Encounters")) { String sourceCsvPpath = args[1]; String outputPath = args[2]; exportHl7Encounters(sourceCsvPpath, outputPath); System.exit(0); }*/ /*if (args.length >= 1 && args[0].equalsIgnoreCase("FixExchangeBatches")) { fixExchangeBatches(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindCodes")) { findCodes(); System.exit(0); }*/ /*if (args.length >= 0 && args[0].equalsIgnoreCase("FindDeletedOrgs")) { findDeletedOrgs(); System.exit(0); }*/ if (args.length >= 0 && args[0].equalsIgnoreCase("LoadEmisData")) { String serviceId = args[1]; String systemId = args[2]; String dbUrl = args[3]; String dbUsername = args[4]; String dbPassword = args[5]; String onlyThisFileType = null; if (args.length > 6) { onlyThisFileType = args[6]; } loadEmisData(serviceId, systemId, dbUrl, dbUsername, dbPassword, onlyThisFileType); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateEmisDataTables")) { createEmisDataTables(); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("LoadBartsData")) { String serviceId = args[1]; String systemId = args[2]; String dbUrl = args[3]; String dbUsername = args[4]; String dbPassword = args[5]; String startDate = args[6]; String onlyThisFileType = null; if (args.length > 7) { onlyThisFileType = args[7]; } loadBartsData(serviceId, systemId, dbUrl, dbUsername, dbPassword, startDate, onlyThisFileType); System.exit(0); } if (args.length >= 1 && args[0].equalsIgnoreCase("CreateBartsDataTables")) { createBartsDataTables(); System.exit(0); } if (args.length != 1) { LOG.error("Usage: queuereader config_id"); return; } LOG.info("--------------------------------------------------"); LOG.info("EDS Queue Reader " + configId); LOG.info("--------------------------------------------------"); LOG.info("Fetching queuereader configuration"); String configXml = ConfigManager.getConfiguration(configId); QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); /*LOG.info("Registering shutdown hook"); registerShutdownHook();*/ // Instantiate rabbit handler RabbitHandler rabbitHandler = new 
RabbitHandler(configuration, configId); rabbitHandler.start(); LOG.info("EDS Queue reader running (kill file location " + TransformConfig.instance().getKillFileLocation() + ")"); } private static void testDsm(String odsCode, String projectId) { LOG.info("Testing DSM for " + odsCode + " and project " + projectId); try { LOG.debug("Testing getAllPublishersForProjectWithSubscriberCheck"); List<String> results = ProjectCache.getAllPublishersForProjectWithSubscriberCheck(projectId, odsCode); LOG.debug("Got " + results); LOG.debug(""); LOG.debug(""); LOG.debug("Testing doesOrganisationHaveDPA"); Boolean b = OrganisationCache.doesOrganisationHaveDPA(odsCode); LOG.debug("Got " + b); LOG.debug(""); LOG.debug(""); LOG.debug("Testing getAllDSAsForPublisherOrg"); List<DataSharingAgreementEntity> list = DataSharingAgreementCache.getAllDSAsForPublisherOrg(odsCode); if (list == null) { LOG.debug("Got NULL"); } else { LOG.debug("Got " + list.size()); for (DataSharingAgreementEntity e: list) { LOG.debug(" -> " + e.getName() + " " + e.getUuid()); } } LOG.info("Finished Testing DSM for " + odsCode); } catch (Throwable t) { LOG.error("", t); } } private static void sendPatientsToSubscriber(String tableName) { LOG.info("Sending patients to subscriber from " + tableName); try { Connection conn = ConnectionManager.getEdsConnection(); String sql = "SELECT service_id, protocol_id, patient_id FROM " + tableName + " ORDER BY service_id, protocol_id"; PreparedStatement ps = conn.prepareStatement(sql); ps.setFetchSize(5000); List<UUID> batchPatientIds = new ArrayList<>(); UUID batchServiceId = null; UUID batchProtocolId = null; ResultSet rs = ps.executeQuery(); while (rs.next()) { int col = 1; UUID serviceId = UUID.fromString(rs.getString(col++)); UUID protocolId = UUID.fromString(rs.getString(col++)); UUID patientId = UUID.fromString(rs.getString(col++)); if (batchServiceId == null || batchProtocolId == null || !serviceId.equals(batchServiceId) || !protocolId.equals(batchProtocolId)) { //send any found previously if (!batchPatientIds.isEmpty()) { LOG.debug("Doing batch of " + batchPatientIds.size() + " for service " + batchServiceId + " and protocol " + batchProtocolId); QueueHelper.queueUpFullServiceForPopulatingSubscriber(batchServiceId, batchProtocolId, batchPatientIds); } batchServiceId = serviceId; batchProtocolId = protocolId; batchPatientIds = new ArrayList<>(); } batchPatientIds.add(patientId); } //do the remainder if (!batchPatientIds.isEmpty()) { LOG.debug("Doing batch of " + batchPatientIds.size() + " for service " + batchServiceId + " and protocol " + batchProtocolId); QueueHelper.queueUpFullServiceForPopulatingSubscriber(batchServiceId, batchProtocolId, batchPatientIds); } conn.close(); LOG.info("Finished sending patients to subscriber from " + tableName); } catch (Throwable t) { LOG.error("", t); } } /** * checks Services to see if any queued up exchange was not yet processed when a bulk subscriber load was started, * meaning that some data was not sent to that subscriber. 
Populates a table with IDs that can then be queued up
     * for sending
     *
     * tableName should be of a table with this schema:
     *     create table tmp.patients_to_requeue (
     *         service_id char(36),
     *         protocol_id char(36),
     *         bulk_exchange_id char(36),
     *         patient_id char(36)
     *     );
     */
    private static void findMissedExchanges(String tableName, String odsCodeRegex) {
        LOG.info("Finding missed exchanges filtering on orgs using " + odsCodeRegex + ", storing results in " + tableName);
        try {
            ServiceDalI serviceDal = DalProvider.factoryServiceDal();
            List<Service> services = serviceDal.getAll();
            for (Service service: services) {
                if (shouldSkipService(service, odsCodeRegex)) {
                    continue;
                }
                LOG.debug("Doing " + service);

                List<UUID> systemIds = findSystemIds(service);
                for (UUID systemId: systemIds) {
                    LOG.debug("Doing system " + systemId);

                    ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
                    List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemId, Integer.MAX_VALUE);

                    //go through exchanges and look for ones that were created for a bulk subscriber load
                    for (int i=0; i<exchanges.size(); i++) {
                        Exchange bulkExchange = exchanges.get(i);

                        //if the exchange contains the header key to prevent re-queueing then it's possible
                        //it's one for the bulk load
                        boolean isBulkLoad = false;
                        Boolean allowRequeuing = bulkExchange.getHeaderAsBoolean(HeaderKeys.AllowQueueing);
                        if (allowRequeuing != null && !allowRequeuing.booleanValue()) {
                            List<ExchangeEvent> events = exchangeDal.getExchangeEvents(bulkExchange.getId());
                            for (ExchangeEvent event: events) {
                                String eventDesc = event.getEventDesc();
                                //note weird text check to handle the two versions of this message used
                                if (eventDesc.contains("reated exchange to populate subscribers in protocol")) {
                                    isBulkLoad = true;
                                    LOG.debug("Bulk load found in exchange " + bulkExchange.getId() + " on " + bulkExchange.getTimestamp() + ": " + eventDesc);
                                    break;
                                }
                            }
                        }

                        if (!isBulkLoad) {
                            continue;
                        }

                        //if this exchange is a bulk load, then we need to check that any exchanges received BEFORE it,
                        //which didn't have this protocol in their headers, were 100% finished with their inbound
                        //transform before the bulk load
                        String[] protocolIds = bulkExchange.getHeaderAsStringArray(HeaderKeys.ProtocolIds);
                        if (protocolIds.length != 1) {
                            throw new Exception("Bulk Exchange " + bulkExchange.getId() + " has " + protocolIds.length + " protocol IDs in its header");
                        }
                        String protocolId = protocolIds[0];
                        Date dtBulk = bulkExchange.getTimestamp();

                        Set<UUID> patientsToFix = new HashSet<>();

                        for (int j=i+1; j<exchanges.size(); j++) {
                            Exchange priorExchange = exchanges.get(j);

                            //skip any other special exchanges that are for bulk loads etc
                            Boolean priorAllowRequeuing = priorExchange.getHeaderAsBoolean(HeaderKeys.AllowQueueing);
                            if (priorAllowRequeuing != null && !priorAllowRequeuing.booleanValue()) {
                                continue;
                            }

                            //skip any where the header contains the same protocol ID, as this data will have gone
                            //to the subscriber anyway
                            boolean hadSameProtocol = false;
                            String[] priorProtocolIds = priorExchange.getHeaderAsStringArray(HeaderKeys.ProtocolIds);
                            if (priorProtocolIds == null) {
                                throw new Exception("Null protocol IDs for exchange " + priorExchange.getId());
                            }
                            for (String priorProtocolId: priorProtocolIds) {
                                if (priorProtocolId.equals(protocolId)) {
                                    hadSameProtocol = true;
                                }
                            }
                            if (hadSameProtocol) {
                                continue;
                            }

                            //skip any that didn't actually transform any data
                            ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal();
                            List<ExchangeBatch> batches =
                                    exchangeBatchDal.retrieveForExchangeId(priorExchange.getId());
                            if (batches.isEmpty()) {
                                continue;
                            }

                            List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(service.getId(), systemId, priorExchange.getId());
                            if (transformAudits.isEmpty()) {
                                throw new Exception("No transform audits for exchange " + priorExchange.getId());
                            }

                            ExchangeTransformAudit firstTransformAudit = null;
                            for (ExchangeTransformAudit transformAudit: transformAudits) {
                                if (transformAudit.getEnded() != null) {
                                    firstTransformAudit = transformAudit;
                                    break;
                                }
                            }
                            if (firstTransformAudit == null) {
                                throw new Exception("No finished transform audit found for exchange " + priorExchange.getId());
                            }

                            Date dtTransform = firstTransformAudit.getEnded();
                            if (dtTransform.before(dtBulk)) {
                                //if the transform finished before the bulk, then we're OK and don't need to look at any more exchanges
                                break;
                            }

                            //if the transform didn't finish until AFTER the bulk was started, then this exchange's data
                            //won't have gone to the subscriber
                            LOG.debug("Exchange " + priorExchange.getId() + " finished transform on " + dtTransform + " so missed going to subscriber");

                            for (ExchangeBatch b: batches) {
                                UUID patientId = b.getEdsPatientId();
                                if (patientId != null) {
                                    patientsToFix.add(patientId);
                                }
                            }
                            LOG.debug("Found " + batches.size() + " batches, patients to fix = " + patientsToFix.size());
                        }

                        LOG.debug("Found total " + patientsToFix.size() + " patients to fix");

                        //save the list of patients to a table
                        if (!patientsToFix.isEmpty()) {
                            Connection conn = ConnectionManager.getEdsConnection();
                            PreparedStatement ps = conn.prepareStatement("INSERT INTO " + tableName + " VALUES (?, ?, ?, ?)");
                            for (UUID patientId : patientsToFix) {
                                int col = 1;
                                ps.setString(col++, service.getId().toString());
                                ps.setString(col++, protocolId);
                                ps.setString(col++, bulkExchange.getId().toString());
                                ps.setString(col++, patientId.toString());
                                ps.addBatch();
                            }
                            ps.executeBatch();
                            conn.commit();
                            ps.close();
                            conn.close();
                        }
                    }
                }
            }

            LOG.info("Finished finding missed exchanges");

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private static boolean shouldSkipService(Service service, String odsCodeRegex) {
        if (odsCodeRegex == null) {
            return false;
        }
        String odsCode = service.getLocalId();
        if (Strings.isNullOrEmpty(odsCode) || !Pattern.matches(odsCodeRegex, odsCode)) {
            LOG.debug("Skipping " + service + " due to regex");
            return true;
        }
        return false;
    }

    private static void createDeleteZipsForSubscriber(int batchSize, String sourceTable, int subscriberId) {
        LOG.info("Create Zips For Subscriber from " + sourceTable + " subscriberId " + subscriberId + " and batchSize " + batchSize);
        try {
            Connection conn = ConnectionManager.getEdsNonPooledConnection();

            String sql = "SELECT enterprise_id FROM " + sourceTable + " WHERE done = 0 AND subscriber_id = ? 
LIMIT " + batchSize; PreparedStatement psSelect = conn.prepareStatement(sql); sql = "UPDATE " + sourceTable + " SET done = 1 WHERE enterprise_id = ?"; PreparedStatement psDone = conn.prepareStatement(sql); int batchesDone = 0; int idsDone = 0; while (true) { List<Long> ids = new ArrayList<>(); psSelect.setInt(1, subscriberId); ResultSet rs = psSelect.executeQuery(); while (rs.next()) { long id = rs.getLong(1); ids.add(new Long(id)); } if (ids.isEmpty()) { break; } OutputContainer container = new OutputContainer(); org.endeavourhealth.transform.subscriber.targetTables.Observation obsWriter = container.getObservations(); for (Long id: ids) { SubscriberId idWrapper = new SubscriberId(SubscriberTableId.OBSERVATION.getId(), id.longValue(), null, null); obsWriter.writeDelete(idWrapper); } byte[] bytes = container.writeToZip(); String base64 = Base64.getEncoder().encodeToString(bytes); SubscriberZipFileUUIDsDalI szfudi = DalProvider.factorySubscriberZipFileUUIDs(); szfudi.createSubscriberZipFileUUIDsEntity(subscriberId, UUID.randomUUID().toString(), UUID.randomUUID().toString(), base64); //update the table to say done batchesDone ++; for (Long id: ids) { psDone.setLong(1, id.longValue()); psDone.addBatch(); idsDone ++; } psDone.executeBatch(); conn.commit(); LOG.debug("Done " + batchesDone + ", total = " + idsDone); if (ids.size() < batchSize) { break; } } psSelect.close(); psDone.close(); conn.close(); LOG.debug("Finished at " + batchesDone + ", total = " + idsDone); LOG.info("Finished Create Zips For Subscriber"); } catch (Throwable t) { LOG.error("", t); } } /*private static void testJmx() { LOG.info("Testing JMX"); try { LOG.debug("----OperatingSystemMXBean--------------------------------"); OperatingSystemMXBean osb = ManagementFactory.getOperatingSystemMXBean(); LOG.debug("getName = " + osb.getName()); LOG.debug("getSystemLoadAverage = " + osb.getSystemLoadAverage()); LOG.debug("getArch = " + osb.getArch()); LOG.debug("getVersion = " + osb.getVersion()); LOG.debug("getAvailableProcessors = " + osb.getAvailableProcessors()); LOG.debug("----MemoryMXBean--------------------------------"); MemoryMXBean mb = ManagementFactory.getMemoryMXBean(); LOG.debug("getNonHeapMemoryUsage = " + mb.getNonHeapMemoryUsage()); LOG.debug("getHeapMemoryUsage = " + mb.getHeapMemoryUsage()); LOG.debug("getObjectPendingFinalizationCount = " + mb.getObjectPendingFinalizationCount()); LOG.debug("----MemoryMXBean--------------------------------"); com.sun.management.OperatingSystemMXBean sosb = (com.sun.management.OperatingSystemMXBean)ManagementFactory.getOperatingSystemMXBean(); LOG.debug("getSystemCpuLoad = " + sosb.getSystemCpuLoad()); LOG.debug("getTotalPhysicalMemorySize = " + sosb.getTotalPhysicalMemorySize()); LOG.info("Finished Testing JMX"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testDatabases(String odsCodesStr, String subscriberConfigNamesStr) { LOG.info("Testing all databases"); try { String[] odsCodes = odsCodesStr.split("\\|"); String[] subscriberConfigNames = subscriberConfigNamesStr.split("\\|"); for (String odsCode: odsCodes) { LOG.debug("---------------------------------------------------------------"); LOG.debug("Doing " + odsCode); //admin LOG.debug("Doing admin"); ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); Service service = serviceDalI.getByLocalIdentifier(odsCode); LOG.debug("Admin test " + service); UUID serviceId = service.getId(); //EDS LOG.debug("Doing EDS"); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> 
patientIds = patientSearchDal.getPatientIds(serviceId); LOG.debug("EDS test = " + patientIds.size()); PatientLinkDalI patientLinkDalI = DalProvider.factoryPatientLinkDal(); List<PatientLinkPair> changes = patientLinkDalI.getChangesSince(new Date()); LOG.debug("EDS (hibernate) test = " + changes.size()); //reference LOG.debug("Doing reference"); String snomedTerm = TerminologyService.lookupSnomedTerm("10000006"); LOG.debug("Reference test = " + snomedTerm); //HL7 Receiver LOG.debug("Doing HL7 Receiver"); Hl7ResourceIdDalI hl7ResourceIdDal = DalProvider.factoryHL7ResourceDal(); ResourceId id = hl7ResourceIdDal.getResourceId("B", "Patient", "PIdAssAuth=2.16.840.1.113883.3.2540.1-PatIdValue=N7619764"); LOG.debug("HL7 receiver test = " + id); //audit LOG.debug("Doing audit"); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); List<UUID> systemIds = findSystemIds(service); UUID systemId = systemIds.get(0); List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 100); LOG.debug("Audit test " + exchanges.size()); //publisher common LOG.debug("Doing publisher common"); EmisTransformDalI emisTransformDalI = DalProvider.factoryEmisTransformDal(); EmisCsvCodeMap codeMap = emisTransformDalI.getCodeMapping(false, 654010L); LOG.debug("Publisher common test " + codeMap); boolean wasAdminApplied = emisTransformDalI.wasAdminCacheApplied(serviceId); LOG.debug("Publisher common (hibernate) test " + wasAdminApplied); //sftp reader LOG.debug("Doing SFTP reader"); EntityManager entityManager = ConnectionManager.getSftpReaderEntityManager(); PreparedStatement ps = null; SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); String sql = null; if (ConnectionManager.isPostgreSQL(connection)) { sql = "SELECT instance_name FROM configuration.instance ORDER BY instance_name"; } else { sql = "SELECT instance_name FROM instance ORDER BY instance_name"; } ps = connection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); rs.next(); LOG.debug("SFTP Reader test " + rs.getString(1)); ps.close(); entityManager.close(); //publisher transform LOG.debug("Doing publisher transform"); ResourceIdTransformDalI resourceIdTransformDalI = DalProvider.factoryResourceIdTransformDal(); List<Reference> references = new ArrayList<>(); UUID patientId = patientIds.get(0); references.add(ReferenceHelper.createReference(ResourceType.Patient, patientId.toString())); Map<Reference, Reference> map = resourceIdTransformDalI.findSourceReferencesFromEdsReferences(serviceId, references); LOG.debug("publisher transform done " + map); //ehr LOG.debug("Doing EHR"); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceId, ResourceType.Patient.toString(), patientId); LOG.debug("EHR done " + (wrapper != null)); for (String subscriberConfigName: subscriberConfigNames) { //subscriber transform LOG.debug("Doing subscriber transform " + subscriberConfigName); SubscriberOrgMappingDalI subscriberOrgMappingDalI = DalProvider.factorySubscriberOrgMappingDal(subscriberConfigName); Long enterpriseId = subscriberOrgMappingDalI.findEnterpriseOrganisationId(serviceId.toString()); LOG.debug("Subscriber transform on " + subscriberConfigName + " done " + enterpriseId); //subscriber LOG.debug("Doing subscribers from " + subscriberConfigName); List<EnterpriseConnector.ConnectionWrapper> subscriberConnections = EnterpriseConnector.openConnection(subscriberConfigName); for 
(EnterpriseConnector.ConnectionWrapper subscriberConnection : subscriberConnections) { Connection connection1 = subscriberConnection.getConnection(); sql = "SELECT name FROM organization WHERE id = ?"; ps = connection1.prepareStatement(sql); if (enterpriseId != null) { ps.setLong(1, enterpriseId); } else { //if no ID found, just use a substitute number ps.setLong(1, 999); } rs = ps.executeQuery(); String orgId = null; if (rs.next()) { orgId = rs.getString(1); } LOG.debug("subscriber on " + subscriberConfigName + " (" + subscriberConnection.toString() + ") done " + orgId); ps.close(); connection1.close(); } } *//* FhirAudit("db_fhir_audit", true, "FhirAuditDb"), PublisherStaging("db_publisher_staging", false, "PublisherStagingDb"), DataGenerator("db_data_generator", true, "DataGeneratorDb"), *//* } LOG.info("Finished testing all databases"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEmisSnomedCodes(String odsCodeRegex) { LOG.info("Finished Fixing Emis Snomed codes for orgs " + odsCodeRegex); try { //find affected Code IDs LOG.info("Finding affected code IDs"); Set<Long> codeIds = new HashSet<>(); Map<Long, EmisCsvCodeMap> hmCodeCache = new HashMap<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EmisTransformDalI mappingRepository = DalProvider.factoryEmisTransformDal(); EntityManager publisherCommonEntityManager = ConnectionManager.getPublisherCommonEntityManager(); SessionImpl publisherCommonSession = (SessionImpl)publisherCommonEntityManager.getDelegate(); Connection publisherCommonConnection = publisherCommonSession.connection(); String sql = "SELECT code_id FROM emis_csv_code_map WHERE medication = false and read_code like '%-%'"; PreparedStatement ps = publisherCommonConnection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { long codeId = rs.getLong(1); codeIds.add(new Long(codeId)); } ps.close(); publisherCommonEntityManager.close(); LOG.info("Found " + codeIds.size() + " affected code IDs"); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { if (odsCodeRegex != null) { String odsCode = service.getLocalId(); if (Strings.isNullOrEmpty(odsCode) || !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } } LOG.info("-----------------------------------------------------------------"); LOG.info("Doing " + service); List<UUID> systems = findSystemIds(service); for (UUID systemId: systems) { LibraryItem libraryItem = LibraryRepositoryHelper.getLibraryItem(systemId); if (!libraryItem.getName().toUpperCase().contains("EMIS")) { LOG.info("Skipping system " + libraryItem.getName()); continue; } LOG.info("Doing system ID " + libraryItem.getName()); Set<String> hsObservationsDone = new HashSet<>(); Set<String> hsDiariesDone = new HashSet<>(); Set<String> hsConsultationsDone = new HashSet<>(); Set<String> hsSlotsDone = new HashSet<>(); EmisCsvHelper helper = new EmisCsvHelper(service.getId(), systemId, null, null, null); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); int fixed = 0; int found = 0; int exchangesDone = 0; for (Exchange exchange: exchanges) { exchangesDone ++; if (exchangesDone % 30 == 0) { LOG.info("Done " + exchangesDone + " of " + exchanges.size() + " exchanges"); } List<ExchangePayloadFile> files 
= ExchangeHelper.parseExchangeBody(exchange.getBody()); if (files.isEmpty() || files.size() == 1) { //custom extract continue; } if (!EmisCsvToFhirTransformer.shouldProcessPatientData(helper)) { continue; } ExchangePayloadFile observationFile = findFileOfType(files, "CareRecord_Observation"); if (observationFile != null) { LOG.debug("Doing " + observationFile.getPath()); int obsRecordsDone = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(observationFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { obsRecordsDone ++; if (obsRecordsDone % 1000 == 0) { LOG.info("Done " + obsRecordsDone + " records"); } CSVRecord record = iterator.next(); String observationGuid = record.get("ObservationGuid"); if (hsObservationsDone.contains(observationGuid)) { continue; } hsObservationsDone.add(observationGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("CodeId"); Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell observationCell = CsvCell.factoryDummyWrapper(observationGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); Set<ResourceType> resourceTypes = ObservationTransformer.findOriginalTargetResourceTypes(helper, patientCell, observationCell); for (ResourceType resourceType : resourceTypes) { String sourceId = EmisCsvHelper.createUniqueId(patientCell, observationCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), resourceType, sourceId); //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), resourceType.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; if (resourceType == ResourceType.Condition) { Condition condition = (Condition) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(condition.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Procedure) { Procedure procedure = (Procedure) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(procedure.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.AllergyIntolerance) { AllergyIntolerance allergyIntolerance = (AllergyIntolerance) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(allergyIntolerance.getSubstance()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.FamilyMemberHistory) { FamilyMemberHistory familyMemberHistory = (FamilyMemberHistory) resource; Coding coding = 
ObservationCodeHelper.findOriginalCoding(familyMemberHistory.getCondition().get(0).getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Immunization) { Immunization immunization = (Immunization) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(immunization.getVaccineCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.DiagnosticOrder) { DiagnosticOrder diagnosticOrder = (DiagnosticOrder) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(diagnosticOrder.getItem().get(0).getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Specimen) { Specimen specimen = (Specimen) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(specimen.getType()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.DiagnosticReport) { DiagnosticReport spediagnosticReportimen = (DiagnosticReport) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(spediagnosticReportimen.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.ReferralRequest) { ReferralRequest referralRequest = (ReferralRequest) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(referralRequest.getServiceRequested().get(0)); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else if (resourceType == ResourceType.Observation) { Observation observation = (Observation) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(observation.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); } else { throw new Exception("Unexpected resource type " + resourceType + " for ID " + uuid); } String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } } parser.close(); } ExchangePayloadFile diaryFile = findFileOfType(files, "CareRecord_Diary"); if (diaryFile != null) { LOG.debug("Doing " + diaryFile.getPath()); int diaryRecords = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(diaryFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); 
Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { diaryRecords ++; if (diaryRecords % 1000 == 0) { LOG.info("Done " + diaryRecords + " records"); } CSVRecord record = iterator.next(); String diaryGuid = record.get("DiaryGuid"); if (hsDiariesDone.contains(diaryGuid)) { continue; } hsDiariesDone.add(diaryGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("CodeId"); Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell diaryCell = CsvCell.factoryDummyWrapper(diaryGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); String sourceId = EmisCsvHelper.createUniqueId(patientCell, diaryCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), ResourceType.ProcedureRequest, sourceId); if (uuid == null) { continue; } //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), ResourceType.ProcedureRequest.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; ProcedureRequest procedureRequest = (ProcedureRequest) resource; Coding coding = ObservationCodeHelper.findOriginalCoding(procedureRequest.getCode()); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } parser.close(); } ExchangePayloadFile consultationFile = findFileOfType(files, "CareRecord_Consultation"); if (consultationFile != null) { LOG.debug("Doing " + consultationFile.getPath()); int consultationRecordsDone = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(consultationFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { consultationRecordsDone ++; if (consultationRecordsDone % 1000 == 
0) { LOG.info("Done " + consultationRecordsDone + " records"); } CSVRecord record = iterator.next(); String consultationGuid = record.get("ConsultationGuid"); if (hsConsultationsDone.contains(consultationGuid)) { continue; } hsConsultationsDone.add(consultationGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("ConsultationSourceCodeId"); if (Strings.isNullOrEmpty(codeIdStr)) { continue; } Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell consultationCell = CsvCell.factoryDummyWrapper(consultationGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); String sourceId = EmisCsvHelper.createUniqueId(patientCell, consultationCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), ResourceType.Encounter, sourceId); //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), ResourceType.Encounter.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; Encounter encounter = (Encounter) resource; Extension extension = ExtensionConverter.findExtension(encounter, FhirExtensionUri.ENCOUNTER_SOURCE); if (extension == null || !extension.hasValue()) { continue; } CodeableConcept codeableConcept = (CodeableConcept)extension.getValue(); Coding coding = ObservationCodeHelper.findOriginalCoding(codeableConcept); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } parser.close(); } ExchangePayloadFile slotFile = findFileOfType(files, "Appointment_slot"); if (slotFile != null) { LOG.debug("Doing " + slotFile.getPath()); int slotRecordsDone = 0; InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(slotFile.getPath()); CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader()); 
Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { slotRecordsDone ++; if (slotRecordsDone % 1000 == 0) { LOG.info("Done " + slotRecordsDone + " records"); } CSVRecord record = iterator.next(); String slotGuid = record.get("SlotGuid"); if (hsSlotsDone.contains(slotGuid)) { continue; } hsSlotsDone.add(slotGuid); String deleted = record.get("Deleted"); if (deleted.equalsIgnoreCase("true")) { continue; } String codeIdStr = record.get("DnaReasonCodeId"); Long codeId = Long.valueOf(codeIdStr); if (!codeIds.contains(codeId)) { continue; } found++; if (found % 100 == 0) { LOG.info("Found " + found + " records and fixed " + fixed); } EmisCsvCodeMap codeObj = hmCodeCache.get(codeId); if (codeObj == null) { codeObj = mappingRepository.getCodeMapping(false, codeId); hmCodeCache.put(codeId, codeObj); } String desiredCode = codeObj.getAdjustedCode(); String patientGuid = record.get("PatientGuid"); CsvCell slotCell = CsvCell.factoryDummyWrapper(slotGuid); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientGuid); String sourceId = EmisCsvHelper.createUniqueId(patientCell, slotCell); UUID uuid = IdHelper.getEdsResourceId(service.getId(), ResourceType.Appointment, sourceId); //need to get from history, so we get the version UUID //ResourceWrapper wrapper = resourceDal.getCurrentVersion(service.getId(), resourceType.toString(), uuid); List<ResourceWrapper> history = resourceDal.getResourceHistory(service.getId(), ResourceType.Appointment.toString(), uuid); if (history.isEmpty()) { continue; } ResourceWrapper wrapper = history.get(0); if (wrapper.isDeleted()) { continue; } Resource resource = wrapper.getResource(); String oldCode = null; Appointment encounter = (Appointment) resource; Extension extension = ExtensionConverter.findExtension(encounter, FhirExtensionUri.APPOINTMENT_DNA_REASON_CODE); if (extension == null || !extension.hasValue()) { continue; } CodeableConcept codeableConcept = (CodeableConcept)extension.getValue(); Coding coding = ObservationCodeHelper.findOriginalCoding(codeableConcept); oldCode = coding.getCode(); if (oldCode.equals(desiredCode)) { continue; } coding.setCode(desiredCode); String newJson = FhirSerializationHelper.serializeResource(resource); wrapper.setResourceData(newJson); //service_id, resource_id, resource_type, patient_id, term, old_original_code, new_original_code sql = "INSERT INTO tmp.emis_code_fix VALUES (?, ?, ?, ?, ?, ?, ?)"; EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl) edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); ps = edsConnection.prepareStatement(sql); edsEntityManager.getTransaction().begin(); int col = 1; ps.setString(col++, service.getId().toString()); ps.setString(col++, wrapper.getPatientId().toString()); ps.setString(col++, wrapper.getResourceId().toString()); ps.setString(col++, wrapper.getResourceType()); ps.setString(col++, codeObj.getReadTerm()); ps.setString(col++, oldCode); ps.setString(col++, desiredCode); ps.executeUpdate(); edsEntityManager.getTransaction().commit(); ps.close(); edsEntityManager.close(); saveResourceWrapper(service.getId(), wrapper); fixed++; } parser.close(); } } LOG.info("Done " + exchangesDone + " of " + exchanges.size() + " exchanges"); LOG.info("Found " + found + " records and fixed " + fixed); } } LOG.info("Finished Fixing Emis Snomed codes for orgs " + odsCodeRegex); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void populatePatientSearchEpisodeOdsCode() { 
LOG.info("Populating Patient Search Episode ODS Codes"); try { EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); int done = 0; String sql = "SELECT service_id, patient_id, episode_id FROM tmp.patient_search_episode_tmp WHERE done = 0"; PreparedStatement ps = edsConnection.prepareStatement(sql); ps.setFetchSize(1000); ResultSet rs = ps.executeQuery(); while (rs.next()) { String serviceId = rs.getString(1); String patientId = rs.getString(2); String episodeId = rs.getString(3); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EpisodeOfCare episodeOfCare = (EpisodeOfCare)resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.EpisodeOfCare, episodeId); if (episodeOfCare != null && episodeOfCare.hasManagingOrganization()) { Reference orgReference = episodeOfCare.getManagingOrganization(); ReferenceComponents comps = org.endeavourhealth.common.fhir.ReferenceHelper.getReferenceComponents(orgReference); ResourceType type = comps.getResourceType(); String id = comps.getId(); resourceDal = DalProvider.factoryResourceDal(); Organization org = (Organization)resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), type, id); if (org != null) { String orgOdsCode = IdentifierHelper.findOdsCode(org); EntityManager edsEntityManager2 = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession2 = (SessionImpl)edsEntityManager2.getDelegate(); Connection edsConnection2 = edsSession2.connection(); sql = "UPDATE patient_search_episode SET ods_code = ? WHERE service_id = ? AND patient_id = ? AND episode_id = ?"; PreparedStatement ps2 = edsConnection2.prepareStatement(sql); edsEntityManager2.getTransaction().begin(); ps2.setString(1, orgOdsCode); ps2.setString(2, serviceId); ps2.setString(3, patientId); ps2.setString(4, episodeId); ps2.executeUpdate(); edsEntityManager2.getTransaction().commit(); ps2.close(); edsEntityManager2.close(); } } EntityManager edsEntityManager2 = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession2 = (SessionImpl)edsEntityManager2.getDelegate(); Connection edsConnection2 = edsSession2.connection(); sql = "UPDATE tmp.patient_search_episode_tmp SET done = ? WHERE service_id = ? AND patient_id = ? 
AND episode_id = ?"; PreparedStatement ps2 = edsConnection2.prepareStatement(sql); edsEntityManager2.getTransaction().begin(); ps2.setBoolean(1, true); ps2.setString(2, serviceId); ps2.setString(3, patientId); ps2.setString(4, episodeId); ps2.executeUpdate(); edsEntityManager2.getTransaction().commit(); ps2.close(); edsEntityManager2.close(); done ++; if (done % 100 == 0) { LOG.debug("Done " + done); } } rs.close(); ps.close(); LOG.debug("Done " + done); LOG.info("Finished Populating Patient Search Episode ODS Codes"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixEmisDrugRecords(String odsCodeRegex) { LOG.info("Fixing Emis drug records"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { if (odsCodeRegex != null) { String odsCode = service.getLocalId(); if (Strings.isNullOrEmpty(odsCode) || !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } } //check if Emis String notes = service.getNotes(); if (notes == null || !notes.contains("CDB")) { LOG.info("Skipping as not Emis: " + service); continue; } LOG.info("Doing " + service); List<UUID> systems = findSystemIds(service); for (UUID systemId: systems) { LOG.info("Doing system ID " + systemId); LOG.info("Finding patients"); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientIds = patientSearchDal.getPatientIds(service.getId()); LOG.info("Found " + patientIds.size() + " patients"); //create dummy exchange String bodyJson = JsonSerializer.serialize(new ArrayList<ExchangePayloadFile>()); String odsCode = service.getLocalId(); Exchange exchange = null; UUID exchangeId = UUID.randomUUID(); List<UUID> batchIdsCreated = new ArrayList<>(); FhirResourceFiler filer = new FhirResourceFiler(exchangeId, service.getId(), systemId, new TransformError(), batchIdsCreated); int done = 0; for (UUID patientId : patientIds) { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<ResourceWrapper> statementWrappers = resourceDal.getResourcesByPatient(service.getId(), patientId, ResourceType.MedicationStatement.toString()); List<ResourceWrapper> orderWrappers = null; //get on demand for (ResourceWrapper statementWrapper : statementWrappers) { MedicationStatement medicationStatement = (MedicationStatement) statementWrapper.getResource(); if (!medicationStatement.hasStatus()) { continue; } MedicationStatementBuilder builder = new MedicationStatementBuilder(medicationStatement); boolean fixed = false; Date cancellationDate = null; Extension outerExtension = ExtensionConverter.findExtension(medicationStatement, FhirExtensionUri.MEDICATION_AUTHORISATION_CANCELLATION); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, "date"); if (innerExtension != null) { DateType dt = (DateType) innerExtension.getValue(); cancellationDate = dt.getValue(); } } if (medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.ACTIVE) { //if active then ensure there's no cancellation date if (cancellationDate != null) { builder.setCancellationDate(null); fixed = true; } } else if (medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.COMPLETED) { //if non-active, then ensure there IS a cancellation date if (cancellationDate == null) { IssueRecordIssueDate mostRecentDate = null; Reference medicationStatementReference = 
ReferenceHelper.createReferenceExternal(medicationStatement); if (orderWrappers == null) { orderWrappers = resourceDal.getResourcesByPatient(service.getId(), patientId, ResourceType.MedicationOrder.toString()); } for (ResourceWrapper orderWrapper : orderWrappers) { //quick check against the raw JSON so we don't have to deserialise the bulk of them String orderJson = orderWrapper.getResourceData(); if (!orderJson.contains(medicationStatementReference.getReference())) { continue; } MedicationOrder order = (MedicationOrder) orderWrapper.getResource(); MedicationOrderBuilder medicationOrderBuilder = new MedicationOrderBuilder(order); Reference reference = medicationOrderBuilder.getMedicationStatementReference(); if (reference != null && ReferenceHelper.equals(reference, medicationStatementReference)) { DateTimeType started = medicationOrderBuilder.getDateWritten(); Integer duration = medicationOrderBuilder.getDurationDays(); IssueRecordIssueDate obj = new IssueRecordIssueDate(started, duration); if (obj.afterOrOtherIsNull(mostRecentDate)) { mostRecentDate = obj; } } } //if no issues exist for it, use the start date of the DrugRecord if (mostRecentDate == null) { Date d = medicationStatement.getDateAsserted(); mostRecentDate = new IssueRecordIssueDate(new DateTimeType(d), new Integer(0)); } Date d = mostRecentDate.getIssueDateType().getValue(); int duration = 0; Integer intObj = mostRecentDate.getIssueDuration(); if (intObj != null) { duration = intObj.intValue(); } Calendar cal = Calendar.getInstance(); cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, duration); cancellationDate = cal.getTime(); builder.setCancellationDate(cancellationDate); fixed = true; } } else { LOG.error("Unexpected status " + medicationStatement.getStatus() + " on resource " + statementWrapper); } if (fixed) { if (exchange == null) { exchange = new Exchange(); exchange.setId(exchangeId); exchange.setBody(bodyJson); exchange.setTimestamp(new Date()); exchange.setHeaders(new HashMap<>()); exchange.setHeaderAsUuid(HeaderKeys.SenderServiceUuid, service.getId()); exchange.setHeader(HeaderKeys.ProtocolIds, ""); //just set to non-null value, so postToExchange(..) 
can safely recalculate exchange.setHeader(HeaderKeys.SenderLocalIdentifier, odsCode); exchange.setHeaderAsUuid(HeaderKeys.SenderSystemUuid, systemId); exchange.setHeader(HeaderKeys.SourceSystem, MessageFormat.EMIS_CSV); exchange.setServiceId(service.getId()); exchange.setSystemId(systemId); AuditWriter.writeExchange(exchange); AuditWriter.writeExchangeEvent(exchange, "Manually created to re-process Emis DrugRecord data"); } //save resource filer.savePatientResource(null, false, builder); } } done++; if (done % 100 == 0) { LOG.info("Done " + done + " patients"); } } LOG.info("Done " + done + " patients"); //close down filer filer.waitToFinish(); if (exchange != null) { //set multicast header String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); //post to Rabbit protocol queue List<UUID> exchangeIds = new ArrayList<>(); exchangeIds.add(exchange.getId()); QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, true); } } } LOG.info("Finished Fixing Emis drug records"); } catch (Throwable t) { LOG.error("", t); } }*/ /** * populates the pseudo_id table on a new-style subscriber DB (MySQL or SQL Server) with pseudo_ids generated * from a salt */ /*private static void populateSubscriberPseudoId(String subscriberConfigName, String saltKeyName) { LOG.info("Populating subscriber DB pseudo ID for " + subscriberConfigName + " using " + saltKeyName); try { //find salt details JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); JsonNode linkDistributorsNode = config.get("pseudo_salts"); if (linkDistributorsNode == null) { throw new Exception("No pseudo_salts found in config"); } ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(linkDistributorsNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig[] arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); LinkDistributorConfig saltConfig = null; for (LinkDistributorConfig l : arr) { if (l.getSaltKeyName().equals(saltKeyName)) { saltConfig = l; } } if (saltConfig == null) { throw new Exception("No salt config found for " + saltKeyName); } String sql = "SELECT source_id, subscriber_id" + " FROM subscriber_id_map" + " WHERE source_id LIKE '" + ResourceType.Patient.toString() + "%'" + " AND subscriber_table = " + SubscriberTableId.PATIENT.getId(); Map<String, Long> hmPatients = new HashMap<>(); EntityManager subscriberTransformEntityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfigName); SessionImpl subscriberTransformSession = (SessionImpl)subscriberTransformEntityManager.getDelegate(); Connection subscriberTransformConnection = subscriberTransformSession.connection(); PreparedStatement ps = subscriberTransformConnection.prepareStatement(sql); ps.setFetchSize(1000); LOG.info("Running query to find patients"); ResultSet rs = ps.executeQuery(); while (rs.next()) { String sourceId = rs.getString(1); Long subscriberId = rs.getLong(2); hmPatients.put(sourceId, subscriberId); if (hmPatients.size() % 5000 == 0) { LOG.info("Found " + hmPatients.size()); } } ps.close(); subscriberTransformEntityManager.clear(); LOG.info("Query done, found " + hmPatients.size() + " patients"); int done = 0; int skipped = 0; File fixFile = new File("FIX_" + subscriberConfigName + "_" + saltKeyName + ".sql"); PrintWriter fixWriter = new PrintWriter(new BufferedWriter(new 
FileWriter(fixFile))); File errorFile = new File("ERRORS_" + subscriberConfigName + "_" + saltKeyName + ".txt"); PrintWriter errorWriter = new PrintWriter(new BufferedWriter(new FileWriter(errorFile))); LOG.info("Starting to process patients"); String fixSql = "DROP TABLE IF EXISTS pseudo_id_tmp;"; fixWriter.println(fixSql); fixSql = "CREATE TABLE pseudo_id_tmp (id bigint, patient_id bigint, salt_key_name varchar(50), pseudo_id varchar(255));"; fixWriter.println(fixSql); List<String> batch = new ArrayList<>(); for (String sourceId: hmPatients.keySet()) { Long subscriberId = hmPatients.get(sourceId); Reference ref = ReferenceHelper.createReference(sourceId); String patientUuidStr = ReferenceHelper.getReferenceId(ref); UUID patientUuid = UUID.fromString(patientUuidStr); //need to find the service ID PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); PatientSearch patientSearch = patientSearchDal.searchByPatientId(patientUuid); if (patientSearch == null) { errorWriter.println("Failed to find patient search record for " + sourceId + " with subscriber ID " + subscriberId); skipped ++; continue; } //find current FHIR patient UUID serviceId = patientSearch.getServiceId(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientUuidStr); if (patient == null) { errorWriter.println("Null FHIR Patient for " + sourceId + " with subscriber ID " + subscriberId); skipped ++; continue; } String pseudoId = PseudoIdBuilder.generatePsuedoIdFromConfig(subscriberConfigName, saltConfig, patient); //need to store in our pseudo ID mapping table if (pseudoId != null) { PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfigName); pseudoIdDal.saveSubscriberPseudoId(patientUuid, subscriberId.longValue(), saltKeyName, pseudoId); String pseudoIdRowSourceId = ReferenceHelper.createReferenceExternal(patient).getReference() + PatientTransformer.PREFIX_PSEUDO_ID + saltKeyName; SubscriberResourceMappingDalI enterpriseIdDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); SubscriberId pseudoIdRowId = enterpriseIdDal.findOrCreateSubscriberId(SubscriberTableId.PSEUDO_ID.getId(), pseudoIdRowSourceId); batch.add("(" + pseudoIdRowId.getSubscriberId() + ", " + subscriberId + ", '" + saltKeyName + "', '" + pseudoId + "')"); if (batch.size() >= 50) { fixSql = "INSERT INTO pseudo_id_tmp (id, patient_id, salt_key_name, pseudo_id) VALUES " + String.join(", ", batch) + ";"; fixWriter.println(fixSql); batch.clear(); } //fixSql = "INSERT INTO pseudo_id_tmp (id, patient_id, salt_key_name, pseudo_id) VALUES (" + pseudoIdRowId.getSubscriberId() + ", " + subscriberId + ", '" + saltKeyName + "', '" + pseudoId + "');"; //fixWriter.println(fixSql); } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + ", skipped " + skipped); } } if (!batch.isEmpty()) { fixSql = "INSERT INTO pseudo_id_tmp (id, patient_id, salt_key_name, pseudo_id) VALUES " + String.join(", ", batch) + ";"; fixWriter.println(fixSql); } fixSql = "CREATE INDEX ix ON pseudo_id_tmp (patient_id);"; fixWriter.println(fixSql); fixSql = "DELETE FROM pseudo_id WHERE salt_key_name = '" + saltKeyName + "';"; fixWriter.println(fixSql); fixSql = "INSERT INTO pseudo_id SELECT t.id, t.patient_id, t.salt_key_name, t.pseudo_id FROM pseudo_id_tmp t INNER JOIN patient p ON p.id = t.patient_id;"; fixWriter.println(fixSql); fixSql = "DROP TABLE pseudo_id_tmp;"; fixWriter.println(fixSql); fixWriter.close(); 
errorWriter.close(); LOG.info("Finished Populating subscriber DB pseudo ID for " + subscriberConfigName + " using " + saltKeyName); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void investigateMissingPatients(String nhsNumberFile, String protocolName, String subscriberConfigName, String ccgCodeRegex) { LOG.info("Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); String salt = null; JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); ArrayNode linked = (ArrayNode)config.get("linkedDistributors"); for (int i=0; i<linked.size(); i++) { JsonNode linkedElement = linked.get(i); String name = linkedElement.get("saltKeyName").asText(); if (name.equals("EGH")) { salt = linkedElement.get("salt").asText(); } } //go through file and check File inputFile = new File(nhsNumberFile); if (!inputFile.exists()) { throw new Exception(nhsNumberFile + " doesn't exist"); } List<String> nhsNumbers = Files.readAllLines(inputFile.toPath()); LOG.info("Found " + nhsNumbers.size()); String fileName = FilenameUtils.getBaseName(nhsNumberFile); File outputCsvFile = new File("OUTPUT_" + fileName + ".csv"); BufferedWriter bw = new BufferedWriter(new FileWriter(outputCsvFile)); CSVPrinter outputPrinter = new CSVPrinter(bw, CSVFormat.DEFAULT.withHeader("nhs_number", "pseudo_id", "finding", "comment")); File outputTextFile = new File("OUTPUT_" + nhsNumberFile); List<String> lines = new ArrayList<>(); for (String nhsNumber: nhsNumbers) { LOG.debug("Doing " + nhsNumber); PseudoIdBuilder b = new PseudoIdBuilder(subscriberConfigName, "EGH", salt); b.addValueNhsNumber("NhsNumber", nhsNumber, null); String calcPseudoId = b.createPseudoId(); String finding = null; String comment = null; lines.add(">>>>>>>>> " + nhsNumber + " <<<<<<<<<<"); EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); String sql = "select patient_id, service_id, local_id, ccg_code" + " from eds.patient_link_person p" + " inner join eds.patient_link_history h" + " on h.new_person_id = p.person_id" + " inner join admin.service s" + " on s.id = service_id" + " where nhs_number = ?" 
+ " and organisation_type = 'PR'"; PreparedStatement ps = edsConnection.prepareStatement(sql); ps.setString(1, nhsNumber); List<PatientInfo> patientInfos = new ArrayList<>(); //LOG.debug(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { PatientInfo info = new PatientInfo(); info.patientUuid = rs.getString(1); info.serviceUuid = rs.getString(2); info.odsCode = rs.getString(3); info.ccgCode = rs.getString(4); patientInfos.add(info); } ps.close(); edsEntityManager.close(); //check to see if the patient does exist in the CCG but has been deleted or had their NHS number changed for (PatientInfo info: patientInfos) { lines.add("Found " + info); if (!Pattern.matches(ccgCodeRegex, info.ccgCode)) { lines.add("Ignoring as out of CCG area"); continue; } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<ResourceWrapper> history = resourceDal.getResourceHistory(UUID.fromString(info.serviceUuid), ResourceType.Patient.toString(), UUID.fromString(info.patientUuid)); if (history.isEmpty()) { lines.add("No history found for patient"); finding = "ERROR"; comment = "Couldn't find FHIR resource history"; continue; } ResourceWrapper current = history.get(0); if (current.isDeleted()) { lines.add("Patient resource is deleted"); *//*finding = "Deleted"; comment = "Patient record has been deleted from DDS";*//* continue; } Patient currentFhir = (Patient) current.getResource(); String currentNhsNumber = IdentifierHelper.findNhsNumber(currentFhir); lines.add("Current NHS number = " + currentNhsNumber); if (!currentNhsNumber.equals(nhsNumber)) { boolean nhsNumberChanged = false; for (int i=1; i<history.size(); i++) { ResourceWrapper wrapper = history.get(i); if (wrapper.isDeleted()) { continue; } Patient past = (Patient) wrapper.getResource(); String pastNhsNumber = IdentifierHelper.findNhsNumber(past); lines.add("History " + i + " has NHS number " + pastNhsNumber); if (pastNhsNumber != null && pastNhsNumber.equals(nhsNumber)) { ResourceWrapper wrapperChanged = history.get(i-1); String changedNhsNumber = IdentifierHelper.findNhsNumber(past); lines.add("NHS number changed from " + nhsNumber + " to " + changedNhsNumber + " on " + sdf.format(wrapperChanged.getCreatedAt())); finding = "NHS number changed"; comment = "NHS number changed on " + sdf.format(wrapperChanged.getCreatedAt()); nhsNumberChanged = true; break; } } if (nhsNumberChanged) { continue; } } //if NHS number didn't change, then it SHOULD match the existing DB SubscriberResourceMappingDalI subscriberResourceMappingDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); Long enterpriseId = subscriberResourceMappingDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), info.patientUuid); if (enterpriseId == null) { finding = "ERROR"; comment = "Matches current NHS number, so should be in subscriber DB but can't find enterprise ID"; lines.add("" + info.patientUuid + ": no enterprise ID found"); continue; } List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(subscriberConfigName); EnterpriseConnector.ConnectionWrapper first = connectionWrappers.get(0); sql = "SELECT id, target_skid" + " FROM patient" + " LEFT OUTER JOIN link_distributor" + " ON patient.pseudo_id = link_distributor.source_skid" + " WHERE patient.id = ?"; Connection enterpriseConnection = first.getConnection(); PreparedStatement enterpriseStatement = enterpriseConnection.prepareStatement(sql); enterpriseStatement.setLong(1, enterpriseId.longValue()); rs = enterpriseStatement.executeQuery(); if 
(rs.next()) { long id = rs.getLong(1); String pseudoId = rs.getString(2); lines.add("" + info.patientUuid + ": enterprise ID " + id + " with pseudo ID " + pseudoId); lines.add("" + info.patientUuid + ": expected pseudo ID " + calcPseudoId); if (pseudoId.equals(calcPseudoId)) { finding = "Match"; comment = "Matches current NHS number and is in subscriber DB"; lines.add("" + info.patientUuid + ": found in subscriber DB with right pseudo ID"); } else { finding = "Mis-match"; comment = "Matches current NHS number and is in subscriber DB but pseudo ID is different"; lines.add("" + info.patientUuid + ": found in subscriber DB but with wrong pseudo ID"); } } else { finding = "ERROR"; comment = "Matches current NHS number and enterprise ID = " + enterpriseId + " but not in DB"; } enterpriseStatement.close(); enterpriseConnection.close(); continue; } //if we've not found anything above, check patient_search for the NHS number to see if we can work out where they are if (finding == null) { lines.add("Checking patient_search"); //check patient search edsEntityManager = ConnectionManager.getEdsEntityManager(); edsSession = (SessionImpl)edsEntityManager.getDelegate(); edsConnection = edsSession.connection(); sql = "select local_id, ccg_code, pse.registration_start" + " from eds.patient_search ps" + " inner join admin.service s" + " on s.id = ps.service_id" + " and s.organisation_type = 'PR'" + " inner join eds.patient_search_episode pse" + " on pse.service_id = ps.service_id" + " and pse.patient_id = ps.patient_id" + " and pse.registration_end is null" + " where nhs_number = ?" + " order by pse.registration_start desc" + " limit 1"; ps = edsConnection.prepareStatement(sql); ps.setString(1, nhsNumber); //LOG.debug(sql); rs = ps.executeQuery(); if (rs.next()) { String odsCode = rs.getString(1); String ccgCode = rs.getString(2); Date regDate = new Date(rs.getTimestamp(3).getTime()); OdsOrganisation odsOrg = OdsWebService.lookupOrganisationViaRest(odsCode); OdsOrganisation parentOdsOrg = null; if (!Strings.isNullOrEmpty(ccgCode)) { parentOdsOrg = OdsWebService.lookupOrganisationViaRest(ccgCode); } if (odsOrg == null) { lines.add("Registered at " + odsCode + " but failed to find ODS record for " + odsCode); finding = "ERROR"; comment = "Registered at " + odsCode + " but not found in open ODS"; } else if (parentOdsOrg == null) { finding = "ERROR"; comment = "Registered at " + odsOrg.getOdsCode() + " " + odsOrg.getOrganisationName() + " but no ODS record found for parent " + ccgCode; } else { finding = "Out of area"; comment = "Patient registered in " + parentOdsOrg.getOdsCode() + " " + parentOdsOrg.getOrganisationName() + " since " + sdf.format(regDate); } } else { finding = "Unknown"; comment = "No data for NHS number found (within scope of DDS)"; } ps.close(); edsEntityManager.close(); } outputPrinter.printRecord(nhsNumber, calcPseudoId, finding, comment); } Files.write(outputTextFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE); outputPrinter.close(); LOG.info("Finished Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); } catch (Throwable t) { LOG.error("", t); } } static class PatientInfo { String patientUuid; String serviceUuid; String odsCode; String ccgCode; @Override public String toString() { return "ods " + odsCode + ", ccgCode " + ccgCode + ", serviceUuid " + serviceUuid + ", patientUUID " + patientUuid; } }*/ /*static class NhsNumberInfo { String odsCode; String date; String patientGuid; String 
patientUuid; String nhsNumber; String deleted; @Override public String toString() { return "ods " + odsCode + ", date " + date + ", patientGuid " + patientGuid + ", patientUUID " + patientUuid + ", NHS " + nhsNumber + ", deleted " + deleted; } } private static void investigateMissingPatients(String nhsNumberFile, String protocolName, String subscriberConfigName, String odsCodeRegex) { LOG.info("Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); //go through all publishers and find all history of NHS numbers File nhsNumberHistoryFile = new File(protocolName.replace(" ", "_") + "_NHS_number_history.txt"); if (!nhsNumberHistoryFile.exists()) { LOG.info("Need to create NHS number history file " + nhsNumberHistoryFile); LibraryItem matchedLibraryItem = null; LibraryDalI repository = DalProvider.factoryLibraryDal(); List<ActiveItem> activeItems = repository.getActiveItemByTypeId(Integer.valueOf(DefinitionItemType.Protocol.getValue()), Boolean.valueOf(false)); for (ActiveItem activeItem: activeItems) { Item item = repository.getItemByKey(activeItem.getItemId(), activeItem.getAuditId()); String xml = item.getXmlContent(); LibraryItem libraryItem = (LibraryItem) XmlSerializer.deserializeFromString(LibraryItem.class, xml, (String)null); String name = libraryItem.getName(); if (name.equals(protocolName)) { matchedLibraryItem = libraryItem; break; } } if (matchedLibraryItem == null) { throw new Exception("Failed to find protocol"); } for (ServiceContract serviceContract: matchedLibraryItem.getProtocol().getServiceContract()) { if (serviceContract.getType() == ServiceContractType.SUBSCRIBER) { continue; } String serviceIdStr = serviceContract.getService().getUuid(); UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceDal.getById(serviceId); String odsCode = service.getLocalId(); if (odsCodeRegex != null && !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } LOG.info("Doing " + service); List<String> lines = new ArrayList<>(); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); ExchangePayloadFile patientFile = findFileOfType(files, "Admin_Patient"); if (patientFile == null) { continue; } Date dataDate = exchange.getHeaderAsDate(HeaderKeys.DataDate); String dateDateStr = sdf.format(dataDate); //work out file version List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); //create the parser String path = patientFile.getPath(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); String patientUuidStr = null; UUID patientUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.Patient, patientGuid); if (patientUuid == null) { patientUuidStr = "NOUUID"; } else { 
patientUuidStr = patientUuid.toString(); } CsvCell nhsNumberCell = parser.getNhsNumber(); String nhsNumber = nhsNumberCell.getString(); if (Strings.isNullOrEmpty(nhsNumber)) { nhsNumber = "BLANK"; } else { nhsNumber.replace(" ", ""); } CsvCell deletedCell = parser.getDeleted(); String deletedStr = deletedCell.getString(); lines.add(odsCode + "_" + dateDateStr + "_" + patientGuid + "_" + patientUuidStr + "_" + nhsNumber + "_" + deletedStr); } parser.close(); } } Files.write(nhsNumberHistoryFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); LOG.debug("Done " + service); } LOG.info("Created NHS number history file"); } Map<String, Set<String>> hmNhsNumberToPatientGuid = new HashMap<>(); Map<String, List<NhsNumberInfo>> hmPatientGuidHistory = new HashMap<>(); LOG.info("Reading in NHS number history"); int total = 0; String currentOdsCode = null; int odsCodesDone = 0; int totalAtOdsCode = 0; FileReader fr = new FileReader(nhsNumberHistoryFile); BufferedReader br = new BufferedReader(fr); while (true) { String historyLine = br.readLine(); if (historyLine == null) { break; } try { String[] toks = historyLine.split("_"); NhsNumberInfo info = new NhsNumberInfo(); info.odsCode = toks[0]; info.date = toks[1]; info.patientGuid = toks[2]; info.patientUuid = toks[3]; info.nhsNumber = toks[4]; info.deleted = toks[5]; //skip the Community services if (info.odsCode.equals("16441") || info.odsCode.equals("16456") || info.odsCode.equals("16962") || info.odsCode.equals("16998") || info.odsCode.equals("19594") || info.odsCode.equals("29605") || info.odsCode.equals("30159") || info.odsCode.equals("R1H") || info.odsCode.equals("R1H14") || info.odsCode.equals("R1H15") || info.odsCode.equals("RQX") || info.odsCode.equals("RWKGY")) { continue; } if (currentOdsCode == null || !currentOdsCode.equals(info.odsCode)) { currentOdsCode = info.odsCode; totalAtOdsCode = 0; odsCodesDone ++; LOG.info("Starting " + currentOdsCode + " org " + odsCodesDone); } Set<String> s = hmNhsNumberToPatientGuid.get(info.nhsNumber); if (s == null) { s = new HashSet<>(); hmNhsNumberToPatientGuid.put(info.nhsNumber, s); } s.add(info.patientGuid); List<NhsNumberInfo> l2 = hmPatientGuidHistory.get(info.patientGuid); if (l2 == null) { l2 = new ArrayList<>(); hmPatientGuidHistory.put(info.patientGuid, l2); } boolean addNew = true; if (!l2.isEmpty()) { //if this is just telling us the same as the previous one, ignore it NhsNumberInfo previous = l2.get(l2.size()-1); if (previous.nhsNumber.equals(info.nhsNumber)) { addNew = false; } } if (addNew) { l2.add(info); total ++; totalAtOdsCode ++; if (totalAtOdsCode % 10000 == 0) { LOG.info("Done " + totalAtOdsCode + " at " + currentOdsCode + " (total " + total + ")"); } } } catch (Exception ex) { throw new Exception("Error parsing line [" + historyLine + "]", ex); } } LOG.info("Read in NHS number history"); String salt = null; JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber"); ArrayNode linked = (ArrayNode)config.get("linkedDistributors"); for (int i=0; i<linked.size(); i++) { JsonNode linkedElement = linked.get(i); String name = linkedElement.get("saltKeyName").asText(); if (name.equals("EGH")) { salt = linkedElement.get("salt").asText(); } } //go through file and check File inputFile = new File(nhsNumberFile); if (!inputFile.exists()) { throw new Exception(nhsNumberFile + " doesn't exist"); } List<String> nhsNumbers = Files.readAllLines(inputFile.toPath()); LOG.info("Found " + nhsNumbers.size()); String 
fileName = FilenameUtils.getBaseName(nhsNumberFile); File outputCsvFile = new File("OUTPUT_" + fileName + ".csv"); BufferedWriter bw = new BufferedWriter(new FileWriter(outputCsvFile)); CSVPrinter outputPrinter = new CSVPrinter(bw, CSVFormat.DEFAULT.withHeader("nhs_number", "pseudo_id", "finding", "comment")); List<String> lines = new ArrayList<>(); for (String nhsNumber: nhsNumbers) { LOG.debug("Doing " + nhsNumber); PseudoIdBuilder b = new PseudoIdBuilder(subscriberConfigName, "EGH", salt); b.addValueNhsNumber("NhsNumber", nhsNumber, null); String calcPseudoId = b.createPseudoId(); String finding = null; String comment = null; lines.add(">>>>>>>>> " + nhsNumber + " <<<<<<<<<<"); Set<String> patientGuids = hmNhsNumberToPatientGuid.get(nhsNumber); if (patientGuids != null && !patientGuids.isEmpty()) { lines.add("Matches " + patientGuids.size() + " patient GUIDs: " + patientGuids); for (String patientGuid : patientGuids) { List<NhsNumberInfo> history = hmPatientGuidHistory.get(patientGuid); if (history == null) { throw new Exception("No history for patient GUID " + patientGuid); } //some very old data was received into AWS out of order (e.g. F84081), so need to sort them history.sort((o1, o2) -> o1.date.compareTo(o2.date)); //see if it matches the CURRENT NHS number from the Emis data NhsNumberInfo currentInfo = history.get(history.size() - 1); if (currentInfo.nhsNumber.equals(nhsNumber)) { lines.add("" + patientGuid + ": matches CURRENT NHS number (at " + currentInfo.odsCode + "), so SHOULD be in subscriber DB"); SubscriberResourceMappingDalI subscriberResourceMappingDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); Long enterpriseId = subscriberResourceMappingDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), currentInfo.patientUuid); if (enterpriseId == null) { String startDateStr = TransformConfig.instance().getEmisStartDate(currentInfo.odsCode); Date startDate = new SimpleDateFormat("dd/MM/yyyy").parse(startDateStr); lines.add("Org start date " + startDateStr); Date receivedDate = new SimpleDateFormat("yyyyMMdd").parse(currentInfo.date); //if only received before the start date, then we won't have processed it if (receivedDate.before(startDate)) { lines.add("Patient data received before org start date so won't have been processed"); //leave the finding null so we check patient_search } else { finding = "ERROR"; comment = "Matches current NHS number, so should be in subscriber DB but can't find enterprise ID"; lines.add("" + patientGuid + ": no enterprise ID found"); for (NhsNumberInfo info : history) { lines.add("" + patientGuid + ": " + info); } } } else { List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(subscriberConfigName); EnterpriseConnector.ConnectionWrapper first = connectionWrappers.get(0); String sql = "SELECT id, target_skid" + " FROM patient" + " LEFT OUTER JOIN link_distributor" + " ON patient.pseudo_id = link_distributor.source_skid" + " WHERE patient.id = ?"; Connection enterpriseConnection = first.getConnection(); PreparedStatement enterpriseStatement = enterpriseConnection.prepareStatement(sql); enterpriseStatement.setLong(1, enterpriseId.longValue()); ResultSet rs = enterpriseStatement.executeQuery(); if (rs.next()) { long id = rs.getLong(1); String pseudoId = rs.getString(2); lines.add("" + patientGuid + ": enterprise ID " + id + " with pseudo ID " + pseudoId); //LOG.debug("Salt = " + salt); lines.add("" + patientGuid + ": expected pseudo ID " + calcPseudoId); if 
(pseudoId.equals(calcPseudoId)) { finding = "Match"; comment = "Matches current NHS number and is in subscriber DB"; lines.add("" + patientGuid + ": found in subscriber DB with right pseudo ID"); } else { finding = "Mis-match"; comment = "Matches current NHS number and is in subscriber DB but pseudo ID is different"; lines.add("" + patientGuid + ": found in subscriber DB but with wrong pseudo ID"); } } else { finding = "ERROR"; comment = "Matches current NHS number and enterprise ID = " + enterpriseId + " but not in DB"; } enterpriseStatement.close(); enterpriseConnection.close(); } } else { lines.add("" + patientGuid + ": doesn't match current NHS number (at " + currentInfo.odsCode + ") which is " + currentInfo.nhsNumber); for (NhsNumberInfo info : history) { lines.add("" + patientGuid + ": " + info); } //find out when the NHS number changed NhsNumberInfo infoChanged = null; for (int i = history.size() - 1; i >= 0; i--) { NhsNumberInfo info = history.get(i); if (info.nhsNumber.equals(nhsNumber)) { infoChanged = history.get(i + 1); break; } } if (infoChanged != null) { lines.add("" + patientGuid + ": NHS number changed on " + infoChanged.date + " (at " + infoChanged.odsCode + ") to " + currentInfo.nhsNumber); finding = "NHS number changed"; comment = "NHS number changed on " + infoChanged.date; //comment = "NHS number changed on " + infoChanged.date + " to " + currentInfo.nhsNumber; } else { lines.add("" + patientGuid + ": ERROR - FAILED TO FIND MATCHING NHS NUMBER IN HISTORY"); finding = "ERROR"; comment = "FAILED TO FIND MATCHING NHS NUMBER IN HISTORY"; } } } } //if we've not found anything above, check patient_search for the NHS number to see if we can work out where they are if (finding == null) { lines.add("Checking patient_search"); //check patient search EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl edsSession = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = edsSession.connection(); String sql = "select local_id, ccg_code" + " from eds.patient_search ps" + " inner join admin.service s" + " on s.id = ps.service_id" + " and s.organisation_type = 'PR'" + " inner join eds.patient_search_episode pse" + " on pse.service_id = ps.service_id" + " and pse.patient_id = ps.patient_id" + " and pse.registration_end is null" + " where nhs_number = ?" 
+ " order by pse.registration_start desc" + " limit 1"; PreparedStatement ps = edsConnection.prepareStatement(sql); ps.setString(1, nhsNumber); //LOG.debug(sql); ResultSet rs = ps.executeQuery(); if (rs.next()) { String odsCode = rs.getString(1); String ccgCode = rs.getString(2); OdsOrganisation odsOrg = OdsWebService.lookupOrganisationViaRest(odsCode); OdsOrganisation parentOdsOrg = null; if (!Strings.isNullOrEmpty(ccgCode)) { parentOdsOrg = OdsWebService.lookupOrganisationViaRest(ccgCode); } if (odsOrg == null) { lines.add("Registered at " + odsCode + " but failed to find ODS record for " + odsCode); finding = "ERROR"; comment = "Registered at " + odsCode + " but not found in open ODS"; } else if (parentOdsOrg == null) { finding = "ERROR"; comment = "Registered at " + odsOrg.getOdsCode() + " " + odsOrg.getOrganisationName() + " but no ODS record found for parent " + ccgCode; } else { finding = "Out of area"; comment = "Patient registered in " + parentOdsOrg.getOdsCode() + " " + parentOdsOrg.getOrganisationName(); } } else { finding = "Unknown"; comment = "No data for NHS number found (within scope of DDS)"; } ps.close(); edsEntityManager.close(); } outputPrinter.printRecord(nhsNumber, calcPseudoId, finding, comment); } File outputFile = new File("OUTPUT_" + nhsNumberFile); Files.write(outputFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.WRITE); outputPrinter.close(); LOG.info("Finished Investigating Missing Patients from " + nhsNumberFile + " in Protocol " + protocolName); } catch (Throwable t) { LOG.error("", t); } }*/ private static void fixMedicationStatementIsActive(String odsCodeRegex) { LOG.info("Fixing MedicationStatement IsActive for using " + odsCodeRegex); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { UUID serviceId = service.getId(); String odsCode = service.getLocalId(); if (odsCodeRegex != null && !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } String serviceIdStr = serviceId.toString(); //find protocols List<LibraryItem> publisherLibraryItems = new ArrayList<>(); List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr, null); for (LibraryItem libraryItem: libraryItems) { for (ServiceContract serviceContract: libraryItem.getProtocol().getServiceContract()) { if (serviceContract.getService().getUuid().equals(serviceIdStr) && serviceContract.getType() == ServiceContractType.PUBLISHER && serviceContract.getActive() == ServiceContractActive.TRUE) { publisherLibraryItems.add(libraryItem); break; } } } if (publisherLibraryItems.isEmpty()) { LOG.debug("Skipping " + service + " as not a publisher to any protocol"); continue; } LOG.debug("Doing " + service); //find subscriber config name and software name for each protocol Map<LibraryItem, String> hmSubscriberConfigNames = new HashMap<>(); Map<LibraryItem, String> hmSoftwareNames = new HashMap<>(); for (LibraryItem libraryItem: publisherLibraryItems) { List<String> subscriberConfigNames = new ArrayList<>(); List<String> softwareNames = new ArrayList<>(); //LOG.debug("Protocol name = " + libraryItem.getName()); for (ServiceContract serviceContract : libraryItem.getProtocol().getServiceContract()) { if (serviceContract.getType() == 
ServiceContractType.SUBSCRIBER && serviceContract.getActive() == ServiceContractActive.TRUE) { String subscriberConfigName = MessageTransformOutbound.getSubscriberEndpoint(serviceContract); //LOG.debug(" subscriber config = [" + subscriberConfigName + "]"); if (!Strings.isNullOrEmpty(subscriberConfigName)) { String technicalInterfaceUuidStr = serviceContract.getTechnicalInterface().getUuid(); String systemUuidStr = serviceContract.getSystem().getUuid(); TechnicalInterface technicalInterface = LibraryRepositoryHelper.getTechnicalInterfaceDetailsUsingCache(systemUuidStr, technicalInterfaceUuidStr); String software = technicalInterface.getMessageFormat(); //ignore any service contracts not for these formats if (!software.equals(MessageFormat.ENTERPRISE_CSV)) { /*if (!software.equals(MessageFormat.ENTERPRISE_CSV) && !software.equals(MessageFormat.SUBSCRIBER_CSV)) {*/ continue; } if (!subscriberConfigNames.contains(subscriberConfigName)) { subscriberConfigNames.add(subscriberConfigName); } if (!softwareNames.contains(software)) { softwareNames.add(software); } } } } //the DPA protocols won't have any subscribers if (subscriberConfigNames.size() == 0) { LOG.debug("Failed to find subscriber config name for protocol " + libraryItem.getName()); continue; } if (subscriberConfigNames.size() > 1) { throw new Exception("Found more than one subscriber config name for protocol " + libraryItem.getName() + ": " + subscriberConfigNames); } String subscriberConfigName = subscriberConfigNames.get(0); String softwareName = softwareNames.get(0); hmSubscriberConfigNames.put(libraryItem, subscriberConfigName); hmSoftwareNames.put(libraryItem, softwareName); LOG.info("Protocol " + libraryItem.getName() + " -> " + softwareName + " @ " + subscriberConfigName); } List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs at service"); Map<String, List<Long>> hmMedicationStatementIdsForService = new HashMap<>(); int found = 0; for (int i = 0; i < patientUuids.size(); i++) { if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + found); } UUID patientUuid = patientUuids.get(i); List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.MedicationStatement.toString()); for (ResourceWrapper resourceWrapper: resourceWrappers) { MedicationStatement medicationStatement = (MedicationStatement)resourceWrapper.getResource(); boolean isActive = medicationStatement.hasStatus() && medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.ACTIVE; if (!isActive) { continue; } found ++; for (LibraryItem libraryItem: publisherLibraryItems) { String subscriberConfigName = hmSubscriberConfigNames.get(libraryItem); if (Strings.isNullOrEmpty(subscriberConfigName)) { continue; } String softwareName = hmSoftwareNames.get(libraryItem); SubscriberResourceMappingDalI subscriberDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); Long id = null; if (softwareName.equals(MessageFormat.ENTERPRISE_CSV)) { Long enterpriseId = subscriberDal.findEnterpriseIdOldWay(ResourceType.MedicationStatement.toString(), resourceWrapper.getResourceId().toString()); if (enterpriseId != null) { id = enterpriseId; } } else if (softwareName.equals(MessageFormat.SUBSCRIBER_CSV)) { String ref = resourceWrapper.getReferenceString(); SubscriberId subscriberId = subscriberDal.findSubscriberId(SubscriberTableId.MEDICATION_STATEMENT.getId(), ref); if (subscriberId != null) { id = 
subscriberId.getSubscriberId(); } } else { //throw new Exception("Unexpected software name " + softwareName); } if (id != null) { List<Long> l = hmMedicationStatementIdsForService.get(subscriberConfigName); if (l == null) { l = new ArrayList<>(); hmMedicationStatementIdsForService.put(subscriberConfigName, l); } l.add(id); } } } } LOG.info("Finished, Done " + patientUuids.size() + " and found " + found); for (String subscriberConfigName: hmMedicationStatementIdsForService.keySet()) { List<Long> medicationStatementIdsForService = hmMedicationStatementIdsForService.get(subscriberConfigName); List<String> lines = new ArrayList<>(); lines.add("#" + odsCode); List<String> batch = new ArrayList<>(); while (!medicationStatementIdsForService.isEmpty()) { Long l = medicationStatementIdsForService.remove(0); batch.add(l.toString()); if (medicationStatementIdsForService.isEmpty() || batch.size() > 50) { String sql = "UPDATE medication_statement SET is_active = 1 WHERE cancellation_date IS NULL AND id IN (" + String.join(",", batch) + ");"; lines.add(sql); batch.clear(); } } LOG.debug("Going to write to file"); File f = new File(subscriberConfigName + ".sql"); Files.write(f.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); LOG.debug("Done write to file"); } } LOG.info("Fixing MedicationStatement IsActive for using " + odsCodeRegex); } catch (Throwable t) { LOG.error("", t); } } /*private static void fixMedicationStatementIsActive(String protocolName, String filePath, String odsCodeRegex) { LOG.info("Fixing MedicationStatement IsActive for " + protocolName + " to " + filePath + " matching orgs using " + odsCodeRegex); try { Set<String> odsCodesDone = new HashSet<>(); File f = new File(filePath); if (f.exists()) { List<String> lines = FileUtils.readLines(f); for (String line: lines) { if (line.startsWith("#")) { String odsCode = line.substring(1); odsCodesDone.add(odsCode); } } } //find services in protocol //find subscriber config details ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { UUID serviceId = service.getId(); String serviceIdStr = serviceId.toString(); //find protocol LibraryItem matchedProtocol = null; List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceId.toString(), null); for (LibraryItem protocol: libraryItems) { if (protocol.getName().equalsIgnoreCase(protocolName)) { matchedProtocol = protocol; break; } } if (matchedProtocol == null) { LOG.debug("Skipping " + service + " as not in protocol " + protocolName); continue; } //ensure in protocol as a publisher boolean isPublisher = false; for (ServiceContract serviceContract: matchedProtocol.getProtocol().getServiceContract()) { if (serviceContract.getService().getUuid().equals(serviceIdStr) && serviceContract.getType() == ServiceContractType.PUBLISHER && serviceContract.getActive() == ServiceContractActive.TRUE) { isPublisher = true; break; } } if (!isPublisher) { LOG.debug("Skipping " + service + " as not a publisher to protocol " + protocolName); continue; } String odsCode = service.getLocalId(); if (odsCodeRegex != null && !Pattern.matches(odsCodeRegex, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } if (odsCodesDone.contains(odsCode)) { LOG.debug("Already done " + service); continue; } LOG.debug("Doing 
" + service); //find subscriber config name List<String> subscriberConfigNames = new ArrayList<>(); List<String> softwareNames = new ArrayList<>(); for (ServiceContract serviceContract: matchedProtocol.getProtocol().getServiceContract()) { if (serviceContract.getType() == ServiceContractType.SUBSCRIBER && serviceContract.getActive() == ServiceContractActive.TRUE) { String subscriberConfigName = MessageTransformOutbound.getSubscriberEndpoint(serviceContract); if (!Strings.isNullOrEmpty(subscriberConfigName)) { subscriberConfigNames.add(subscriberConfigName); String technicalInterfaceUuidStr = serviceContract.getTechnicalInterface().getUuid(); String systemUuidStr = serviceContract.getSystem().getUuid(); TechnicalInterface technicalInterface = LibraryRepositoryHelper.getTechnicalInterfaceDetailsUsingCache(systemUuidStr, technicalInterfaceUuidStr); String software = technicalInterface.getMessageFormat(); softwareNames.add(software); } } } if (subscriberConfigNames.size() == 0) { throw new Exception("Failed to find subscriber config name for protocol " + protocolName); } if (subscriberConfigNames.size() > 1) { throw new Exception("Found more than one subscriber config name for protocol " + protocolName); } String subscriberConfigName = subscriberConfigNames.get(0); String softwareName = softwareNames.get(0); LOG.info("Protocol " + protocolName + " -> " + softwareName + " @ " + subscriberConfigName); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs at service"); SubscriberResourceMappingDalI subscriberDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName); List<Long> medicationStatementIdsForService = new ArrayList<>(); for (int i = 0; i < patientUuids.size(); i++) { if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + medicationStatementIdsForService.size()); } UUID patientUuid = patientUuids.get(i); List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.MedicationStatement.toString()); for (ResourceWrapper resourceWrapper: resourceWrappers) { MedicationStatement medicationStatement = (MedicationStatement)resourceWrapper.getResource(); boolean isActive = medicationStatement.hasStatus() && medicationStatement.getStatus() == MedicationStatement.MedicationStatementStatus.ACTIVE; if (isActive) { if (softwareName.equals(MessageFormat.ENTERPRISE_CSV)) { Long enterpriseId = subscriberDal.findEnterpriseIdOldWay(ResourceType.MedicationStatement.toString(), resourceWrapper.getResourceId().toString()); if (enterpriseId != null) { medicationStatementIdsForService.add(enterpriseId); } } else if (softwareName.equals(MessageFormat.SUBSCRIBER_CSV)) { String ref = resourceWrapper.getReferenceString(); SubscriberId subscriberId = subscriberDal.findSubscriberId(SubscriberTableId.MEDICATION_STATEMENT.getId(), ref); if (subscriberId != null) { medicationStatementIdsForService.add(subscriberId.getSubscriberId()); } } else { throw new Exception("Unexpected software name " + softwareName); } } } } LOG.info("Done " + patientUuids.size() + " and found " + medicationStatementIdsForService.size()); odsCodesDone.add(odsCode); List<String> lines = new ArrayList<>(); lines.add("#" + odsCode); List<String> batch = new ArrayList<>(); while (!medicationStatementIdsForService.isEmpty()) { Long l = medicationStatementIdsForService.remove(0); batch.add(l.toString()); if (medicationStatementIdsForService.isEmpty() || batch.size() > 50) { String sql = "UPDATE 
medication_statement SET is_active = 1 WHERE cancellation_date IS NULL AND id IN (" + String.join(",", batch) + ");"; lines.add(sql); batch.clear(); } if (lines.size() % 10 == 0) { LOG.debug("Created " + lines.size() + " lines with " + medicationStatementIdsForService.size() + " IDs remaining"); } } LOG.debug("Going to write to file"); Files.write(f.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); LOG.debug("Done write to file"); } LOG.debug("Written to " + f); LOG.info("Finished Fixing Missing Emis Ethnicities to " + filePath); } catch (Throwable t) { LOG.error("", t); } }*/ /** * restores the ethnicity and marital statuses on the Patient resources that were lost * if the "re-registrated patients" fix was run before the "deleted patients" fix. This meant that * the patient resource was re-created from the patient file but the ethnicity and marital status weren't carried * over from the pre-deleted version. */ /*private static void fixMissingEmisEthnicities(String filePath, String filterRegexOdsCode) { LOG.info("Fixing Missing Emis Ethnicities to " + filePath + " matching orgs using " + filterRegexOdsCode); try { Map<String, List<UUID>> hmPatientIds = new HashMap<>(); File f = new File(filePath); if (f.exists()) { List<String> lines = FileUtils.readLines(f); String currentOdsCode = null; for (String line: lines) { if (line.startsWith("#")) { currentOdsCode = line.substring(1); } else { UUID patientId = UUID.fromString(line); List<UUID> s = hmPatientIds.get(currentOdsCode); if (s == null) { s = new ArrayList<>(); hmPatientIds.put(currentOdsCode, s); } s.add(patientId); } } } ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { String odsCode = service.getLocalId(); if (filterRegexOdsCode != null && !Pattern.matches(filterRegexOdsCode, odsCode)) { LOG.debug("Skipping " + service + " due to regex"); continue; } if (hmPatientIds.containsKey(odsCode)) { LOG.debug("Already done " + service); continue; } LOG.debug("Doing " + service); UUID serviceId = service.getId(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs at service"); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<UUID> patientIdsForService = new ArrayList<>(); List<ResourceWrapper> resourceWrappersToSave = new ArrayList<>(); for (int i = 0; i < patientUuids.size(); i++) { if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + patientIdsForService.size()); } UUID patientUuid = patientUuids.get(i); List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientUuid); ResourceWrapper current = history.get(0); if (current.isDeleted()) { continue; } //if only one history record, no point looking back if (history.size() == 1) { continue; } Patient p = (Patient)current.getResource(); PatientBuilder patientBuilder = new PatientBuilder(p); //see if both already present EthnicCategory currentEthnicCategory = patientBuilder.getEthnicity(); MaritalStatus currentMaritalStatus = patientBuilder.getMaritalStatus(); if (currentEthnicCategory != null && currentMaritalStatus != null) { continue; } EthnicCategory newEthnicCategory = null; MaritalStatus newMaritalStatus = null; for (int j=1; j<history.size(); j++) { ResourceWrapper previousWrapper = history.get(j); if (previousWrapper.isDeleted()) 
{ continue; } Patient previous = (Patient)previousWrapper.getResource(); PatientBuilder previousPatientBuilder = new PatientBuilder(previous); if (newEthnicCategory == null) { newEthnicCategory = previousPatientBuilder.getEthnicity(); } if (newMaritalStatus == null) { newMaritalStatus = previousPatientBuilder.getMaritalStatus(); } } if (newEthnicCategory == currentEthnicCategory && newMaritalStatus == currentMaritalStatus) { continue; } boolean fixed = false; if (newEthnicCategory != null) { patientBuilder.setEthnicity(newEthnicCategory); fixed = true; } if (newMaritalStatus != null) { patientBuilder.setMaritalStatus(newMaritalStatus); fixed = true; } if (fixed) { p = (Patient) patientBuilder.getResource(); String newJson = FhirSerializationHelper.serializeResource(p); current.setResourceData(newJson); patientIdsForService.add(patientUuid); resourceWrappersToSave.add(current); } } LOG.info("Done " + patientUuids.size() + " and found " + patientIdsForService.size()); hmPatientIds.put(odsCode, patientIdsForService); List<String> lines = new ArrayList<>(); for (String odsCodeDone: hmPatientIds.keySet()) { lines.add("#" + odsCodeDone); List<UUID> patientIdsDone = hmPatientIds.get(odsCodeDone); for (UUID patientIdDone: patientIdsDone) { lines.add(patientIdDone.toString()); } } FileUtils.writeLines(f, lines); //only now we've stored the affected patient IDs in the file should we actually update the DB for (ResourceWrapper wrapper: resourceWrappersToSave) { saveResourceWrapper(serviceId, wrapper); } //and re-queue the affected patients for sending to subscribers QueueHelper.queueUpPatientsForTransform(patientIdsForService); } LOG.debug("Written to " + f); LOG.info("Finished Fixing Missing Emis Ethnicities to " + filePath); } catch (Throwable t) { LOG.error("", t); } }*/ /** * updates patient_search and patient_link tables for explicit list of patient UUIDs */ /*private static void updatePatientSearch(String filePath) throws Exception { LOG.info("Updating patient search from " + filePath); try { File f = new File(filePath); if (!f.exists()) { LOG.error("File " + f + " doesn't exist"); return; } List<UUID> patientIds = new ArrayList<>(); List<String> lines = FileUtils.readLines(f); for (String line: lines) { line = line.trim(); if (line.startsWith("#")) { continue; } UUID uuid = UUID.fromString(line); patientIds.add(uuid); } LOG.info("Found " + patientIds.size() + " patient UUIDs"); Map<String, UUID> hmPublishers = new HashMap<>(); List<String> publishers = new ArrayList<>(); publishers.add("publisher_01"); publishers.add("publisher_02"); publishers.add("publisher_03"); publishers.add("publisher_04"); publishers.add("publisher_04b"); publishers.add("publisher_05"); publishers.add("publisher_05_nwl_tmp"); publishers.add("publisher_05_sel_tmp"); File changedFile = new File(filePath + "changed"); for (String publisher: publishers) { UUID serviceId = findSuitableServiceIdForPublisherConfig(publisher); hmPublishers.put(publisher, serviceId); } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); int done = 0; int skipped = 0; for (UUID patientId: patientIds) { LOG.info("Doing patient " + patientId); //we need to find a service ID for the patient, so we know where to get the resources from UUID serviceId = null; //try the patient_search table first PatientSearch ps = patientSearchDal.searchByPatientId(patientId); if (ps != null) { serviceId = 
ps.getServiceId(); } //if service ID is still null, then try looking in the patient_link table if (serviceId == null) { String personId = patientLinkDal.getPersonId(patientId.toString()); Map<String, String> map = patientLinkDal.getPatientAndServiceIdsForPerson(personId); if (map.containsKey(patientId.toString())) { serviceId = UUID.fromString(map.get(patientId.toString())); } } List<ResourceWrapper> history = null; if (serviceId != null) { //if we have a service ID, then retrieve the resource history directly from that DB history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientId); } else { //if we still don't have a service ID, then test each Corexx DB in turn for (String publisher: hmPublishers.keySet()) { UUID exampleServiceId = hmPublishers.get(publisher); List<ResourceWrapper> publisherHistory = resourceDal.getResourceHistory(exampleServiceId, ResourceType.Patient.toString(), patientId); if (!publisherHistory.isEmpty()) { history = publisherHistory; LOG.info("Found resource history for patient " + patientId + " on " + publisher); break; } } } if (history == null || history.isEmpty()) { LOG.error("Failed to find any resource history for patient " + patientId); skipped ++; continue; } ResourceWrapper mostRecent = history.get(0); serviceId = mostRecent.getServiceId(); PatientLinkPair patientLink = null; if (mostRecent.isDeleted()) { //find most recent non-deleted ResourceWrapper nonDeleted = null; for (ResourceWrapper wrapper: history) { if (!wrapper.isDeleted()) { nonDeleted = wrapper; break; } } if (nonDeleted == null) { LOG.error("No non-deleted Patient resource for " + patientId); skipped ++; continue; } Patient p = (Patient)nonDeleted.getResource(); patientSearchDal.update(serviceId, p); patientLink = patientLinkDal.updatePersonId(serviceId, p); //and call this to mark the patient_search record as deleted patientSearchDal.deletePatient(serviceId, p); } else { //LOG.debug("Patient wasn't deleted"); Patient p = (Patient)mostRecent.getResource(); patientSearchDal.update(serviceId, p); patientLink = patientLinkDal.updatePersonId(serviceId, p); } //if the person ID was changed, write this to a file if (patientLink.getNewPersonId() != null) { List<String> updateLines = new ArrayList<>(); updateLines.add(patientId.toString()); Files.write(changedFile.toPath(), updateLines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); } done ++; if (done % 500 == 0) { LOG.debug("Done " + done + " Skipped " + skipped); } } LOG.debug("Done " + done + " Skipped " + skipped); LOG.info("Finished Updating patient search from " + filePath); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void runPersonUpdater(String enterpriseConfigName) throws Exception { try { LOG.info("Person updater starting for " + enterpriseConfigName); //create this date BEFORE we get the date we last run, so there's no risk of a gap Date dateNextRun = new Date(); EnterprisePersonUpdaterHistoryDalI enterprisePersonUpdaterHistoryDal = DalProvider.factoryEnterprisePersonUpdateHistoryDal(enterpriseConfigName); Date dateLastRun = enterprisePersonUpdaterHistoryDal.findDatePersonUpdaterLastRun(); LOG.info("Looking for Person ID changes since " + dateLastRun); PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal(); List<PatientLinkPair> changes = patientLinkDal.getChangesSince(dateLastRun); LOG.info("Found " + changes.size() + " changes in Person ID"); //find the Enterprise Person ID for each of the changes, hashing them by the enterprise 
instance they're on List<UpdateJob> updates = convertChangesToEnterprise(enterpriseConfigName, changes); List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(enterpriseConfigName); for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) { LOG.info("Updating " + updates.size() + " person IDs on " + connectionWrapper.getUrl()); Connection connection = connectionWrapper.getConnection(); try { List<String> tables = findTablesWithPersonId(connection); for (UpdateJob update: updates) { changePersonId(update, connection, tables); } //and delete any person records that no longer have any references to them LOG.info("Going to delete orphaned persons"); deleteOrphanedPersons(connection); } finally { connection.close(); } } enterprisePersonUpdaterHistoryDal.updatePersonUpdaterLastRun(dateNextRun); LOG.info("Person updates complete"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void deleteOrphanedPersons(Connection connection) throws Exception { String sql = "SELECT id FROM person" + " WHERE NOT EXISTS (" + " SELECT 1" + " FROM patient" + " WHERE patient.person_id = person.id)"; PreparedStatement ps = connection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); List<Long> ids = new ArrayList<>(); while (rs.next()) { long id = rs.getLong(1); ids.add(new Long(id)); } LOG.info("Found " + ids.size() + " orphaned persons to delete"); rs.close(); ps.close(); sql = "DELETE FROM person WHERE id = ?"; ps = connection.prepareStatement(sql); for (int i=0; i<ids.size(); i++) { Long id = ids.get(i); ps.setLong(1, id); ps.addBatch(); //execute the batch every 50 and at the end if (i % 50 == 0 || i+1 == ids.size()) { ps.executeBatch(); } } connection.commit(); }*/ /*private static List<UpdateJob> convertChangesToEnterprise(String enterpriseConfigName, List<PatientLinkPair> changes) throws Exception { List<UpdateJob> updatesForConfig = new ArrayList<>(); for (PatientLinkPair change: changes) { String oldDiscoveryPersonId = change.getPreviousPersonId(); String newDiscoveryPersonId = change.getNewPersonId(); String discoveryPatientId = change.getPatientId(); SubscriberResourceMappingDalI enterpriseIdDalI = DalProvider.factorySubscriberResourceMappingDal(enterpriseConfigName); Long enterprisePatientId = enterpriseIdDalI.findEnterpriseIdOldWay(ResourceType.Patient.toString(), discoveryPatientId); //if this patient has never gone to enterprise, then skip it if (enterprisePatientId == null) { continue; } SubscriberPersonMappingDalI personMappingDal = DalProvider.factorySubscriberPersonMappingDal(enterpriseConfigName); List<Long> mappings = personMappingDal.findEnterprisePersonIdsForPersonId(oldDiscoveryPersonId); for (Long oldEnterprisePersonId: mappings) { Long newEnterprisePersonId = personMappingDal.findOrCreateEnterprisePersonId(newDiscoveryPersonId); updatesForConfig.add(new UpdateJob(enterprisePatientId, oldEnterprisePersonId, newEnterprisePersonId)); } } return updatesForConfig; }*/ private static void changePersonId(UpdateJob change, Connection connection, List<String> tables) throws Exception { for (String tableName: tables) { changePersonIdOnTable(tableName, change, connection); } connection.commit(); LOG.info("Updated person ID from " + change.getOldEnterprisePersonId() + " to " + change.getNewEnterprisePersonId() + " for patient " + change.getEnterprisePatientId()); } /*private static void changePersonId(UpdateJob change, Connection connection) throws Exception { OutputContainer outputContainer = new 
OutputContainer(true); //doesn't matter what we pass into the constructor

        //the csv writers are mapped to the tables in the database, so we can use them to discover
        //what tables have person and patient ID columns
        List<AbstractEnterpriseCsvWriter> csvWriters = outputContainer.getCsvWriters();

        //the writers are in dependency order (least dependent -> most) so we need to go backwards to avoid
        //upsetting any foreign key constraints
        for (int i=csvWriters.size()-1; i>=0; i--) {
            AbstractEnterpriseCsvWriter csvWriter = csvWriters.get(i);
            String[] csvHeaders = csvWriter.getCsvHeaders();
            for (String header: csvHeaders) {
                if (header.equalsIgnoreCase("person_id")) {
                    String fileName = csvWriter.getFileName();
                    String tableName = FilenameUtils.removeExtension(fileName);
                    changePersonIdOnTable(tableName, change, connection);
                    break;
                }
            }
        }

        connection.commit();

        LOG.info("Updated person ID from " + change.getOldEnterprisePersonId() + " to " + change.getNewEnterprisePersonId() + " for patient " + change.getEnterprisePatientId());
    }*/

    /*private static List<String> findTablesWithPersonId(Connection connection) throws Exception {
        Statement statement = connection.createStatement();

        String dbNameSql = "SELECT DATABASE()";
        ResultSet rs = statement.executeQuery(dbNameSql);
        rs.next();
        String dbName = rs.getString(1);
        rs.close();

        String tableNameSql = "SELECT t.table_name"
                + " FROM information_schema.tables t"
                + " INNER JOIN information_schema.columns c"
                + " ON c.table_name = t.table_name"
                + " AND c.table_schema = t.table_schema"
                + " WHERE t.table_schema = '" + dbName + "'"
                + " AND c.column_name = 'person_id'";
        rs = statement.executeQuery(tableNameSql);

        List<String> ret = new ArrayList<>();
        while (rs.next()) {
            String tableName = rs.getString(1);
            ret.add(tableName);
        }

        rs.close();
        statement.close();

        return ret;
    }*/
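
    //The helper below issues one UPDATE per subscriber table to move rows from an old person ID to a
    //new one. As a sketch, for a hypothetical table name of "observation" the generated statement is:
    //    UPDATE observation SET person_id = ? WHERE patient_id = ? AND person_id = ?
    //with the parameters bound as (new person ID, enterprise patient ID, old person ID); the "patient"
    //table itself is matched on its "id" column rather than "patient_id".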
"); } sb.append("AND person_id = ?"); PreparedStatement update = connection.prepareStatement(sb.toString()); update.setLong(1, change.getNewEnterprisePersonId()); update.setLong(2, change.getEnterprisePatientId()); update.setLong(3, change.getOldEnterprisePersonId()); update.addBatch(); update.executeBatch(); } static class UpdateJob { private Long enterprisePatientId = null; private Long oldEnterprisePersonId = null; private Long newEnterprisePersonId = null; public UpdateJob(Long enterprisePatientId, Long oldEnterprisePersonId, Long newEnterprisePersonId) { this.enterprisePatientId = enterprisePatientId; this.oldEnterprisePersonId = oldEnterprisePersonId; this.newEnterprisePersonId = newEnterprisePersonId; } public Long getEnterprisePatientId() { return enterprisePatientId; } public Long getOldEnterprisePersonId() { return oldEnterprisePersonId; } public Long getNewEnterprisePersonId() { return newEnterprisePersonId; } } /*private static void findPatientsThatNeedTransforming(String file, String filterOdsCode) { LOG.info("Finding patients that need transforming for " + filterOdsCode + " for " + file); try { Map<String, List<UUID>> hmPatientIds = new HashMap<>(); File f = new File(file); if (f.exists()) { List<String> lines = FileUtils.readLines(f); String currentOdsCode = null; for (String line: lines) { if (line.startsWith("#")) { currentOdsCode = line.substring(1); } else { UUID patientId = UUID.fromString(line); List<UUID> s = hmPatientIds.get(currentOdsCode); if (s == null) { s = new ArrayList<>(); hmPatientIds.put(currentOdsCode, s); } s.add(patientId); } } } ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { String odsCode = service.getLocalId(); if (filterOdsCode != null && filterOdsCode.equals(odsCode)) { continue; } if (hmPatientIds.containsKey(odsCode)) { LOG.debug("Already done " + service); continue; } LOG.debug("Doing " + service); UUID serviceId = service.getId(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs"); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<UUID> patientIdsForService = new ArrayList<>(); for (int i = 0; i < patientUuids.size(); i++) { UUID patientUuid = patientUuids.get(i); boolean shouldBeInSubscriber = false; List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientUuid); boolean addPatient = false; for (int j = history.size() - 1; j >= 0; j--) { ResourceWrapper wrapper = history.get(j); if (wrapper.isDeleted()) { continue; } Patient patient = (Patient) wrapper.getResource(); //any confidential patient should be in the DB because they were previously filtered out BooleanType bt = (BooleanType) ExtensionConverter.findExtensionValue(patient, FhirExtensionUri.IS_CONFIDENTIAL); if (bt != null && bt.hasValue() && bt.getValue().booleanValue()) { addPatient = true; break; } //and patient w/o NHS number should be in the DB because they were previously filtered out //any patient with 999999 NHS number should be added so they get stripped out String nhsNumber = IdentifierHelper.findNhsNumber(patient); if (Strings.isNullOrEmpty(nhsNumber) || nhsNumber.startsWith("999999")) { addPatient = true; break; } if (j == history.size() - 1) { //find first NHS number known shouldBeInSubscriber = 
SubscriberTransformHelper.shouldPatientBePresentInSubscriber(patient); } else { boolean thisShouldBeInSubscriber = SubscriberTransformHelper.shouldPatientBePresentInSubscriber(patient); if (shouldBeInSubscriber != thisShouldBeInSubscriber) { addPatient = true; break; } } } if (addPatient) { patientIdsForService.add(patientUuid); } if (i % 1000 == 0) { LOG.info("Done " + i + " and found " + patientIdsForService.size()); } } hmPatientIds.put(odsCode, patientIdsForService); LOG.debug("Found " + patientIdsForService.size() + " affected"); QueueHelper.queueUpPatientsForTransform(patientIdsForService); List<String> lines = new ArrayList<>(); for (String odsCodeDone: hmPatientIds.keySet()) { lines.add("#" + odsCodeDone); List<UUID> patientIdsDone = hmPatientIds.get(odsCodeDone); for (UUID patientIdDone: patientIdsDone) { lines.add(patientIdDone.toString()); } } FileUtils.writeLines(f, lines); } LOG.debug("Written to " + f); } catch (Throwable t) { LOG.error("", t); } }*/ private static void transformPatients(String sourceFile) { LOG.info("Transforming patients from " + sourceFile); try { List<UUID> patientIds = new ArrayList<>(); File f = new File(sourceFile); if (!f.exists()) { LOG.error("File " + f + " doesn't exist"); return; } List<String> lines = FileUtils.readLines(f); for (String line: lines) { line = line.trim(); if (line.startsWith("#")) { continue; } UUID uuid = UUID.fromString(line); patientIds.add(uuid); } if (patientIds.isEmpty()) { LOG.error("No patient IDs found"); return; } LOG.info("Found " + patientIds.size() + " patient IDs"); QueueHelper.queueUpPatientsForTransform(patientIds); LOG.info("Finished transforming patients from " + sourceFile); } catch (Throwable t) { LOG.error("", t); } } /*private static void countNhsNumberChanges(String odsCodes) { LOG.info("Counting NHS number changes for " + odsCodes); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); Map<String, Date> hmEarliestDate = new HashMap<>(); Map<String, Integer> hmPatientCount = new HashMap<>(); Map<String, Map<Date, List<UUID>>> hmCounts = new HashMap<>(); String[] toks = odsCodes.split(","); for (String odsCode: toks) { Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Doing " + service.getName() + " " + service.getLocalId()); UUID serviceId = service.getId(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); List<UUID> patientUuids = patientSearchDal.getPatientIds(serviceId); LOG.info("Found " + patientUuids.size() + " patient UUIDs"); Date earliestDate = null; Map<Date, List<UUID>> hmChanges = new HashMap<>(); for (int i = 0; i < patientUuids.size(); i++) { UUID patientUuid = patientUuids.get(i); String previousNhsNumber = null; List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientUuid); for (int j = history.size() - 1; j >= 0; j--) { ResourceWrapper wrapper = history.get(j); Date d = wrapper.getCreatedAt(); //work out bulk date if (earliestDate == null || d.before(earliestDate)) { earliestDate = d; } if (wrapper.isDeleted()) { continue; } Patient patient = (Patient) wrapper.getResource(); if (j == history.size() - 1) { //find first NHS number known previousNhsNumber = IdentifierHelper.findNhsNumber(patient); } else { String thisNhsNumber = IdentifierHelper.findNhsNumber(patient); if ((thisNhsNumber == null && previousNhsNumber != null) //|| (thisNhsNumber != null && 
previousNhsNumber == null) //don't count it going FROM null to non-null as a change || (thisNhsNumber != null && previousNhsNumber != null && !thisNhsNumber.equals(previousNhsNumber))) { //changed LOG.info("" + patientUuid + " changed NHS number on " + sdf.format(d)); List<UUID> l = hmChanges.get(d); if (l == null) { l = new ArrayList<>(); hmChanges.put(d, l); } l.add(patientUuid); previousNhsNumber = thisNhsNumber; } } } if (i % 1000 == 0) { LOG.info("Done " + i); } } hmEarliestDate.put(odsCode, earliestDate); hmPatientCount.put(odsCode, new Integer(patientUuids.size())); hmCounts.put(odsCode, hmChanges); } List<String> colHeaders = new ArrayList<>(); colHeaders.add("Year"); colHeaders.add("Month"); colHeaders.addAll(Arrays.asList(toks)); String[] headerArray = colHeaders.toArray(new String[]{}); CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader(headerArray); FileWriter fileWriter = new FileWriter("NHS_number_changes.csv"); CSVPrinter csvPrinter = new CSVPrinter(fileWriter, csvFormat); //patient count List<String> row = new ArrayList<>(); row.add("Patient Count"); row.add(""); for (String odsCode: toks) { Integer count = hmPatientCount.get(odsCode); if (count == null) { row.add("0"); } else { row.add("" + count); } } csvPrinter.printRecord(row.toArray()); //start date row = new ArrayList<>(); row.add("Bulk Date"); row.add(""); for (String odsCode: toks) { Date startDate = hmEarliestDate.get(odsCode); if (startDate == null) { row.add("not found"); } else { row.add("" + sdf.format(startDate)); } } csvPrinter.printRecord(row.toArray()); for (int year=2017; year<=2019; year++) { for (int month=Calendar.JANUARY; month<=Calendar.DECEMBER; month++) { String monthStr = "" + month; if (monthStr.length() < 2) { monthStr = "0" + monthStr; } Date monthStart = sdf.parse("" + year + monthStr + "01"); Calendar cal = Calendar.getInstance(); cal.setTime(monthStart); cal.add(Calendar.MONTH, 1); cal.add(Calendar.DAY_OF_YEAR, -1); Date monthEnd = cal.getTime(); row = new ArrayList<>(); row.add("" + year); row.add("" + (month+1)); for (String odsCode: toks) { Date startDate = hmEarliestDate.get(odsCode); if (startDate == null || startDate.after(monthStart)) { row.add(""); } else { int changes = 0; Map<Date, List<UUID>> hmChanges = hmCounts.get(odsCode); if (hmChanges != null) { for (Date d : hmChanges.keySet()) { if (!d.before(monthStart) && !d.after(monthEnd)) { List<UUID> uuids = hmChanges.get(d); changes += uuids.size(); } } } row.add("" + changes); } } csvPrinter.printRecord(row.toArray()); } } csvPrinter.close(); LOG.info("Finished counting NHS number changes for " + odsCodes); } catch (Throwable t) { LOG.error("", t); } }*/ private static void createDigest(String url, String user, String pass, String table, String columnFrom, String columnTo, String base64Salt, String validNhsNumberCol) { LOG.info("Creating Digest value from " + table + "." + columnFrom + " -> " + columnTo); try { byte[] saltBytes = Base64.getDecoder().decode(base64Salt); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); String sql = "SELECT DISTINCT " + columnFrom + " FROM " + table; PreparedStatement psSelect = conn.prepareStatement(sql); psSelect.setFetchSize(1000); Connection conn2 = DriverManager.getConnection(url, props); if (validNhsNumberCol != null) { sql = "UPDATE " + table + " SET " + validNhsNumberCol + " = ?, " + columnTo + " = ? 
WHERE " + columnFrom + " = ?"; } else { sql = "UPDATE " + table + " SET " + columnTo + " = ? WHERE " + columnFrom + " = ?"; } PreparedStatement psUpdate = conn2.prepareStatement(sql); Connection conn3 = DriverManager.getConnection(url, props); if (validNhsNumberCol != null) { sql = "UPDATE " + table + " SET " + validNhsNumberCol + " = ?, " + columnTo + " = ? WHERE " + columnFrom + " IS NULL"; } else { sql = "UPDATE " + table + " SET " + columnTo + " = ? WHERE " + columnFrom + " IS NULL"; } PreparedStatement psUpdateNull = conn3.prepareStatement(sql); LOG.trace("Starting query"); ResultSet rs = psSelect.executeQuery(); LOG.trace("Query results back"); int done = 0; int batchSize = 0; while (rs.next()) { Object o = rs.getObject(1); String value = ""; if (o != null) { value = o.toString(); } TreeMap<String, String> keys = new TreeMap<>(); keys.put("DoesntMatter", value); Crypto crypto = new Crypto(); crypto.SetEncryptedSalt(saltBytes); String pseudoId = crypto.GetDigest(keys); if (o == null) { int col = 1; if (validNhsNumberCol != null) { int validNhsNunmber = isValidNhsNumber(value); psUpdateNull.setInt(col++, validNhsNunmber); } psUpdateNull.setString(col++, pseudoId); psUpdateNull.executeUpdate(); } else { int col = 1; if (validNhsNumberCol != null) { int validNhsNunmber = isValidNhsNumber(value); psUpdate.setInt(col++, validNhsNunmber); } psUpdate.setString(col++, pseudoId); psUpdate.setString(col++, value); psUpdate.addBatch(); batchSize++; if (batchSize >= 10) { psUpdate.executeBatch(); } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done); } } if (batchSize >= 0) { psUpdate.executeBatch(); } rs.close(); psSelect.close(); psUpdate.close(); psUpdateNull.close(); conn.close(); conn2.close(); conn3.close(); LOG.debug("Done " + done); LOG.info("Finished Creating Digest value from " + table + "." 
+ columnFrom + " -> " + columnTo); } catch (Throwable t) { LOG.error("", t); } } private static int isValidNhsNumber(String fieldValue) { if (fieldValue == null) { return -1; } if (fieldValue.isEmpty()) { return -1; } if (fieldValue.length() != 10) { return 0; } int sum = 0; char[] chars = fieldValue.toCharArray(); for (int i=0; i<9; i++) { char c = chars[i]; if (!Character.isDigit(c)) { return 0; } int val = Character.getNumericValue(c); int weight = 10 - i; int m = val * weight; sum += m; //LOG.trace("" + c + " x " + weight + " = " + m + " sum = " + sum); } int remainder = sum % 11; int check = 11 - remainder; //LOG.trace("sum = " + sum + " mod 11 = " + remainder + " check = " + check); if (check == 11) { check = 0; } if (check == 10) { return 0; } char lastChar = chars[9]; int actualCheck = Character.getNumericValue(lastChar); if (check != actualCheck) { return 0; } return 1; } /*private static void checkForBartsMissingFiles(String sinceDate) { LOG.info("Checking for Barts missing files"); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); Date start2019 = sdf.parse(sinceDate); LOG.info("Checking files since " + sinceDate); UUID serviceId = UUID.fromString("b5a08769-cbbe-4093-93d6-b696cd1da483"); UUID systemId = UUID.fromString("e517fa69-348a-45e9-a113-d9b59ad13095"); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); Map<String, List<String>> hmByFileType = new HashMap<>(); Map<String, Date> hmReceivedDate = new HashMap<>(); for (Exchange exchange: exchanges) { String body = exchange.getBody(); //skip any exchanges pre-2019 Date d = exchange.getHeaderAsDate(HeaderKeys.DataDate); if (d.before(start2019)) { continue; } List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(body); for (ExchangePayloadFile file: files) { String type = file.getType(); String path = file.getPath(); String name = FilenameUtils.getName(path); List<String> l = hmByFileType.get(type); if (l == null) { l = new ArrayList<>(); hmByFileType.put(type, l); } l.add(name); hmReceivedDate.put(name, d); } } LOG.info("Parsed exchange bodies"); List<String> types = new ArrayList<>(hmByFileType.keySet()); types.sort((o1, o2) -> o1.compareToIgnoreCase(o2)); for (String type: types) { List<String> files = hmByFileType.get(type); LOG.info("---------------------------------------------------------------------"); LOG.info("Checking " + type + " with " + files.size()); if (type.equals("MaternityServicesDataSet") || type.equals("SusEmergency") || type.equals("SusEmergencyTail")) { continue; } if (type.equals("CriticalCare")) { //cc_BH_192575_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "_", 2); } else if (type.equals("Diagnosis")) { //rnj_pc_diag_20190330-011515.dat checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_|-", 3); } else if (type.equals("HomeDeliveryAndBirth")) { //hdb_BH_192576_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "_", 2); } else if (type.equals("MaternityBirth")) { //GETL_MAT_BIRTH_2019-03-30_001020_1431392750.txt checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyy-MM-dd", "_", 3); } else if (type.equals("Pregnancy")) { //GETL_MAT_PREG_2019-03-30_001020_1431392781.txt checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyy-MM-dd", "_", 3); } else if (type.equals("Problem")) { //rnj_pc_prob_20190328-011001.dat checkForMissingFilesByDate(type, 
hmReceivedDate, files, "yyyyMMdd", "_|-", 3); } else if (type.equals("Procedure")) { //rnj_pc_proc_20180716-010530.dat checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_|-", 3); } else if (type.equals("SurginetCaseInfo")) { //spfit_sn_case_info_rnj_20190812-093823.dat checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_|-", 5); } else if (type.equals("SusEmergencyCareDataSet")) { //susecd.190360 AND susecd_BH.190039 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusEmergencyCareDataSetTail")) { //tailecd_DIS.190362 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusInpatient")) { //ip_BH_193174_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "_", 2); } else if (type.equals("SusInpatientTail")) { //tailip_DIS.203225_susrnj.dat checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.|_", 2); } else if (type.equals("SusOutpatient")) { //susopa_BH.204612 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusOutpatientTail")) { //tailopa_DIS.204610 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("SusOutpatientTail")) { //tailopa_DIS.204610 checkForMissingFilesByNumber(type, hmReceivedDate, files, "\\.", 1); } else if (type.equals("APPSL2")) { //GETL_APPSL2_80130_RNJ_10072018_065345_1.TXT checkForMissingFilesByDate(type, hmReceivedDate, files, "ddMMyyyy", "_", 4); } else if (type.equals("BlobContent")) { //Blob_Con_20190502_00198.csv checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_", 2); } else if (type.equals("FamilyHistory")) { //Fam_Hist_20190417_00326.csv checkForMissingFilesByDate(type, hmReceivedDate, files, "yyyyMMdd", "_", 2); } else { String first = files.get(0); if (first.contains("_RNJ_")) { //CLEVE_80130_RNJ_15072018_045416_6.TXT checkForMissingFilesByDate(type, hmReceivedDate, files, "ddMMyyyy", "_", 3); } else { //Blob_Con_20190328_00170.csv checkForMissingFilesByDate(type, hmReceivedDate, files, "ddMMyyyy", "_", 2); } } } LOG.info("Finished Checking for Barts missing files"); } catch (Throwable t) { LOG.error("", t); } } private static void checkForMissingFilesByDate(String fileType, Map<String, Date> hmReceivedDate, List<String> files, String dateFormat, String delimiter, int token) throws Exception { SimpleDateFormat sdf = new SimpleDateFormat(dateFormat); SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); Date minDate = null; Date maxDate = null; Map<Date, List<String>> hmByDate = new HashMap<>(); for (String file: files) { String[] toks = file.split(delimiter); String tok = null; Date d = null; try { tok = toks[token]; d = sdf.parse(tok); } catch (ParseException pe) { LOG.error("Error parsing " + tok + " with format " + dateFormat + " toks " + toks, pe); return; } //LOG.debug("File " + file + " -> " + tok + " -> " + sdf.format(d)); if (minDate == null || d.before(minDate)) { minDate = d; } if (maxDate == null || d.after(maxDate)) { maxDate = d; } List<String> l = hmByDate.get(d); if (l == null) { l = new ArrayList<>(); hmByDate.put(d, l); } l.add(file); } LOG.info("Checking for date range " + sdfOutput.format(minDate) + " to " + sdfOutput.format(maxDate)); Calendar cal = Calendar.getInstance(); Date d = new Date(minDate.getTime()); while (!d.after(maxDate)) { List<String> l = hmByDate.get(d); if (l == null) { cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, -1); Date dateBefore = cal.getTime(); List<String> 
before = hmByDate.get(dateBefore); String beforeDesc = null; if (before != null) { String firstBefore = before.get(0); Date beforeReceived = hmReceivedDate.get(firstBefore); beforeDesc = firstBefore + " on " + sdfOutput.format(beforeReceived); } cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, 1); Date dateAfter = cal.getTime(); List<String> after = hmByDate.get(dateAfter); String afterDesc = null; if (after != null) { String firstAfter = after.get(0); Date afterReceived = hmReceivedDate.get(firstAfter); afterDesc = firstAfter + " on " + sdfOutput.format(afterReceived); } LOG.error("No " + fileType + " file found for " + sdfOutput.format(d) + " previous [" + beforeDesc + "] after [" + afterDesc + "]"); } cal.setTime(d); cal.add(Calendar.DAY_OF_YEAR, 1); d = cal.getTime(); } } private static void checkForMissingFilesByNumber(String fileType, Map<String, Date> hmReceivedDate, List<String> files, String delimiter, int token) { SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); int maxNum = 0; int minNum = Integer.MAX_VALUE; Map<Integer, List<String>> hmByNum = new HashMap<>(); for (String file: files) { String[] toks = file.split(delimiter); String tok = null; int num = 0; try { tok = toks[token]; num = Integer.parseInt(tok); } catch (Exception ex) { LOG.error("Exception with " + file + " tok = [" + tok + "] and toks " + toks , ex); return; } maxNum = Math.max(num, maxNum); minNum = Math.min(num, minNum); List<String> l = hmByNum.get(new Integer(num)); if (l == null) { l = new ArrayList<>(); hmByNum.put(new Integer(num), l); } l.add(file); } LOG.info("Checking for number range " + minNum + " to " + maxNum); for (int i=minNum; i<=maxNum; i++) { List<String> l = hmByNum.get(new Integer(i)); if (l == null) { List<String> before = hmByNum.get(new Integer(i-1)); String beforeDesc = null; if (before != null) { String firstBefore = before.get(0); Date beforeReceived = hmReceivedDate.get(firstBefore); beforeDesc = firstBefore + " on " + sdfOutput.format(beforeReceived); } List<String> after = hmByNum.get(new Integer(i+1)); String afterDesc = null; if (after != null) { String firstAfter = after.get(0); Date afterReceived = hmReceivedDate.get(firstAfter); afterDesc = firstAfter + " on " + sdfOutput.format(afterReceived); } LOG.error("No " + fileType + " file found for " + i + " previous [" + beforeDesc + "] after [" + afterDesc + "]"); } } }*/ /*private static void deleteEnterpriseObs(String filePath, String configName, int batchSize) { LOG.info("Deleting Enterprise Observations"); try { String parent = FilenameUtils.getFullPath(filePath); String name = FilenameUtils.getName(filePath); String doneFilePath = FilenameUtils.concat(parent, "DONE" + name); Set<String> doneIds = new HashSet<>(); File f = new File(doneFilePath); if (f.exists()) { List<String> doneLines = Files.readAllLines(f.toPath()); for (String doneLine: doneLines) { doneIds.add(doneLine); } } LOG.debug("Previously done " + doneIds.size()); List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(configName); CSVParser parser = CSVParser.parse(new File(filePath), Charset.defaultCharset(), CSVFormat.TDF.withHeader()); Iterator<CSVRecord> iterator = parser.iterator(); List<String> batch = new ArrayList<>(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String id = record.get("id"); if (doneIds.contains(id)) { continue; } doneIds.add(id); batch.add(id); if (batch.size() >= batchSize) { saveBatch(batch, connectionWrappers, doneFilePath); } if (doneIds.size() % 1000 == 0) 
{ LOG.debug("Done " + doneIds.size()); } } if (!batch.isEmpty()) { saveBatch(batch, connectionWrappers, doneFilePath); LOG.debug("Done " + doneIds.size()); } parser.close(); LOG.info("Finished Deleting Enterprise Observations"); } catch (Throwable t) { LOG.error("", t); } }*/ private static void saveBatch(List<String> batch, List<EnterpriseConnector.ConnectionWrapper> connectionWrappers, String doneFilePath) throws Exception { for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) { String sql = "DELETE FROM observation WHERE id = ?"; Connection connection = connectionWrapper.getConnection(); PreparedStatement ps = connection.prepareStatement(sql); for (String id: batch) { ps.setLong(1, Long.parseLong(id)); ps.addBatch(); } ps.executeBatch(); connection.commit(); ps.close(); connection.close(); } //update audit Files.write(new File(doneFilePath).toPath(), batch, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); batch.clear(); } /*private static void testS3Listing(String path) { LOG.info("Testing S3 Listing"); try { LOG.info("Trying with full path: " + path); List<FileInfo> l = FileHelper.listFilesInSharedStorageWithInfo(path); LOG.info("Found " + l.size()); *//*for (FileInfo info: l) { LOG.info("Got " + info.getFilePath()); }*//* String parent = FilenameUtils.getFullPath(path); LOG.info("Trying with parent: " + parent); l = FileHelper.listFilesInSharedStorageWithInfo(parent); LOG.info("Found " + l.size()); *//*for (FileInfo info: l) { LOG.info("Got " + info.getFilePath()); }*//* LOG.info("Finished Testing S3 Listing"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testAuditingFile(UUID serviceId, UUID systemId, UUID exchangeId, String version, String filePath) { LOG.info("Testing Auditing File"); try { LOG.info("Creating parser"); //org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation obsParser = new org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation(serviceId, systemId, exchangeId, version, filePath); org.endeavourhealth.transform.tpp.csv.schema.staff.SRStaffMemberProfile obsParser = new org.endeavourhealth.transform.tpp.csv.schema.staff.SRStaffMemberProfile(serviceId, systemId, exchangeId, version, filePath); LOG.info("Created parser"); obsParser.nextRecord(); LOG.info("Done auditing"); obsParser.close(); LOG.info("Closed"); LOG.info("Finish Testing Auditing File"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void postPatientToProtocol(String odsCode, String patientUuid) { LOG.info("Posting patient " + patientUuid + " for " + odsCode + " to Protocol queue"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); UUID patientId = UUID.fromString(patientUuid); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); //exchanges are in order most recent first, so iterate backwards to get them in date order for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = 
exchanges.get(i); List<UUID> batchesForPatient = new ArrayList<>(); List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId()); for (ExchangeBatch batch: batches) { if (batch.getEdsPatientId() != null && batch.getEdsPatientId().equals(patientId)) { batchesForPatient.add(batch.getBatchId()); } } if (!batchesForPatient.isEmpty()) { LOG.debug("Posting " + batchesForPatient.size() + " for exchange " + exchange.getId() + " to rabbit"); //set new batch ID in exchange header String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchesForPatient.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); //post new batch to protocol Q PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished posting patient " + patientUuid + " for " + odsCode + " to Protocol queue"); } catch (Throwable t) { LOG.error("", t); } }*/ private static void postPatientsToProtocol(UUID serviceId, UUID systemId, String sourceFile) { try { LOG.info("Posting patient from " + sourceFile + " for " + serviceId + " to Protocol queue"); Set<UUID> hsPatientUuids = new HashSet<>(); List<String> lines = Files.readAllLines(new File(sourceFile).toPath()); for (String line: lines) { hsPatientUuids.add(UUID.fromString(line)); } LOG.info("Found " + hsPatientUuids.size() + " patient IDs"); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getById(serviceId); LOG.info("Service " + service.getId() + " -> " + service.getName()); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); //exchanges are in order most recent first, so iterate backwards to get them in date order for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<UUID> batchesForPatient = new ArrayList<>(); List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId()); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null && hsPatientUuids.contains(patientId)) { batchesForPatient.add(batch.getBatchId()); } } if (!batchesForPatient.isEmpty()) { LOG.debug("Posting " + batchesForPatient.size() + " for exchange " + exchange.getId() + " to rabbit"); //set new batch ID in exchange header String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchesForPatient.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); //post new batch to protocol Q PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished posting patients from " + sourceFile + " for " + serviceId + " to Protocol queue"); } catch (Throwable t) { LOG.error("", t); } } /*private static void testXml() { LOG.info("Testing XML"); try { //PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Map<String, String> queueReadConfigs = ConfigManager.getConfigurations("queuereader"); for (String configId: queueReadConfigs.keySet()) { LOG.debug("Checking config XML for " + configId); String configXml = 
queueReadConfigs.get(configId); if (configXml.startsWith("{")) { LOG.debug("Skipping JSON"); continue; } try { ApiConfiguration config = ConfigWrapper.deserialise(configXml); //LOG.debug("Deserialised as messaging API XML"); ApiConfiguration.PostMessageAsync postConfig = config.getPostMessageAsync(); } catch (Exception ex) { try { QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); } catch (Exception ex2) { LOG.error(configXml); LOG.error("", ex2); } } } LOG.info("Testing XML"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testMetrics() { LOG.info("Testing Metrics"); try { Random r = new Random(System.currentTimeMillis()); while (true) { String metric1 = "frailty-api.ms-duration"; Integer value1 = new Integer(r.nextInt(1000)); MetricsHelper.recordValue(metric1, value1); if (r.nextBoolean()) { MetricsHelper.recordEvent("frailty-api.response-code-200"); } else { MetricsHelper.recordEvent("frailty-api.response-code-400"); } int sleep = r.nextInt(10 * 1000); LOG.debug("Waiting " + sleep + " ms"); Thread.sleep(sleep); } } catch (Throwable t) { LOG.error("", t); } } private static void testGraphiteMetrics(String host, String port) { LOG.info("Testing Graphite metrics to " + host + " " + port); try { InetAddress ip = InetAddress.getLocalHost(); String hostname = ip.getHostName(); LOG.debug("Hostname = " + hostname); String appId = ConfigManager.getAppId(); LOG.debug("AppID = " + appId); Random r = new Random(System.currentTimeMillis()); while (true) { Map<String, Object> metrics = new HashMap<>(); String metric1 = hostname + "." + appId + ".frailty-api.duration-ms"; Integer value1 = new Integer(r.nextInt(1000)); metrics.put(metric1, value1); String metric2 = hostname + "." + appId+ ".frailty-api.response-code"; Integer value2 = null; if (r.nextBoolean()) { value2 = new Integer(200); } else { value2 = new Integer(400); } metrics.put(metric2, value2); long timestamp = System.currentTimeMillis() / 1000; LOG.debug("Sending metrics"); sendMetrics(host, Integer.parseInt(port), metrics, timestamp); int sleep = r.nextInt(10 * 1000); LOG.debug("Waiting " + sleep + " ms"); Thread.sleep(sleep); } } catch (Throwable t) { LOG.error("", t); } } private static void sendMetrics(String graphiteHost, int graphitePort, Map<String, Object> metrics, long timeStamp) throws Exception { Socket socket = new Socket(graphiteHost, graphitePort); OutputStream s = socket.getOutputStream(); PrintWriter out = new PrintWriter(s, true); for (Map.Entry<String, Object> metric: metrics.entrySet()) { if (metric.getValue() instanceof Integer) { out.printf("%s %d %d%n", metric.getKey(), ((Integer)metric.getValue()).intValue(), timeStamp); } else if (metric.getValue() instanceof Float) { out.printf("%s %f %d%n", metric.getKey(), ((Float)metric.getValue()).floatValue(), timeStamp); } else { throw new RuntimeException("Unsupported type " + metric.getValue().getClass()); } } out.close(); socket.close(); }*/ /*private static void fixEmisDeletedPatients(String odsCode) { LOG.info("Fixing Emis Deleted Patients for " + odsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); 
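            //note: the per-exchange scan below walks the Admin_Patient and Agreements_SharingOrganisation files
            //in each extract, tracking deductions/deaths and deletions so that patients deleted in error
            //(and later un-deleted) can be identified and their resources restored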
List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); Set<String> hsPatientGuidsDeductedDeceased = new HashSet<>(); Map<String, List<UUID>> hmPatientGuidsDeleted = new HashMap<>(); Map<String, List<String>> hmPatientGuidsToFix = new HashMap<>(); //exchanges are in REVERSE order (most recent first) for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } //skip if we're ignoring old data boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } //find patient file ExchangePayloadFile patientFile = findFileOfType(files, "Admin_Patient"); if (patientFile == null) { throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId()); } ExchangePayloadFile agreementFile = findFileOfType(files, "Agreements_SharingOrganisation"); if (agreementFile == null) { throw new Exception("Failed to find Agreements_SharingOrganisation file in exchange " + exchange.getId()); } //work out file version List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); filesTmp.add(agreementFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); //see if sharing agreement is disabled String path = agreementFile.getPath(); org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation agreementParser = new org.endeavourhealth.transform.emis.csv.schema.agreements.SharingOrganisation(serviceId, systemId, exchange.getId(), version, path); agreementParser.nextRecord(); CsvCell disabled = agreementParser.getDisabled(); boolean isDisabled = disabled.getBoolean(); //create the parser path = patientFile.getPath(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); CsvCell dateOfDeathCell = parser.getDateOfDeath(); CsvCell dateOfDeductionCell = parser.getDateOfDeactivation(); CsvCell deletedCell = parser.getDeleted(); if (deletedCell.getBoolean()) { List<UUID> exchangesDeleted = hmPatientGuidsDeleted.get(patientGuid); if (exchangesDeleted == null) { exchangesDeleted = new ArrayList<>(); hmPatientGuidsDeleted.put(patientGuid, exchangesDeleted); } exchangesDeleted.add(exchange.getId()); //if this patient was previously updated with a deduction date or date of death, and the sharing //agreement isn't disabled, then we will have deleted them and need to undelete if (hsPatientGuidsDeductedDeceased.contains(patientGuid) && !isDisabled) { List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid); if (exchangesToFix == null) { exchangesToFix = new ArrayList<>(); hmPatientGuidsToFix.put(patientGuid, exchangesToFix); } exchangesToFix.add(exchange.getId().toString() + ": Deducted/Dead and Deleted after"); } } else { //if the date of death of deduction is set then we need to track this //because we're going to possibly get a delete in a years time if (!dateOfDeathCell.isEmpty() || !dateOfDeductionCell.isEmpty()) { hsPatientGuidsDeductedDeceased.add(patientGuid); } else { 
hsPatientGuidsDeductedDeceased.remove(patientGuid); } //if this patient was previously deleted and is now UN-deleted, then we'll //need to fix the record if (hmPatientGuidsDeleted.containsKey(patientGuid)) { List<UUID> exchangesDeleted = hmPatientGuidsDeleted.remove(patientGuid); List<String> exchangesToFix = hmPatientGuidsToFix.get(patientGuid); if (exchangesToFix == null) { exchangesToFix = new ArrayList<>(); hmPatientGuidsToFix.put(patientGuid, exchangesToFix); } for (UUID exchangeId: exchangesDeleted) { exchangesToFix.add(exchangeId.toString() + ": Deleted and subsequently undeleted"); } } } } parser.close(); } LOG.info("Finished checking for affected patients - found " + hmPatientGuidsToFix.size() + " patients to fix"); for (String patientGuid: hmPatientGuidsToFix.keySet()) { List<String> exchangeIds = hmPatientGuidsToFix.get(patientGuid); LOG.info("Patient " + patientGuid); for (String exchangeId: exchangeIds) { LOG.info(" Exchange Id " + exchangeId); } //log out the UUID for the patient too EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, null, null, false, null); Reference ref = ReferenceHelper.createReference(ResourceType.Patient, patientGuid); ref = IdHelper.convertLocallyUniqueReferenceToEdsReference(ref, csvHelper); LOG.debug(" Patient UUID " + ref.getReference()); String patientUuidStr = ReferenceHelper.getReferenceId(ref); UUID patientUuid = UUID.fromString(patientUuidStr); Set<UUID> hsExchangeIdsDone = new HashSet<>(); Set<String> resourcesDone = new HashSet<>(); for (String exchangeId: exchangeIds) { UUID exchangeUuid = UUID.fromString(exchangeId.split(":")[0]); //in some cases, the same exchange was found twice if (hsExchangeIdsDone.contains(exchangeUuid)) { continue; } hsExchangeIdsDone.add(exchangeUuid); Exchange exchange = exchangeDal.getExchange(exchangeUuid); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); List<UUID> batchIdsCreated = new ArrayList<>(); TransformError transformError = new TransformError(); FhirResourceFiler filer = new FhirResourceFiler(exchangeUuid, serviceId, systemId, transformError, batchIdsCreated); //get all exchange batches for our patient List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeUuid); for (ExchangeBatch batch: batches) { UUID batchPatient = batch.getEdsPatientId(); if (batchPatient == null || !batchPatient.equals(patientUuid)) { continue; } //get all resources for this batch List<ResourceWrapper> resourceWrappers = resourceDal.getResourcesForBatch(serviceId, batch.getBatchId()); //restore each resource for (ResourceWrapper resourceWrapper: resourceWrappers) { //if an exchange was processed multiple times, we might try to pick up the same resource twice, so skip it String resourceRef = ReferenceHelper.createResourceReference(resourceWrapper.getResourceType(), resourceWrapper.getResourceId().toString()); if (resourcesDone.contains(resourceRef)) { continue; } resourcesDone.add(resourceRef); List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceWrapper.getResourceType(), resourceWrapper.getResourceId()); //most recent is first ResourceWrapper mostRecent = history.get(0); if (!mostRecent.isDeleted()) { continue; } //find latest non-deleted version and save it over the deleted version for (ResourceWrapper historyItem: history) { if (!historyItem.isDeleted()) { org.hl7.fhir.instance.model.Resource resource = FhirSerializationHelper.deserializeResource(historyItem.getResourceData()); 
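                                        //re-saving this non-deleted version over the delete effectively un-deletes the resource;
                                        //the break below means only that single historical version is restored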
GenericBuilder builder = new GenericBuilder(resource); filer.savePatientResource(null, false, builder); break; } } } } filer.waitToFinish(); //set new batch ID in exchange header String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); //post new batch to protocol Q PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished Fixing Emis Deleted Patients for " + odsCode); } catch (Throwable t) { LOG.error("", t); } }*/ private static ExchangePayloadFile findFileOfType(List<ExchangePayloadFile> files, String fileType) { for (ExchangePayloadFile file: files) { if (file.getType().equals(fileType)) { return file; } } return null; } /*private static void fixEmisEpisodes2(String odsCode) { LOG.info("Fixing Emis Episodes (2) for " + odsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal(); Set<String> patientGuidsDone = new HashSet<>(); //exchanges are in REVERSE order (most recent first) for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } //skip if we're ignoring old data boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } //find patient file ExchangePayloadFile patientFile = null; for (ExchangePayloadFile file: files) { if (file.getType().equals("Admin_Patient")) { patientFile = file; break; } } if (patientFile == null) { throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId()); } String path = patientFile.getPath(); List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell deletedCell = parser.getDeleted(); if (deletedCell.getBoolean()) { continue; } //skip patients already done CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); if (patientGuidsDone.contains(patientGuid)) { continue; } patientGuidsDone.add(patientGuid); //check we've not already converted this patient previously (i.e. 
re-running this conversion) CsvCell startDateCell = parser.getDateOfRegistration(); if (startDateCell.isEmpty()) { LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId()); startDateCell = CsvCell.factoryDummyWrapper("1900-01-01"); } //save internal ID map String key = patientGuidCell.getString(); String value = startDateCell.getString(); internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value); } parser.close(); } LOG.info("Finished Fixing Emis Episodes (2) for " + odsCode); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixEmisEpisodes1(String odsCode) { LOG.info("Fixing Emis Episodes (1) for " + odsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(odsCode); LOG.info("Service " + service.getId() + " -> " + service.getName()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); LOG.info("Found " + exchanges.size() + " exchanges"); InternalIdDalI internalIdDal = DalProvider.factoryInternalIdDal(); Set<String> patientGuidsDone = new HashSet<>(); //exchanges are in REVERSE order (most recent first) for (Exchange exchange: exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } //skip if we're ignoring old data boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } //find patient file ExchangePayloadFile patientFile = null; for (ExchangePayloadFile file: files) { if (file.getType().equals("Admin_Patient")) { patientFile = file; break; } } if (patientFile == null) { throw new Exception("Failed to find Admin_Patient file in exchange " + exchange.getId()); } String path = patientFile.getPath(); List<ExchangePayloadFile> filesTmp = new ArrayList<>(); filesTmp.add(patientFile); String version = EmisCsvToFhirTransformer.determineVersion(filesTmp); org.endeavourhealth.transform.emis.csv.schema.admin.Patient parser = new org.endeavourhealth.transform.emis.csv.schema.admin.Patient(serviceId, systemId, exchange.getId(), version, path); while (parser.nextRecord()) { CsvCell deletedCell = parser.getDeleted(); if (deletedCell.getBoolean()) { continue; } //skip patients already done CsvCell patientGuidCell = parser.getPatientGuid(); String patientGuid = patientGuidCell.getString(); if (patientGuidsDone.contains(patientGuid)) { continue; } patientGuidsDone.add(patientGuid); //check we've not already converted this patient previously (i.e. 
re-running this conversion) String key = patientGuidCell.getString(); String existingIdMapValue = internalIdDal.getDestinationId(serviceId, "Emis_Latest_Reg_Date", key); if (existingIdMapValue != null) { continue; } CsvCell startDateCell = parser.getDateOfRegistration(); if (startDateCell.isEmpty()) { LOG.error("Missing start date for patient " + patientGuid + " in exchange " + exchange.getId()); startDateCell = CsvCell.factoryDummyWrapper("1900-01-01"); } //find the existing UUID we've previously allocated String oldSourceId = patientGuid; UUID episodeUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.EpisodeOfCare, oldSourceId); if (episodeUuid == null) { LOG.error("Null episode UUID for old source ID " + oldSourceId + " in exchange " + exchange.getId()); continue; } //save ID reference mapping String newSourceId = patientGuid + ":" + startDateCell.getString(); UUID newEpisodeUuid = IdHelper.getOrCreateEdsResourceId(serviceId, ResourceType.EpisodeOfCare, newSourceId, episodeUuid); if (!newEpisodeUuid.equals(episodeUuid)) { throw new Exception("Failed to carry over UUID for episode. Old UUID was " + episodeUuid + " new UUID is " + newEpisodeUuid + " in exchange " + exchange.getId()); } //save internal ID map String value = startDateCell.getString(); internalIdDal.save(serviceId, "Emis_Latest_Reg_Date", key, value); } parser.close(); } LOG.info("Finished Fixing Emis Episodes (1) for " + odsCode); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testRabbit(String nodes, String username, String password, String sslProtocol, String exchangeName, String queueName) { LOG.info("Testing RabbitMQ Connectivity on " + nodes); LOG.info("SSL Protocol = " + sslProtocol); LOG.info("Exchange = " + exchangeName); LOG.info("Queue = " + queueName); try { //test publishing LOG.info("Testing publishing..."); com.rabbitmq.client.Connection publishConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol); Channel publishChannel = org.endeavourhealth.core.queueing.ConnectionManager.getPublishChannel(publishConnection, exchangeName); publishChannel.confirmSelect(); for (int i=0; i<5; i++) { Map<String, Object> headers = new HashMap<>(); headers.put("HeaderIndex", "" + i); AMQP.BasicProperties properties = new AMQP.BasicProperties() .builder() .deliveryMode(2) // Persistent message .headers(headers) .build(); String body = "MessageIndex = " + i; byte[] bytes = body.getBytes(); publishChannel.basicPublish( exchangeName, "All", //routing key properties, bytes); } publishChannel.close(); publishConnection.close(); LOG.info("...Finished testing publishing"); //test consuming LOG.info("Testing reading..."); com.rabbitmq.client.Connection readConnection = org.endeavourhealth.core.queueing.ConnectionManager.getConnection(username, password, nodes, sslProtocol); Channel readChannel = readConnection.createChannel(); readChannel.basicQos(1); Consumer consumer = new TestRabbitConsumer(readChannel); readChannel.basicConsume(queueName, false, "TestRabbitConsumer", false, true, null, consumer); LOG.info("Reader Connected (ctrl+c to close) will quit in 30s"); Thread.sleep(30 * 1000); LOG.info("Finished Testing RabbitMQ Connectivity"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void populateLastDataDate(int threads, int batchSize) { LOG.debug("Populating last data date"); try { int processed = 0; AtomicInteger fixed = new AtomicInteger(); ThreadPool threadPool = new ThreadPool(threads, batchSize); while (true) { String 
sql = "SELECT id FROM drewtest.exchange_ids WHERE done = 0 LIMIT " + batchSize; //LOG.debug("Getting new batch using: " + sql); EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager(); SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate(); Connection auditConnection = auditSession.connection(); Statement statement = auditConnection.createStatement(); ResultSet rs = statement.executeQuery(sql); List<UUID> exchangeIds = new ArrayList<>(); while (rs.next()) { String s = rs.getString(1); //LOG.debug("Got back exchange ID " + s); exchangeIds.add(UUID.fromString(s)); } rs.close(); statement.close(); auditEntityManager.close(); for (UUID exchangeId: exchangeIds) { threadPool.submit(new PopulateDataDateCallable(exchangeId, fixed)); } List<ThreadPoolError> errs = threadPool.waitUntilEmpty(); if (!errs.isEmpty()) { LOG.debug("Got " + errs.size() + " errors"); for (ThreadPoolError err: errs) { LOG.error("", err.getException()); } break; } processed += exchangeIds.size(); LOG.debug("processed " + processed + " fixed " + fixed.get()); //if finished if (exchangeIds.size() < batchSize) { break; } } threadPool.waitAndStop(); LOG.debug("Finished Populating last data date"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixEmisMissingSlots(String serviceOdsCode) { LOG.debug("Fixing Emis Missing Slots for " + serviceOdsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(serviceOdsCode); LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId()); List<UUID> systemIds = findSystemIds(service); if (systemIds.size() != 1) { throw new Exception("Found " + systemIds.size() + " for service"); } UUID systemId = systemIds.get(0); UUID serviceId = service.getId(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); Set<String> hsSlotsToSkip = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); File auditFile = new File("SlotAudit_" + serviceOdsCode + ".csv"); LOG.debug("Auditing to " + auditFile); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); if (exchangeConfig == null) { throw new Exception("Failed to find PostMessageToExchange config details for exchange EdsProtocol"); } //the list of exchanges is most-recent-first, so iterate backwards to do them in order for (Exchange exchange : exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); //skip exchanges that are for custom extracts if (files.size() <= 1) { continue; } boolean processPatientData = EmisCsvToFhirTransformer.shouldProcessPatientData(serviceId, files); if (!processPatientData) { continue; } ExchangeTransformAudit transformAudit = new ExchangeTransformAudit(); transformAudit.setServiceId(serviceId); transformAudit.setSystemId(systemId); transformAudit.setExchangeId(exchange.getId()); transformAudit.setId(UUID.randomUUID()); transformAudit.setStarted(new Date()); String version = EmisCsvToFhirTransformer.determineVersion(files); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), null, processPatientData, null); //the processor is responsible for saving FHIR resources TransformError transformError = new TransformError(); List<UUID> batchIdsCreated = new ArrayList<>(); FhirResourceFiler fhirResourceFiler = new 
FhirResourceFiler(exchange.getId(), serviceId, systemId, transformError, batchIdsCreated); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchange.getId(), files, version, parsers); try { //cache the practitioners for each session SessionUserTransformer.transform(parsers, fhirResourceFiler, csvHelper); Slot parser = (Slot) parsers.get(Slot.class); while (parser.nextRecord()) { //should this record be transformed? //the slots CSV contains data on empty slots too; ignore them CsvCell patientGuid = parser.getPatientGuid(); if (patientGuid.isEmpty()) { continue; } //the EMIS data contains thousands of appointments that refer to patients we don't have, so I'm explicitly //handling this here, and ignoring any Slot record that is in this state UUID patientEdsId = IdHelper.getEdsResourceId(fhirResourceFiler.getServiceId(), ResourceType.Patient, patientGuid.getString()); if (patientEdsId == null) { continue; } //see if this appointment has previously been transformed CsvCell slotGuid = parser.getSlotGuid(); String uniqueId = patientGuid.getString() + ":" + slotGuid.getString(); if (!hsSlotsToSkip.contains(uniqueId)) { //transform this slot record if no appt already exists for it boolean alreadyExists = false; UUID discoveryId = IdHelper.getEdsResourceId(serviceId, ResourceType.Slot, uniqueId); if (discoveryId != null) { List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Slot.toString(), discoveryId); if (!history.isEmpty()) { alreadyExists = true; } } if (alreadyExists) { hsSlotsToSkip.add(uniqueId); } } if (hsSlotsToSkip.contains(uniqueId)) { continue; } hsSlotsToSkip.add(uniqueId); try { LOG.debug("Creating slot for " + uniqueId); SlotTransformer.createSlotAndAppointment((Slot) parser, fhirResourceFiler, csvHelper); } catch (Exception ex) { fhirResourceFiler.logTransformRecordError(ex, parser.getCurrentState()); } } csvHelper.clearCachedSessionPractitioners(); fhirResourceFiler.failIfAnyErrors(); fhirResourceFiler.waitToFinish(); } catch (Throwable ex) { Map<String, String> args = new HashMap<>(); args.put(TransformErrorUtility.ARG_FATAL_ERROR, ex.getMessage()); TransformErrorUtility.addTransformError(transformError, ex, args); LOG.error("", ex); } transformAudit.setEnded(new Date()); transformAudit.setNumberBatchesCreated(new Integer(batchIdsCreated.size())); if (transformError.getError().size() > 0) { transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError)); } //save our audit if something went wrong or was saved if (transformError.getError().size() > 0 || !batchIdsCreated.isEmpty()) { exchangeDal.save(transformAudit); } //send to Rabbit protocol queue if (!batchIdsCreated.isEmpty()) { //write batch ID to file, so we have an audit of what we created List<String> lines = new ArrayList<>(); for (UUID batchId : batchIdsCreated) { lines.add("\"" + exchange.getId() + "\",\"" + batchId + "\""); } Files.write(auditFile.toPath(), lines, StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); String batchesJson = ObjectMapperPool.getInstance().writeValueAsString(batchIdsCreated.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchesJson); //send to Rabbit PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } if (transformError.getError().size() > 0) { throw new Exception("Dropping out due to error in transform"); } } LOG.debug("Finished Fixing Emis Missing Slots for " + serviceOdsCode); } 
catch (Throwable t) { LOG.error("", t); } } private static void findBartsPersonIds(String sourceFile, UUID serviceUuid, UUID systemUuid, String dateCutoffStr, String destFile) { LOG.debug("Finding Barts person IDs for " + sourceFile); try { //read NHS numbers into memory Set<String> hsNhsNumbers = new HashSet<>(); List<String> listNhsNumbers = new ArrayList<>(); File src = new File(sourceFile); List<String> lines = Files.readAllLines(src.toPath()); for (String line : lines) { String s = line.trim(); hsNhsNumbers.add(s); listNhsNumbers.add(s); //maintain a list so we can preserve the ordering } LOG.debug("Looking for Person IDs for " + hsNhsNumbers.size() + " nhs numbers or any since " + dateCutoffStr); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); Date dateCutoff = sdf.parse(dateCutoffStr); Map<String, Set<String>> hmMatches = new HashMap<>(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile file : files) { String parentPath = new File(file.getPath()).getParent(); String parentDir = FilenameUtils.getBaseName(parentPath); Date extractDate = sdf.parse(parentDir); boolean inDateRange = !extractDate.before(dateCutoff); String type = file.getType(); if (type.equals("PPATI")) { PPATI parser = new PPATI(null, null, null, null, file.getPath()); while (parser.nextRecord()) { CsvCell nhsNumberCell = parser.getNhsNumber(); String nhsNumber = nhsNumberCell.getString(); nhsNumber = nhsNumber.replace("-", ""); if (hsNhsNumbers.contains(nhsNumber) || inDateRange) { CsvCell personIdCell = parser.getMillenniumPersonId(); String personId = personIdCell.getString(); Set<String> s = hmMatches.get(nhsNumber); if (s == null) { s = new HashSet<>(); hmMatches.put(nhsNumber, s); } s.add(personId); } } parser.close(); } else if (type.equals("PPALI")) { PPALI parser = new PPALI(null, null, null, null, file.getPath()); while (parser.nextRecord()) { CsvCell aliasCell = parser.getAlias(); //not going to bother trying to filter on alias type, since it won't hurt to include //extra patients, if they have an MRN that accidentally matches one of the NHS numbers being searched for String alias = aliasCell.getString(); if (hsNhsNumbers.contains(alias) || inDateRange) { //NHS numbers in PPALI don't have the extra hyphens CsvCell personIdCell = parser.getMillenniumPersonIdentifier(); String personId = personIdCell.getString(); Set<String> s = hmMatches.get(alias); if (s == null) { s = new HashSet<>(); hmMatches.put(alias, s); } s.add(personId); } } parser.close(); } else { //just ignore other file types } } } LOG.debug("" + hmMatches.size() + " / " + hsNhsNumbers.size() + " NHS numbers had person IDs found"); List<String> newLines = new ArrayList<>(); for (String nhsNumber : listNhsNumbers) { Set<String> personIds = hmMatches.get(nhsNumber); if (personIds == null) { LOG.error("Failed to find person ID for " + nhsNumber); continue; } newLines.add("#NHS " + nhsNumber + ":"); for (String personId : personIds) { newLines.add(personId); } } File dst = new File(destFile); if (dst.exists()) { dst.delete(); } Files.write(dst.toPath(), newLines); LOG.debug("Finished Finding Barts person IDs for " + sourceFile); } catch (Throwable t) { LOG.error("", t); } }*/ private static void createEmisDataTables() { LOG.debug("Creating Emis data tables"); try { 
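            //the list below names each Emis extract file type we want a staging table for; createEmisDataTable()
            //only prints the DROP/CREATE statements to stdout so they can be reviewed and run by hand,
            //rather than executing them against a database here.
            //For illustration, the generated DDL for a file type looks roughly like:
            //  DROP TABLE IF EXISTS `Admin_Location`;
            //  CREATE TABLE `Admin_Location` (file_name varchar(100), extract_date datetime, <one varchar column per CSV header>);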
List<String> fileTypes = new ArrayList<>(); fileTypes.add("Admin_Location"); fileTypes.add("Admin_OrganisationLocation"); fileTypes.add("Admin_Organisation"); fileTypes.add("Admin_Patient"); fileTypes.add("Admin_UserInRole"); fileTypes.add("Agreements_SharingOrganisation"); fileTypes.add("Appointment_SessionUser"); fileTypes.add("Appointment_Session"); fileTypes.add("Appointment_Slot"); fileTypes.add("CareRecord_Consultation"); fileTypes.add("CareRecord_Diary"); fileTypes.add("CareRecord_ObservationReferral"); fileTypes.add("CareRecord_Observation"); fileTypes.add("CareRecord_Problem"); fileTypes.add("Coding_ClinicalCode"); fileTypes.add("Coding_DrugCode"); fileTypes.add("Prescribing_DrugRecord"); fileTypes.add("Prescribing_IssueRecord"); fileTypes.add("Audit_PatientAudit"); fileTypes.add("Audit_RegistrationAudit"); for (String fileType : fileTypes) { createEmisDataTable(fileType); } LOG.debug("Finished Creating Emis data tables"); } catch (Throwable t) { LOG.error("", t); } } private static void createEmisDataTable(String fileType) throws Exception { ParserI parser = createParserForEmisFileType(fileType, null); if (parser == null) { return; } System.out.println("-- " + fileType); String table = fileType.replace(" ", "_"); String dropSql = "DROP TABLE IF EXISTS `" + table + "`;"; System.out.println(dropSql); String sql = "CREATE TABLE `" + table + "` ("; sql += "file_name varchar(100)"; sql += ", "; sql += "extract_date datetime"; if (parser instanceof AbstractFixedParser) { AbstractFixedParser fixedParser = (AbstractFixedParser) parser; List<FixedParserField> fields = fixedParser.getFieldList(); for (FixedParserField field : fields) { String col = field.getName(); int len = field.getFieldlength(); sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); sql += " varchar("; sql += len; sql += ")"; } } else { List<String> cols = parser.getColumnHeaders(); for (String col : cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); if (col.equals("BLOB_CONTENTS") || col.equals("VALUE_LONG_TXT") || col.equals("COMMENT_TXT") || col.equals("NONPREG_REL_PROBLM_SCT_CD")) { sql += " mediumtext"; } else if (col.indexOf("Date") > -1 || col.indexOf("Time") > -1) { sql += " varchar(10)"; } else { sql += " varchar(255)"; } } } sql += ");"; /*LOG.debug("-- fileType"); LOG.debug(sql);*/ System.out.println(sql); } /*private static void convertFhirAudits(String publisherConfigName, int threads, int batchSize) throws Exception { LOG.info("Converting FHIR audit for " + publisherConfigName); try { //find a suitable service ID UUID dummyServiceId = null; ServiceDalI serviceDal = DalProvider.factoryServiceDal(); for (Service s : serviceDal.getAll()) { if (s.getPublisherConfigName() != null && s.getPublisherConfigName().equalsIgnoreCase(publisherConfigName)) { dummyServiceId = s.getId(); LOG.info("Found sample service ID " + s.getId() + " " + s.getName() + " " + s.getLocalId()); break; } } EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); //ensure all source files are mapped to published files LOG.debug("Mapping source files to published files"); String sql = "SELECT id, service_id, system_id, file_path, exchange_id, description" + " FROM source_file_mapping" + " WHERE new_published_file_id IS NULL"; PreparedStatement ps = connection.prepareStatement(sql); List<FileDesc> fileDescs = new ArrayList<>(); 
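            //each unmapped row of source_file_mapping is loaded into a FileDesc, re-audited via auditParser()
            //and then back-filled with the id of the newly published file audit in batches further down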
ResultSet rs = ps.executeQuery(); while (rs.next()) { int col = 1; FileDesc f = new FileDesc(); f.id = rs.getInt(col++); f.serviceId = UUID.fromString(rs.getString(col++)); f.systemId = UUID.fromString(rs.getString(col++)); f.filePath = rs.getString(col++); f.exchangeId = UUID.fromString(rs.getString(col++)); f.fileDesc = rs.getString(col++); fileDescs.add(f); } ps.close(); entityManager.close(); LOG.debug("Found " + fileDescs.size() + " files to map"); List<FileDesc> batch = new ArrayList<>(); for (int i = 0; i < fileDescs.size(); i++) { FileDesc f = fileDescs.get(i); Integer newFileAuditId = auditParser(f.serviceId, f.systemId, f.exchangeId, f.filePath, f.fileDesc); if (newFileAuditId == null) { continue; } f.newId = newFileAuditId; batch.add(f); if (batch.size() >= batchSize || i + 1 >= fileDescs.size()) { entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); sql = "UPDATE source_file_mapping" + " SET new_published_file_id = ?" + " WHERE id = ?"; ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); for (FileDesc toSave : batch) { int col = 1; ps.setInt(col++, toSave.newId); ps.setInt(col++, toSave.id); ps.addBatch(); } ps.executeBatch(); entityManager.getTransaction().commit(); ps.close(); entityManager.close(); } if (i % 100 == 0) { LOG.debug("Audited " + i + " files"); } } LOG.info("Finished Converting FHIR audit for " + publisherConfigName); } catch (Throwable t) { LOG.error("", t); } }*/ private static UUID findSuitableServiceIdForPublisherConfig(String publisherConfigName) throws Exception { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); for (Service s: serviceDal.getAll()) { if (s.getPublisherConfigName() != null && s.getPublisherConfigName().equalsIgnoreCase(publisherConfigName)) { return s.getId(); } } throw new Exception("Failed to find suitable service ID for publisher [" + publisherConfigName + "]"); } private static void convertFhirAudits2(String publisherConfigName, String tempTable, int threads, int batchSize, boolean testMode) throws Exception { LOG.info("Converting FHIR audit for " + publisherConfigName); try { //find a suitable service ID UUID dummyServiceId = findSuitableServiceIdForPublisherConfig(publisherConfigName); ThreadPool threadPool = new ThreadPool(threads, 1000); int done = 0; while (true) { String sql = "SELECT c.resource_id, c.resource_type, c.created_at, m.version, m.mappings_json" + " FROM " + tempTable + " c" + " INNER JOIN resource_field_mappings m" + " ON c.resource_id = m.resource_id" + " AND c.resource_type = m.resource_type" + " AND c.created_at = m.created_at" + " WHERE c.done = false" + " LIMIT " + batchSize; Map<ResourceWrapper, ResourceFieldMappingAudit> map = new HashMap<>(); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); PreparedStatement ps = connection.prepareStatement(sql); ResultSet rs = ps.executeQuery(); while (rs.next()) { int col = 1; ResourceWrapper r = new ResourceWrapper(); r.setResourceId(UUID.fromString(rs.getString(col++))); r.setResourceType(rs.getString(col++)); r.setCreatedAt(new Date(rs.getTimestamp(col++).getTime())); r.setVersion(UUID.fromString(rs.getString(col++))); ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(rs.getString(col++)); map.put(r, audit); } ps.close(); 
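                //the query above picks up the next batch of unconverted audits; once every row in the
                //temp table has been flagged as done, the loop drops out via the lastOne check below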
entityManager.close(); boolean lastOne = map.size() < batchSize; for (ResourceWrapper r: map.keySet()) { ResourceFieldMappingAudit audit = map.get(r); ConvertFhirAuditCallable c = new ConvertFhirAuditCallable(testMode, dummyServiceId, audit, r); List<ThreadPoolError> errors = threadPool.submit(c); handleErrors(errors); } //now save everything List<ThreadPoolError> errors = threadPool.waitUntilEmpty(); handleErrors(errors); done += map.size(); if (!testMode) { entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); //save all audits sql = "UPDATE resource_field_mappings" + " SET mappings_json = ?" + " WHERE resource_id = ?" + " AND resource_type = ?" + " AND created_at = ?" + " AND version = ?"; ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); for (ResourceWrapper r : map.keySet()) { ResourceFieldMappingAudit audit = map.get(r); String auditJson = audit.writeToJson(); int col = 1; ps.setString(col++, auditJson); ps.setString(col++, r.getResourceId().toString()); ps.setString(col++, r.getResourceType()); ps.setTimestamp(col++, new Timestamp(r.getCreatedAt().getTime())); ps.setString(col++, r.getVersion().toString()); ps.addBatch(); } ps.executeBatch(); entityManager.getTransaction().commit(); ps.close(); entityManager.close(); entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); //mark temp table as done sql = "UPDATE " + tempTable + " SET done = true" + " WHERE done = false" + " AND resource_id = ?" + " AND resource_type = ?" + " AND created_at = ?"; ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); for (ResourceWrapper r : map.keySet()) { int col = 1; ps.setString(col++, r.getResourceId().toString()); ps.setString(col++, r.getResourceType()); ps.setTimestamp(col++, new Timestamp(r.getCreatedAt().getTime())); ps.addBatch(); } ps.executeBatch(); entityManager.getTransaction().commit(); ps.close(); entityManager.close(); } if (done % 1000 == 0) { LOG.info("Done " + done); } if (lastOne || testMode) { break; } } LOG.info("Done " + done); LOG.info("Finished Converting FHIR audit for " + publisherConfigName); } catch (Throwable t) { LOG.error("", t); } } private static void handleErrors(List<ThreadPoolError> errors) throws Exception { if (errors == null || errors.isEmpty()) { return; } //if we've had multiple errors, just throw the first one, since they'll most-likely be the same ThreadPoolError first = errors.get(0); Throwable cause = first.getException(); //the cause may be an Exception or Error so we need to explicitly //cast to the right type to throw it without changing the method signature if (cause instanceof Exception) { throw (Exception)cause; } else if (cause instanceof Error) { throw (Error)cause; } } static class ConvertFhirAuditCallable implements Callable { private Map<String, UUID> hmPublishers = null; private ResourceWrapper r; private ResourceFieldMappingAudit audit; private UUID dummyServiceId; private boolean testMode; public ConvertFhirAuditCallable(boolean testMode, UUID dummyServiceId, ResourceFieldMappingAudit audit, ResourceWrapper r) { this.testMode = testMode; this.dummyServiceId = dummyServiceId; this.audit = audit; this.r = r; } @Override public Object call() throws Exception { String auditJson = audit.writeToJson(); 
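            //capture the audit JSON before any rows are rewritten so that, in test mode, the old and new
            //versions can be logged side by side at the end of this method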
List<ResourceFieldMappingAudit.ResourceFieldMappingAuditRow> auditRows = audit.getAudits(); for (ResourceFieldMappingAudit.ResourceFieldMappingAuditRow auditRow: auditRows) { Long oldStyleAuditId = auditRow.getOldStyleAuditId(); //got some records with a mix of old and new-style audits so skip any rows that are new-style if (oldStyleAuditId == null) { continue; } //need to work out if it's one of the audits where the record ID is potentially on a different server boolean isPotentiallyOnAnotherServer = false; String desiredFileName = null; for (ResourceFieldMappingAudit.ResourceFieldMappingAuditCol auditCol: auditRow.getCols()) { if (r.getResourceType().equals(ResourceType.MedicationOrder.toString()) || r.getResourceType().equals(ResourceType.MedicationStatement.toString())) { if (auditCol.getField().equals("medicationCodeableConcept.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "DrugCode"; } } else if (r.getResourceType().equals(ResourceType.Observation.toString()) || r.getResourceType().equals(ResourceType.Condition.toString()) || r.getResourceType().equals(ResourceType.Procedure.toString()) || r.getResourceType().equals(ResourceType.DiagnosticReport.toString())) { if (auditCol.getField().equals("code.text") || auditCol.getField().equals("component[1].code.text") || auditCol.getField().equals("component[0].code.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.AllergyIntolerance.toString())) { if (auditCol.getField().equals("substance.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.FamilyMemberHistory.toString())) { if (auditCol.getField().equals("condition[0].code.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.Immunization.toString())) { if (auditCol.getField().equals("vaccineCode.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.DiagnosticOrder.toString())) { if (auditCol.getField().equals("item[0].code.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.ReferralRequest.toString())) { if (auditCol.getField().equals("serviceRequested[0].text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.Specimen.toString())) { if (auditCol.getField().equals("type.text")) { isPotentiallyOnAnotherServer = true; desiredFileName = "ClinicalCode"; } } else if (r.getResourceType().equals(ResourceType.Location.toString())) { if (auditCol.getField().equals("managingOrganization.reference")) { isPotentiallyOnAnotherServer = true; desiredFileName = "OrganisationLocation"; } else { isPotentiallyOnAnotherServer = true; desiredFileName = "Location"; } } else if (r.getResourceType().equals(ResourceType.Organization.toString())) { isPotentiallyOnAnotherServer = true; desiredFileName = "Organisation"; } else if (r.getResourceType().equals(ResourceType.Practitioner.toString())) { isPotentiallyOnAnotherServer = true; desiredFileName = "UserInRole"; } if (isPotentiallyOnAnotherServer) { break; } } List<Integer> newIds = null; if (isPotentiallyOnAnotherServer) { newIds = findNewAuditIdOnAnyServer(oldStyleAuditId, desiredFileName); } else { newIds = findNewAuditIdOnThisServer(oldStyleAuditId); } Integer 
newFileAuditId = newIds.get(0); Integer newRecordNum = newIds.get(1); auditRow.setOldStyleAuditId(null); auditRow.setFileId(newFileAuditId.intValue()); auditRow.setRecord(newRecordNum.intValue()); } if (testMode) { String newAuditJson = audit.writeToJson(); String str = "Testing " + r.getResourceType() + " " + r.getResourceId() + " version " + r.getVersion() + " from " + r.getCreatedAt() + "\nOld JSON:" + "\n" + auditJson + "\nNew JSON:" + "\n" + newAuditJson; LOG.info(str); } return null; } private List<Integer> findNewAuditIdOnThisServer(Long oldStyleAuditId) throws Exception { EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(dummyServiceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); //need to convert oldStyleID to a fileID and record number String sql = "select r.source_location, f.new_published_file_id" + " from source_file_record r" + " inner join source_file_mapping f" + " on f.id = r.source_file_id" + " where r.id = ?"; PreparedStatement ps = connection.prepareStatement(sql); ps.setLong(1, oldStyleAuditId.longValue()); ResultSet rs = ps.executeQuery(); if (!rs.next()) { throw new Exception("Failed to find source record details for old style audit ID " + oldStyleAuditId + " in audit for " + r.getResourceType() + " " + r.getResourceId() + " from " + r.getCreatedAt()); } int col = 1; String recordNumStr = rs.getString(col++); int newPublishedFileId = rs.getInt(col++); ps.close(); entityManager.close(); List<Integer> ret = new ArrayList<>(); ret.add(new Integer(newPublishedFileId)); ret.add(Integer.valueOf(recordNumStr)); return ret; } private List<Integer> findNewAuditIdOnAnyServer(Long oldStyleAuditId, String desiredFileName) throws Exception { if (hmPublishers == null) { Map<String, UUID> map = new HashMap<>(); List<String> publishers = new ArrayList<>(); publishers.add("publisher_01"); publishers.add("publisher_02"); publishers.add("publisher_03"); publishers.add("publisher_04"); publishers.add("publisher_05"); for (String publisher: publishers) { UUID serviceId = findSuitableServiceIdForPublisherConfig(publisher); map.put(publisher, serviceId); } hmPublishers = map; } Integer foundRecordNum = null; Integer foundPublishedFileId = null; String foundOnPublisher = null; for (String publisher: hmPublishers.keySet()) { UUID serviceId = hmPublishers.get(publisher); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); //need to convert oldStyleID to a fileID and record number String sql = "select r.source_location, f.new_published_file_id" + " from source_file_record r" + " inner join source_file_mapping f" + " on f.id = r.source_file_id" + " where r.id = ?" 
+ " and f.file_path LIKE '%" + desiredFileName + "%'"; PreparedStatement ps = connection.prepareStatement(sql); ps.setLong(1, oldStyleAuditId.longValue()); ResultSet rs = ps.executeQuery(); if (rs.next()) { int col = 1; String recordNumStr = rs.getString(col++); int newPublishedFileId = rs.getInt(col++); ps.close(); entityManager.close(); if (foundPublishedFileId == null) { foundPublishedFileId = new Integer(newPublishedFileId); foundRecordNum = Integer.valueOf(recordNumStr); foundOnPublisher = publisher; } else { LOG.error("Old style audit = " + oldStyleAuditId); LOG.error("On " + foundOnPublisher + " found " + foundPublishedFileId + " published file ID and record number " + foundRecordNum); LOG.error("On " + publisher + " found " + newPublishedFileId + " published file ID and record number " + recordNumStr); throw new Exception("Found more than one matching file for old-style audit ID " + oldStyleAuditId + " and desired file name " + desiredFileName + " over all servers"); } } } if (foundPublishedFileId == null) { throw new Exception("Failed to find published file ID and record number for old-style audit ID " + oldStyleAuditId + " and desired file name " + desiredFileName + " over all servers"); } List<Integer> ret = new ArrayList<>(); ret.add(foundPublishedFileId); ret.add(foundRecordNum); return ret; } } /*static class FileDesc { int id; UUID serviceId; UUID systemId; String filePath; UUID exchangeId; String fileDesc; int newId; } private static Integer auditParser(UUID serviceId, UUID systemId, UUID exchangeId, String filePath, String fileDesc) throws Exception { ParserI parser = createParser(serviceId, systemId, exchangeId, filePath, fileDesc); if (parser == null) { LOG.debug("No parser created for " + fileDesc + " " + filePath); return null; } Integer newId = parser.ensureFileAudited(); if (newId == null) { throw new Exception("Null new ID for auditing file " + filePath); } return new Integer(newId); } private static ParserI createParser(UUID serviceId, UUID systemId, UUID exchangeId, String filePath, String fileDesc) throws Exception { if (fileDesc.startsWith("Vision ")) { if (fileDesc.equals("Vision organisations file")) { return new Practice(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision staff file")) { return new Staff(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision patient file")) { return new Patient(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision encounter file")) { return new Encounter(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision referrals file")) { return new Referral(serviceId, systemId, exchangeId, null, filePath); } else if (fileDesc.equals("Vision journal file")) { return new Journal(serviceId, systemId, exchangeId, null, filePath); } else { throw new Exception("Unknown vision file [" + fileDesc + "]"); } } if (fileDesc.equals("Bespoke Emis registration status extract") || fileDesc.equals("RegistrationStatus")) { String DATE_FORMAT = "dd/MM/yyyy"; String TIME_FORMAT = "hh:mm:ss"; CSVFormat CSV_FORMAT = CSVFormat.TDF .withHeader() .withEscape((Character)null) .withQuote((Character)null) .withQuoteMode(QuoteMode.MINIMAL); //ideally want Quote Mdde NONE, but validation in the library means we need to use this; List<String> possibleVersions = new ArrayList<>(); possibleVersions.add(RegistrationStatus.VERSION_WITH_PROCESSING_ID); possibleVersions.add(RegistrationStatus.VERSION_WITHOUT_PROCESSING_ID); RegistrationStatus 
testParser = new RegistrationStatus(null, null, null, null, filePath, CSV_FORMAT, DATE_FORMAT, TIME_FORMAT); possibleVersions = testParser.testForValidVersions(possibleVersions); String version = possibleVersions.get(0); return new RegistrationStatus(serviceId, systemId, exchangeId, version, filePath, CSV_FORMAT, DATE_FORMAT, TIME_FORMAT); } if (fileDesc.equals("OriginalTerms")) { String DATE_FORMAT2 = "dd/MM/yyyy"; String TIME_FORMAT2 = "hh:mm:ss"; CSVFormat CSV_FORMAT2 = CSVFormat.TDF .withHeader() .withEscape((Character)null) .withQuote((Character)null) .withQuoteMode(QuoteMode.MINIMAL); //ideally want Quote Mdde NONE, but validation in the library means we need to use this; return new OriginalTerms(serviceId, systemId, exchangeId, null, filePath, CSV_FORMAT2, DATE_FORMAT2, TIME_FORMAT2); } if (filePath.contains("EMIS")) { if (fileDesc.equals("Emis appointments file")) { fileDesc = "Slot"; } else if (fileDesc.equals("Emis appointments session file")) { fileDesc = "Session"; } else if (fileDesc.equals("Emis clinical code reference file")) { fileDesc = "ClinicalCode"; } else if (fileDesc.equals("Emis consultations file")) { fileDesc = "Consultation"; } else if (fileDesc.equals("Emis diary file")) { fileDesc = "Diary"; } else if (fileDesc.equals("Emis drug code reference file")) { fileDesc = "DrugCode"; } else if (fileDesc.equals("Emis drug record file")) { fileDesc = "DrugRecord"; } else if (fileDesc.equals("Emis issue records file")) { fileDesc = "IssueRecord"; } else if (fileDesc.equals("Emis observations file")) { fileDesc = "Observation"; } else if (fileDesc.equals("Emis organisation location file")) { fileDesc = "Location"; } else if (fileDesc.equals("Emis organisation-location link file")) { fileDesc = "OrganisationLocation"; } else if (fileDesc.equals("Emis organisations file")) { fileDesc = "Organisation"; } else if (fileDesc.equals("Emis patient file")) { fileDesc = "Patient"; } else if (fileDesc.equals("Emis problems file")) { fileDesc = "Problem"; } else if (fileDesc.equals("Emis referrals file")) { fileDesc = "ObservationReferral"; } else if (fileDesc.equals("Emis session-user link file")) { fileDesc = "SessionUser"; } else if (fileDesc.equals("Emis sharing agreements file")) { fileDesc = "SharingOrganisation"; } else if (fileDesc.equals("Emis staff file")) { fileDesc = "UserInRole"; } String fileType = null; switch (fileDesc) { case "ClinicalCode": fileType = "Coding_ClinicalCode"; break; case "Consultation": fileType = "CareRecord_Consultation"; break; case "Diary": fileType = "CareRecord_Diary"; break; case "DrugCode": fileType = "Coding_DrugCode"; break; case "DrugRecord": fileType = "Prescribing_DrugRecord"; break; case "IssueRecord": fileType = "Prescribing_IssueRecord"; break; case "Location": fileType = "Admin_Location"; break; case "Observation": fileType = "CareRecord_Observation"; break; case "ObservationReferral": fileType = "CareRecord_ObservationReferral"; break; case "Organisation": fileType = "Admin_Organisation"; break; case "OrganisationLocation": fileType = "Admin_OrganisationLocation"; break; case "Patient": fileType = "Admin_Patient"; break; case "Problem": fileType = "CareRecord_Problem"; break; case "Session": fileType = "Appointment_Session"; break; case "SessionUser": fileType = "Appointment_SessionUser"; break; case "SharingOrganisation": fileType = "Agreements_SharingOrganisation"; break; case "Slot": fileType = "Appointment_Slot"; break; case "UserInRole": fileType = "Admin_UserInRole"; break; default: throw new Exception("Unknown file type [" + 
fileDesc + "]"); } *//*String prefix = TransformConfig.instance().getSharedStoragePath(); prefix += "/"; if (!filePath.startsWith(prefix)) { throw new Exception("File path [" + filePath + "] doesn't start with " + prefix); } filePath = filePath.substring(prefix.length());*//* ExchangePayloadFile p = new ExchangePayloadFile(); p.setPath(filePath); p.setType(fileType); List<ExchangePayloadFile> files = new ArrayList<>(); files.add(p); String version = EmisCsvToFhirTransformer.determineVersion(files); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers); Iterator<AbstractCsvParser> it = parsers.values().iterator(); return it.next(); } if (filePath.contains("BARTSDW")) { return null; } throw new Exception("Unknown file desc [" + fileDesc + "] for " + filePath); }*/ /*private static void moveS3ToAudit(int threads) { LOG.info("Moving S3 to Audit"); try { //list S3 contents List<FileInfo> files = FileHelper.listFilesInSharedStorageWithInfo("s3://discoveryaudit/audit"); LOG.debug("Found " + files.size() + " audits"); int countPerThread = files.size() / threads; int pos = 0; AtomicInteger done = new AtomicInteger(); List<Thread> threadList = new ArrayList<>(); for (int i=0; i<threads; i++) { List<FileInfo> perThread = new ArrayList<>(); int countThisThread = countPerThread; if (i+1 == threads) { countThisThread = files.size() - pos; } for (int j=0; j<countThisThread; j++) { FileInfo fileInfo = files.get(pos); pos ++; perThread.add(fileInfo); } MoveToS3Runnable r = new MoveToS3Runnable(perThread, done); Thread t = new Thread(r); threadList.add(t); t.start(); } while (true) { Thread.sleep(5000); boolean allDone = true; for (Thread t: threadList) { if (t.getState() != Thread.State.TERMINATED) { //if (!t.isAlive()) { allDone = false; break; } } if (allDone) { break; } } LOG.debug("Finished with " + done.get() + " / " + files.size()); LOG.info("Finished Moving S3 to Audit"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void convertEmisGuids() { LOG.debug("Converting Emis Guid"); try { Map<String, String> map = new HashMap<>(); //this list of guids and dates is based off the live Emis extracts, giving the most recent bulk date for each organisation //only practices where the extract started before the move to AWS and where the extract was disabled and re-bulked need to be in here. //Practices disabled and re-bulked since the move to AWS are handled differently. 
map.put("{DD31E915-7076-46CF-99CD-8378AB588B69}", "20/07/2017"); map.put("{87A8851C-3DA4-4BE0-869C-3BF6BA7C0612}", "15/10/2017"); map.put("{612DCB3A-5BE6-4D50-909B-F0F20565F9FC}", "09/08/2017"); map.put("{15667F8D-46A0-4A87-9FA8-0C56B157A0A9}", "05/05/2017"); map.put("{3CFEFBF9-B856-4A40-A39A-4EB6FA39295E}", "31/01/2017"); map.put("{3F481450-AD19-4793-B1F0-40D5C2C57EF7}", "04/11/2017"); map.put("{83939542-20E4-47C5-9883-BF416294BB22}", "13/10/2017"); map.put("{73AA7E3A-4331-4167-8711-FE07DDBF4657}", "15/10/2017"); map.put("{3B703CCF-C527-4EC8-A802-00D3B1535DD0}", "01/02/2017"); map.put("{ED442CA3-351F-43E4-88A2-2EEACE39A402}", "13/10/2017"); map.put("{86537B5B-7CF3-4964-8906-7C10929FBC20}", "13/05/2017"); map.put("{9A4518C4-82CE-4509-8039-1B5F49F9C1FA}", "12/08/2017"); map.put("{16D7F8F9-4A35-44B1-8F1D-DD0162584684}", "11/07/2017"); map.put("{D392C499-345C-499B-898C-93F2CB8CC1B9}", "15/10/2017"); map.put("{5B87882A-0EE8-4233-93D0-D2F5F4F94040}", "15/03/2017"); map.put("{CFE3B460-9058-47FB-BF1D-6BEC13A2257D}", "19/04/2017"); map.put("{7B03E105-9275-47CC-8022-1469FE2D6AE4}", "20/04/2017"); map.put("{94470227-587C-47D7-A51F-9893512424D8}", "27/04/2017"); map.put("{734F4C99-6326-4CA4-A22C-632F0AC12FFC}", "17/10/2017"); map.put("{03C5B4B4-1A70-45F8-922E-135C826D48E0}", "20/04/2017"); map.put("{1BB17C3F-CE80-4261-AF6C-BE987E3A5772}", "09/05/2017"); map.put("{16F6DD42-2140-4395-95D5-3FA50E252896}", "20/04/2017"); map.put("{3B6FD632-3FFB-48E6-9775-287F6C486752}", "15/10/2017"); map.put("{F987F7BD-E19C-46D2-A446-913489F1BB7A}", "05/02/2017"); map.put("{BE7CC1DC-3CAB-4BB1-A5A2-B0C854C3B78E}", "06/07/2017"); map.put("{303EFA4E-EC8F-4CBC-B629-960E4D799E0D}", "15/10/2017"); map.put("{5EE8FD1F-F23A-4209-A1EE-556F9350C900}", "01/02/2017"); map.put("{04F6C555-A298-45F1-AC5E-AC8EBD2BB720}", "17/10/2017"); map.put("{67383254-F7F1-4847-9AA9-C7DCF32859B8}", "17/10/2017"); map.put("{31272E4E-40E0-4103-ABDC-F40A7B75F278}", "19/10/2017"); map.put("{09CA2E3B-7143-4999-9934-971F3F2E6D8C}", "15/10/2017"); map.put("{0527BCE2-4315-47F2-86A1-2E9F3E50399B}", "15/10/2017"); map.put("{16DD14B5-D1D5-4B0C-B886-59AC4DACDA7A}", "04/07/2017"); map.put("{411D0A79-6913-473C-B486-C01F6430D8A6}", "21/09/2017"); map.put("{0862FADA-594A-415E-B971-7A4312E0A58C}", "10/06/2017"); map.put("{249C3F3C-24F0-44CE-97A9-B535982BD70C}", "15/10/2017"); map.put("{5D7A1915-6E22-4B20-A8AE-4768C06D3BBF}", "28/09/2017"); //Barts community map.put("{131AE556-8B50-4C17-9D7D-A4B19F7B1FEA}", "15/10/2017"); //Aberfeldy practice F84698 map.put("{C0D2D0DF-EF78-444D-9A6D-B9EDEF5EF350}", "13/10/2017"); map.put("{F174B354-4156-4BCB-960F-35D0145075EA}", "01/02/2017"); map.put("{38600D63-1DE0-4910-8ED6-A38DC28A9DAA}", "19/02/2018"); //THE SPITALFIELDS PRACTICE (CDB 16);F84081 map.put("{B3ECA2DE-D926-4594-B0EA-CF2F28057CE1}", "19/10/2017"); map.put("{18F7C28B-2A54-4F82-924B-38C60631FFFA}", "04/02/2018"); //Rowans Surgery (CDB 18174);H85035 map.put("{16FB5EE8-5039-4068-BC42-1DB56DC2A530}", "08/06/2017"); map.put("{4BA4A5AC-7B25-40B2-B0EA-135702A72F9D}", "15/10/2017"); map.put("{01B8341F-BC8F-450E-8AFA-4CDA344A5009}", "15/10/2017"); map.put("{E6FBEA1C-BDA2-40B7-A461-C262103F08D7}", "08/06/2017"); map.put("{141C68EB-1BC8-4E99-A9D9-0E63A8944CA9}", "15/10/2017"); map.put("{A3EA804D-E7EB-43EE-8F1F-E860F6337FF7}", "15/10/2017"); map.put("{771B42CC-9C0C-46E2-8143-76F04AF91AD5}", "13/11/2017"); //cranwich road map.put("{16EA8D5C-C667-4818-B629-5D6F4300FEEF}", "11/05/2017"); map.put("{29E51964-C94D-4CB4-894E-EB18E27DEFC1}", "15/10/2017"); 
map.put("{3646CCA5-7FE4-4DFE-87CD-DA3CE1BA885D}", "27/09/2017"); map.put("{3EC82820-702F-4218-853B-D3E5053646A8}", "05/05/2017"); map.put("{37F3E676-B203-4329-97F8-2AF5BFEAEE5A}", "19/10/2017"); map.put("{A0E3208B-95E9-4284-9B5A-D4D387CCC9F9}", "07/06/2017"); map.put("{0BEAF1F0-9507-4AC2-8997-EC0BA1D0247E}", "19/10/2017"); map.put("{071A50E7-1764-4210-94EF-6A4BF96CF753}", "21/02/2017"); map.put("{0C1983D8-FB7D-4563-84D0-1F8F6933E786}", "20/07/2017"); map.put("{871FEEB2-CE30-4603-B9A3-6FA6CC47B5D4}", "15/10/2017"); map.put("{42906EBE-8628-486D-A52F-27B935C9937A}", "01/02/2017"); map.put("{1AB7ABF3-2572-4D07-B719-CFB2FE3AAC80}", "15/10/2017"); map.put("{E312A5B7-13E7-4E43-BE35-ED29F6216D3C}", "20/04/2017"); map.put("{55E60891-8827-40CD-8011-B0223D5C8970}", "15/10/2017"); map.put("{03A63F52-7FEE-4592-9B54-83CEBCF67B5D}", "26/04/2017"); map.put("{DB39B649-B48D-4AC2-BAB1-AC807AABFAC4}", "15/10/2017"); map.put("{0AF9B2AF-A0FB-40B0-BA05-743BA6845DB1}", "26/08/2017"); map.put("{A7600092-319C-4213-92C2-738BEEFC1609}", "31/01/2017"); map.put("{5A1AABA9-7E96-41E7-AF18-E02F4CF1DFB6}", "15/10/2017"); map.put("{7D8CE31D-66AA-4D6A-9EFD-313646BD1D73}", "15/10/2017"); map.put("{03EA4A79-B6F1-4524-9D15-992B47BCEC9A}", "15/10/2017"); map.put("{4588C493-2EA3-429A-8428-E610AE6A6D76}", "28/09/2017"); //Barts community map.put("{B13F3CC9-C317-4E0D-9C57-C545E4A53CAF}", "15/10/2017"); map.put("{463DA820-6EC4-48CB-B915-81B31AFBD121}", "13/10/2017"); map.put("{16F0D65C-B2A8-4186-B4E7-BBAF4390EC55}", "13/10/2017"); map.put("{0039EF15-2DCF-4F70-B371-014C807210FD}", "24/05/2017"); map.put("{E132BF05-78D9-4E4B-B875-53237E76A0FA}", "19/10/2017"); map.put("{3DFC2DA6-AD8C-4836-945D-A6F8DB22AA49}", "15/10/2017"); map.put("{BCB43B1D-2857-4186-918B-460620F98F81}", "13/10/2017"); map.put("{E134C74E-FA3E-4E14-A4BB-314EA3D3AC16}", "15/10/2017"); map.put("{C0F40044-C2CA-4D1D-95D3-553B29992385}", "26/08/2017"); map.put("{B174A018-538D-4065-838C-023A245B53DA}", "14/02/2017"); map.put("{43380A69-AE7D-4ED7-B014-0708675D0C02}", "08/06/2017"); map.put("{E503F0E0-FE56-4CEF-BAB5-0D25B834D9BD}", "13/10/2017"); map.put("{08946F29-1A53-4AF2-814B-0B8758112F21}", "07/02/2018"); //NEWHAM MEDICAL CENTRE (CDB 3461);F84669 map.put("{09857684-535C-4ED6-8007-F91F366611C6}", "19/10/2017"); map.put("{C409A597-009A-4E11-B828-A595755DE0EA}", "17/10/2017"); map.put("{58945A1C-2628-4595-8F8C-F75D93045949}", "15/10/2017"); map.put("{16FF2874-20B0-4188-B1AF-69C97055AA60}", "17/10/2017"); map.put("{2C91E9DA-3F92-464E-B6E6-61D3DE52E62F}", "15/10/2017"); map.put("{16E7AD27-2AD9-43C0-A473-1F39DF93E981}", "10/06/2017"); map.put("{A528478D-65DB-435C-9E98-F8BDB49C9279}", "20/04/2017"); map.put("{A2BDB192-E79C-44C5-97A2-1FD4517C456F}", "21/08/2017"); map.put("{73DFF193-E917-4DBC-B5CF-DD2797B29377}", "15/10/2017"); map.put("{62825316-9107-4E2C-A22C-86211B4760DA}", "13/10/2017"); map.put("{006E8A30-2A45-4DBE-91D7-1C53FADF38B1}", "28/01/2018"); //The Lawson Practice (CDB 4334);F84096 map.put("{E32AA6A6-46B1-4198-AA13-058038AB8746}", "13/10/2017"); map.put("{B51160F1-79E3-4BA7-AA3D-1112AB341146}", "30/09/2017"); map.put("{234503E5-56B4-45A0-99DA-39854FBE78E9}", "01/02/2017"); map.put("{7D1852DA-E264-4599-B9B4-8F40207F967D}", "09/10/2017"); map.put("{44716213-7FEE-4247-A09E-7285BD6B69C6}", "13/10/2017"); map.put("{19BCC870-2704-4D21-BA7B-56F2F472AF35}", "15/10/2017"); map.put("{FEF842DA-FD7C-480F-945A-D097910A81EB}", "13/10/2017"); map.put("{1C980E19-4A39-4ACD-BA8A-925D3E525765}", "13/10/2017"); map.put("{AABDDC3A-93A4-4A87-9506-AAF52E74012B}", "07/02/2018"); //DR N 
DRIVER AND PARTNERS (CDB 4419);F84086 map.put("{90C2959C-0C2D-43DC-A81B-4AD594C17999}", "20/04/2017"); map.put("{1F1669CF-1BB0-47A7-8FBF-BE65651644C1}", "15/10/2017"); map.put("{C1800BE8-4C1D-4340-B0F2-7ED208586ED3}", "15/10/2017"); map.put("{55A94703-4582-46FB-808A-1990E9CBCB6F}", "19/02/2018"); //Stamford Hill Group Practice (CDB 56);F84013 map.put("{D4996E62-268F-4759-83A6-7A68D0B38CEC}", "27/04/2017"); map.put("{3C843BBA-C507-4A95-9934-1A85B977C7B8}", "01/02/2017"); map.put("{2216253B-705D-4C46-ADB3-ED48493D6A39}", "03/02/2018"); //RIVERSIDE MEDICAL PRACTICE (CDB 14675);Y01962 map.put("{00123F97-4557-44AD-81B5-D9902DD72EE9}", "28/04/2017"); map.put("{E35D4D12-E7D2-484B-BFF6-4653B3FED228}", "15/10/2017"); map.put("{6D8B4D28-838B-4915-A148-6FEC2CEBCE77}", "05/07/2017"); map.put("{188D5B4D-4BF6-46E3-AF11-3AD32C68D251}", "19/10/2017"); map.put("{16F7DDE1-3763-4D3A-A58D-F12F967718CF}", "02/11/2017"); map.put("{03148933-6E1C-4A8A-A6D2-A3D488E14DDD}", "30/12/2017"); map.put("{16DE1A3C-875B-4AB2-B227-8A42604E029C}", "05/11/2017"); map.put("{D628D1BC-D02E-4101-B8CD-5B3DB2D06FC1}", "05/05/2017"); map.put("{1EA6259A-6A49-46DB-991D-D604675F87E2}", "15/10/2017"); map.put("{817F9B46-AEE0-45D5-95E3-989F75C4844E}", "20/04/2017"); map.put("{1C422471-F52A-4C30-8D23-140BEB7AAEFC}", "15/08/2017"); map.put("{A6467E73-0F15-49D6-AFAB-4DFB487E7963}", "10/05/2017"); map.put("{CC7D1781-1B85-4AD6-A5DD-9AD5E092E8DB}", "13/10/2017"); map.put("{167CD5C8-148F-4D78-8997-3B22EC0AF6B6}", "13/10/2017"); map.put("{9DD5D2CE-2585-49D8-AF04-2CB1BD137594}", "15/10/2017"); map.put("{D6696BB5-DE69-49D1-BC5E-C56799E42640}", "07/02/2018"); //BOLEYN MEDICAL CENTRE (CDB 4841);F84050 map.put("{169375A9-C3AB-4C5E-82B0-DFF7656AD1FA}", "20/04/2017"); map.put("{0A8ECFDE-95EE-4811-BC05-668D49F5C799}", "19/11/2017"); map.put("{79C898A1-BB92-48F9-B0C3-6725370132B5}", "20/10/2017"); map.put("{472AC9BA-AFFE-4E81-81CA-40DD8389784D}", "27/04/2017"); map.put("{00121CB7-76A6-4D57-8260-E9CA62FFCD77}", "13/10/2017"); map.put("{0FCBA0A7-7CAB-4E75-AC81-5041CD869CA1}", "15/10/2017"); map.put("{00A9C32D-2BB2-4A20-842A-381B3F2031C0}", "19/10/2017"); map.put("{26597C5A-3E29-4960-BE11-AC75D0430615}", "03/05/2017"); map.put("{D945FEF7-F5EF-422B-AB35-6937F9792B54}", "15/10/2017"); map.put("{16D685C6-130A-4B19-BCA9-90AC7DC16346}", "08/07/2017"); map.put("{F09E9CEF-2615-4C9D-AA3D-79E0AB10D0B3}", "13/10/2017"); map.put("{CD7EF748-DB88-49CF-AA6E-24F65029391F}", "15/10/2017"); map.put("{B22018CF-2B52-4A1A-9F6A-CEA13276DB2E}", "19/10/2017"); map.put("{0DF8CFC7-5DE6-4DDB-846A-7F28A2740A00}", "02/12/2017"); map.put("{50F439E5-DB18-43A0-9F25-825957013A07}", "11/01/2018"); //DR PI ABIOLA (CDB 5681);F84631 map.put("{00A3BA25-21C6-42DE-82AA-55FF0D85A6C3}", "31/10/2018"); //MARKET STREET HEALTH GROUP (CDB 381);F84004 map.put("{77B59D29-0FD9-4737-964F-5DBA49D94AB6}", "31/10/2018"); //Star Lane Medical Centre (CDB 40);F84017 map.put("{91239362-A105-4DEA-8E8E-239C3BCEDFD2}", "11/01/2018"); //BEECHWOOD MEDICAL CENTRE (CDB 5661);F84038 map.put("{53A113F5-6E3B-410F-A473-53E38A79335B}", "01/06/2018"); //ELFT Community RWKGY CDB 25362 map.put("{164BE8EC-E2D5-40DE-A5FC-25E058A5C47E}", "17/10/2018"); //Haiderian Medical Centre F82002 map.put("{164CE1B0-F7B3-44AF-B1E4-3DA6C64DEA4C}", "26/11/2018"); //THE GREEN WOOD PRACTICE F82007 map.put("{A30A4BB7-B17B-11D9-AD5F-00D0B77FCBFC}", "26/11/2018"); //Tulasi Medical Practice F82660 LOG.debug("Starting with map size " + map.size()); Map<String, String> hmGuidToOdsMap = new HashMap<>(); UUID systemId = 
UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { UUID serviceId = service.getId(); String ods = service.getLocalId(); String orgGuid = null; List<Exchange> exchanges = exchangeDalI.getExchangesByService(serviceId, systemId, 5); for (Exchange exchange: exchanges) { String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (!files.isEmpty()) { ExchangePayloadFile first = files.get(0); String path = first.getPath(); if (path.indexOf("EMIS_CUSTOM") > -1) { continue; } File f = new File(path); f = f.getParentFile(); //org GUID orgGuid = f.getName(); break; } } if (orgGuid == null) { LOG.error("Failed to find OrgGuid for " + service.getName() + " " + ods); } else { hmGuidToOdsMap.put(orgGuid, ods); } } //create new code for (String orgGuid: map.keySet()) { String dateStr = map.get(orgGuid); String odsCode = hmGuidToOdsMap.get(orgGuid); if (Strings.isNullOrEmpty(odsCode)) { LOG.error("Missing ODS code for " + orgGuid); } else { System.out.println("map.put(\"" + odsCode + "\", \"" + dateStr + "\");"); } } LOG.debug("Finished Converting Emis Guid"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testS3VsMySql(UUID serviceUuid, int count, int sqlBatchSize, String bucketName) { LOG.debug("Testing S3 vs MySQL for service " + serviceUuid); try { //retrieve some audit JSON from the DB EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); String sql = "select resource_id, resource_type, version, mappings_json" + " from resource_field_mappings" + " where mappings_json != '[]'"; if (count > -1) { sql += "limit " + count + ";"; } Statement statement = connection.createStatement(); statement.setFetchSize(1000); ResultSet rs = statement.executeQuery(sql); List<ResourceFieldMapping> list = new ArrayList<>(); while (rs.next()) { int col = 1; String resourceId = rs.getString(col++); String resourceType = rs.getString(col++); String version = rs.getString(col++); String json = rs.getString(col++); ResourceFieldMapping obj = new ResourceFieldMapping(); obj.setResourceId(UUID.fromString(resourceId)); obj.setResourceType(resourceType); obj.setVersion(UUID.fromString(version)); obj.setResourceField(json); list.add(obj); } rs.close(); statement.close(); entityManager.close(); int done = 0; //test writing to S3 long s3Start = System.currentTimeMillis(); LOG.debug("Doing S3 test"); for (int i=0; i<list.size(); i++) { ResourceFieldMapping mapping = list.get(i); String entryName = mapping.getVersion().toString() + ".json"; String keyName = "auditTest/" + serviceUuid + "/" + mapping.getResourceType() + "/" + mapping.getResourceId() + "/" + mapping.getVersion() + ".zip"; String jsonStr = mapping.getResourceField(); //may as well zip the data, since it will compress well ByteArrayOutputStream baos = new ByteArrayOutputStream(); ZipOutputStream zos = new ZipOutputStream(baos); zos.putNextEntry(new ZipEntry(entryName)); zos.write(jsonStr.getBytes()); zos.flush(); zos.close(); byte[] bytes = baos.toByteArray(); ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes); //ProfileCredentialsProvider credentialsProvider = new ProfileCredentialsProvider(); 
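                //DefaultAWSCredentialsProviderChain resolves credentials from environment variables, system properties, the shared credentials file or the instance profile, rather than a single named profile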
DefaultAWSCredentialsProviderChain credentialsProvider = DefaultAWSCredentialsProviderChain.getInstance(); AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder .standard() .withCredentials(credentialsProvider) .withRegion(Regions.EU_WEST_2); AmazonS3 s3Client = clientBuilder.build(); ObjectMetadata objectMetadata = new ObjectMetadata(); objectMetadata.setSSEAlgorithm(ObjectMetadata.AES_256_SERVER_SIDE_ENCRYPTION); objectMetadata.setContentLength(bytes.length); PutObjectRequest putRequest = new PutObjectRequest(bucketName, keyName, byteArrayInputStream, objectMetadata); s3Client.putObject(putRequest); done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + list.size()); } } long s3End = System.currentTimeMillis(); LOG.debug("S3 took " + (s3End - s3Start) + " ms"); //test inserting into a DB long sqlStart = System.currentTimeMillis(); LOG.debug("Doing SQL test"); sql = "insert into drewtest.json_speed_test (resource_id, resource_type, created_at, version, mappings_json) values (?, ?, ?, ?, ?)"; entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); PreparedStatement ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); done = 0; int currentBatchSize = 0; for (int i=0; i<list.size(); i++) { ResourceFieldMapping mapping = list.get(i); int col = 1; ps.setString(col++, mapping.getResourceId().toString()); ps.setString(col++, mapping.getResourceType()); ps.setDate(col++, new java.sql.Date(System.currentTimeMillis())); ps.setString(col++, mapping.getVersion().toString()); ps.setString(col++, mapping.getResourceField()); ps.addBatch(); currentBatchSize ++; if (currentBatchSize >= sqlBatchSize || i+1 == list.size()) { ps.executeBatch(); entityManager.getTransaction().commit(); //mirror what would happen normally ps.close(); entityManager.close(); if (i+1 < list.size()) { entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceUuid); session = (SessionImpl) entityManager.getDelegate(); connection = session.connection(); ps = connection.prepareStatement(sql); entityManager.getTransaction().begin(); } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + list.size()); } } long sqlEnd = System.currentTimeMillis(); LOG.debug("SQL took " + (sqlEnd - sqlStart) + " ms"); LOG.debug("Finished Testing S3 vs MySQL for service " + serviceUuid); } catch (Throwable t) { LOG.error("", t); } }*/ private static void loadEmisData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String onlyThisFileType) { LOG.debug("Loading Emis data from into " + dbUrl); try { //hash file type of every file ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword); SimpleDateFormat sdfStart = new SimpleDateFormat("yyyy-MM-dd"); Date startDate = sdfStart.parse("2000-01-01"); for (int i = exchanges.size() - 1; i >= 0; i--) { Exchange exchange = exchanges.get(i); String exchangeBody = exchange.getBody(); List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody); if (files.isEmpty()) { continue; } for (ExchangePayloadFile file : files) { String type = file.getType(); String path = file.getPath(); //if only doing a 
                    //specific file type, skip all others
                    if (onlyThisFileType != null && !type.equals(onlyThisFileType)) {
                        continue;
                    }

                    String name = FilenameUtils.getBaseName(path);
                    String[] toks = name.split("_");
                    if (toks.length != 5) {
                        throw new TransformException("Failed to find extract date in filename " + path);
                    }
                    String dateStr = toks[3];
                    SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
                    Date extractDate = sdf.parse(dateStr);

                    boolean processFile = false;
                    if (type.equalsIgnoreCase("OriginalTerms")
                            || type.equalsIgnoreCase("RegistrationStatus")) {
                        //can't process these custom files in this routine

                    } else if (type.equalsIgnoreCase("Coding_ClinicalCode")
                            || type.equalsIgnoreCase("Coding_DrugCode")) {
                        processFile = true;

                    } else {
                        if (!extractDate.before(startDate)) {
                            processFile = true;
                        }
                    }

                    if (processFile) {
                        loadEmisDataFromFile(conn, path, type, extractDate);
                    }
                }
            }

            conn.close();

            LOG.debug("Finished loading Emis data into " + dbUrl);

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private static ParserI createParserForEmisFileType(String fileType, String filePath) {

        String[] toks = fileType.split("_");
        String domain = toks[0];
        String name = toks[1];

        String first = domain.substring(0, 1);
        String last = domain.substring(1);
        domain = first.toLowerCase() + last;

        try {
            String clsName = "org.endeavourhealth.transform.emis.csv.schema." + domain + "." + name;
            Class cls = Class.forName(clsName);

            //now construct an instance of the parser for the file we've found
            Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
            return constructor.newInstance(null, null, null, EmisCsvToFhirTransformer.VERSION_5_4, filePath);

        } catch (Exception ex) {
            LOG.error("No parser for file type [" + fileType + "]");
            LOG.error("", ex);
            return null;
        }
    }

    private static void loadEmisDataFromFile(Connection conn, String filePath, String fileType, Date extractDate) throws Exception {
        LOG.debug("Loading " + fileType + ": " + filePath);

        String fileName = FilenameUtils.getName(filePath);

        ParserI parser = createParserForEmisFileType(fileType, filePath);
        if (parser == null) {
            return;
        }

        String table = fileType.replace(" ", "_");

        //check table is there
        String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1";
        Statement statement = conn.createStatement();
        ResultSet rs = statement.executeQuery(sql);
        boolean tableExists = rs.next();
        rs.close();
        statement.close();

        if (!tableExists) {
            LOG.error("No table exists for " + table);
            return;
        }

        //create insert statement
        sql = "INSERT INTO `" + table + "` (";
        sql += "file_name, extract_date";
        List<String> cols = parser.getColumnHeaders();
        for (String col : cols) {
            sql += ", ";
            sql += col.replace(" ", "_").replace("#", "").replace("/", "");
        }
        sql += ") VALUES (";
        sql += "?, ?";
        for (String col : cols) {
            sql += ", ";
            sql += "?";
        }
        sql += ")";

        PreparedStatement ps = conn.prepareStatement(sql);

        List<String> currentBatchStrs = new ArrayList<>();

        //load table
        try {
            int done = 0;
            int currentBatchSize = 0;

            while (parser.nextRecord()) {

                int col = 1;

                //file name is always first
                ps.setString(col++, fileName);
                ps.setDate(col++, new java.sql.Date(extractDate.getTime()));

                for (String colName : cols) {
                    CsvCell cell = parser.getCell(colName);
                    if (cell == null) {
                        ps.setNull(col++, Types.VARCHAR);
                    } else {
                        ps.setString(col++, cell.getString());
                    }
                }

                ps.addBatch();
                currentBatchSize++;
                currentBatchStrs.add((ps.toString())); //for error handling

                if (currentBatchSize >= 5) {
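                    //flush to the database in small batches; currentBatchStrs keeps the statements so they can be logged if the batch fails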
ps.executeBatch(); currentBatchSize = 0; currentBatchStrs.clear(); } done++; if (done % 5000 == 0) { LOG.debug("Done " + done); } } if (currentBatchSize >= 0) { ps.executeBatch(); } ps.close(); } catch (Throwable t) { LOG.error("Failed on batch with statements:"); for (String currentBatchStr : currentBatchStrs) { LOG.error(currentBatchStr); } throw t; } LOG.debug("Finished " + fileType + ": " + filePath); } private static void createBartsDataTables() { LOG.debug("Creating Barts data tables"); try { List<String> fileTypes = new ArrayList<>(); fileTypes.add("AEATT"); fileTypes.add("Birth"); //fileTypes.add("BulkDiagnosis"); //fileTypes.add("BulkProblem"); //fileTypes.add("BulkProcedure"); fileTypes.add("CLEVE"); fileTypes.add("CVREF"); fileTypes.add("DIAGN"); fileTypes.add("Diagnosis"); fileTypes.add("ENCINF"); fileTypes.add("ENCNT"); fileTypes.add("FamilyHistory"); fileTypes.add("IPEPI"); fileTypes.add("IPWDS"); fileTypes.add("LOREF"); fileTypes.add("NOMREF"); fileTypes.add("OPATT"); fileTypes.add("ORDER"); fileTypes.add("ORGREF"); fileTypes.add("PPADD"); fileTypes.add("PPAGP"); fileTypes.add("PPALI"); fileTypes.add("PPATI"); fileTypes.add("PPINF"); fileTypes.add("PPNAM"); fileTypes.add("PPPHO"); fileTypes.add("PPREL"); fileTypes.add("Pregnancy"); fileTypes.add("Problem"); fileTypes.add("PROCE"); fileTypes.add("Procedure"); fileTypes.add("PRSNLREF"); fileTypes.add("SusEmergency"); fileTypes.add("SusInpatient"); fileTypes.add("SusOutpatient"); fileTypes.add("EventCode"); fileTypes.add("EventSetCanon"); fileTypes.add("EventSet"); fileTypes.add("EventSetExplode"); fileTypes.add("BlobContent"); fileTypes.add("SusInpatientTail"); fileTypes.add("SusOutpatientTail"); fileTypes.add("SusEmergencyTail"); fileTypes.add("AEINV"); fileTypes.add("AETRE"); fileTypes.add("OPREF"); fileTypes.add("STATREF"); fileTypes.add("RTTPE"); fileTypes.add("PPATH"); fileTypes.add("DOCRP"); fileTypes.add("SCHAC"); fileTypes.add("EALEN"); fileTypes.add("DELIV"); fileTypes.add("EALOF"); fileTypes.add("SusEmergencyCareDataSet"); fileTypes.add("SusEmergencyCareDataSetTail"); for (String fileType : fileTypes) { createBartsDataTable(fileType); } LOG.debug("Finished Creating Barts data tables"); } catch (Throwable t) { LOG.error("", t); } } private static void createBartsDataTable(String fileType) throws Exception { ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." 
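            //e.g. file type "PPATI" resolves to org.endeavourhealth.transform.barts.schema.PPATI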
                    + fileType;
            Class cls = Class.forName(clsName);

            //now construct an instance of the parser for the file we've found
            Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class);
            parser = constructor.newInstance(null, null, null, null, null);

        } catch (ClassNotFoundException cnfe) {
            System.out.println("-- No parser for file type [" + fileType + "]");
            return;
        }

        System.out.println("-- " + fileType);

        String table = fileType.replace(" ", "_");

        String dropSql = "DROP TABLE IF EXISTS `" + table + "`;";
        System.out.println(dropSql);

        String sql = "CREATE TABLE `" + table + "` (";
        sql += "file_name varchar(100)";

        if (parser instanceof AbstractFixedParser) {
            AbstractFixedParser fixedParser = (AbstractFixedParser) parser;
            List<FixedParserField> fields = fixedParser.getFieldList();
            for (FixedParserField field : fields) {
                String col = field.getName();
                int len = field.getFieldlength();
                sql += ", ";
                sql += col.replace(" ", "_").replace("#", "").replace("/", "");
                sql += " varchar(";
                sql += len;
                sql += ")";
            }

        } else {
            List<String> cols = parser.getColumnHeaders();
            for (String col : cols) {
                sql += ", ";
                sql += col.replace(" ", "_").replace("#", "").replace("/", "");

                if (col.equals("BLOB_CONTENTS")
                        || col.equals("VALUE_LONG_TXT")
                        || col.equals("COMMENT_TXT")
                        || col.equals("NONPREG_REL_PROBLM_SCT_CD")
                        || col.equals("ORDER_COMMENTS_TXT")) {
                    sql += " mediumtext";

                } else if (col.indexOf("Date") > -1
                        || col.indexOf("Time") > -1) {
                    sql += " varchar(10)";

                } else {
                    sql += " varchar(255)";
                }
            }
        }

        sql += ");";

        /*LOG.debug("-- fileType");
        LOG.debug(sql);*/
        System.out.println(sql);
    }

    private static void loadBartsData(String serviceId, String systemId, String dbUrl, String dbUsername, String dbPassword, String startDateStr, String onlyThisFileType) {
        LOG.debug("Loading Barts data into " + dbUrl);

        try {
            //hash file type of every file
            ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal();
            List<Exchange> exchanges = exchangeDal.getExchangesByService(UUID.fromString(serviceId), UUID.fromString(systemId), Integer.MAX_VALUE);

            //open connection
            Class.forName("com.mysql.cj.jdbc.Driver");
            Connection conn = DriverManager.getConnection(dbUrl, dbUsername, dbPassword);

            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
            Date startDate = sdf.parse(startDateStr);

            for (int i = exchanges.size() - 1; i >= 0; i--) {
                Exchange exchange = exchanges.get(i);
                String exchangeBody = exchange.getBody();
                List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchangeBody);
                if (files.isEmpty()) {
                    continue;
                }

                for (ExchangePayloadFile file : files) {
                    String type = file.getType();
                    String path = file.getPath();

                    //if only doing a specific file type, skip all others
                    if (onlyThisFileType != null && !type.equals(onlyThisFileType)) {
                        continue;
                    }

                    boolean processFile = false;
                    if (type.equalsIgnoreCase("CVREF")
                            || type.equalsIgnoreCase("LOREF")
                            || type.equalsIgnoreCase("ORGREF")
                            || type.equalsIgnoreCase("PRSNLREF")
                            || type.equalsIgnoreCase("NOMREF")) {
                        processFile = true;

                    } else {
                        File f = new File(path);
                        File parentFile = f.getParentFile();
                        String parentDir = parentFile.getName();
                        Date extractDate = sdf.parse(parentDir);
                        if (!extractDate.before(startDate)) {
                            processFile = true;
                        }
                        /*if (!extractDate.before(startDate)
                                && !extractDate.after(endDate)) {
                            processFile = true;
                        }*/
                    }

                    if (processFile) {
                        loadBartsDataFromFile(conn, path, type);
                    }
                }
            }

            conn.close();

            LOG.debug("Finished loading Barts data into " + dbUrl);

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private
static void loadBartsDataFromFile(Connection conn, String filePath, String fileType) throws Exception { LOG.debug("Loading " + fileType + ": " + filePath); String fileName = FilenameUtils.getName(filePath); ParserI parser = null; try { String clsName = "org.endeavourhealth.transform.barts.schema." + fileType; Class cls = Class.forName(clsName); //now construct an instance of the parser for the file we've found Constructor<AbstractCsvParser> constructor = cls.getConstructor(UUID.class, UUID.class, UUID.class, String.class, String.class); parser = constructor.newInstance(null, null, null, null, filePath); } catch (ClassNotFoundException cnfe) { LOG.error("No parser for file type [" + fileType + "]"); return; } String table = fileType.replace(" ", "_"); //check table is there String sql = "SELECT 1 FROM information_schema.tables WHERE table_schema = database() AND table_name = '" + table + "' LIMIT 1"; Statement statement = conn.createStatement(); ResultSet rs = statement.executeQuery(sql); boolean tableExists = rs.next(); rs.close(); statement.close(); if (!tableExists) { LOG.error("No table exists for " + table); return; } //create insert statement sql = "INSERT INTO `" + table + "` ("; sql += "file_name"; List<String> cols = parser.getColumnHeaders(); for (String col : cols) { sql += ", "; sql += col.replace(" ", "_").replace("#", "").replace("/", ""); } sql += ") VALUES ("; sql += "?"; for (String col : cols) { sql += ", "; sql += "?"; } sql += ")"; PreparedStatement ps = conn.prepareStatement(sql); List<String> currentBatchStrs = new ArrayList<>(); //load table try { int done = 0; int currentBatchSize = 0; while (parser.nextRecord()) { int col = 1; //file name is always first ps.setString(col++, fileName); for (String colName : cols) { CsvCell cell = parser.getCell(colName); if (cell == null) { ps.setNull(col++, Types.VARCHAR); } else { ps.setString(col++, cell.getString()); } } ps.addBatch(); currentBatchSize++; currentBatchStrs.add((ps.toString())); //for error handling if (currentBatchSize >= 5) { ps.executeBatch(); currentBatchSize = 0; currentBatchStrs.clear(); } done++; if (done % 5000 == 0) { LOG.debug("Done " + done); } } if (currentBatchSize >= 0) { ps.executeBatch(); } ps.close(); } catch (Throwable t) { LOG.error("Failed on batch with statements:"); for (String currentBatchStr : currentBatchStrs) { LOG.error(currentBatchStr); } throw t; } LOG.debug("Finished " + fileType + ": " + filePath); } /*private static void fixPseudoIds(String subscriberConfig, int threads) { LOG.debug("Fixing Pseudo IDs for " + subscriberConfig); try { //update psuedo ID on patient table //update psuedo ID on person table //update pseudo ID on subscriber_transform mapping table JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } Connection subscriberConnection = 
EnterpriseFiler.openConnection(config); List<Long> patientIds = new ArrayList<>(); Map<Long, Long> hmOrgIds = new HashMap<>(); Map<Long, Long> hmPersonIds = new HashMap<>(); String sql = "SELECT id, organization_id, person_id FROM patient"; Statement statement = subscriberConnection.createStatement(); statement.setFetchSize(10000); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); long personId = rs.getLong(3); patientIds.add(new Long(patientId)); hmOrgIds.put(new Long(patientId), new Long(orgId)); hmPersonIds.put(new Long(patientId), new Long(personId)); } rs.close(); subscriberConnection.close(); LOG.debug("Found " + patientIds.size() + " patients"); AtomicInteger done = new AtomicInteger(); int pos = 0; List<Thread> threadList = new ArrayList<>(); for (int i=0; i<threads; i++) { List<Long> patientSubset = new ArrayList<>(); int count = patientIds.size() / threads; if (i+1 == threads) { count = patientIds.size() - pos; } for (int j=0; j<count; j++) { Long patientId = patientIds.get(pos); patientSubset.add(patientId); pos ++; } FixPseudoIdRunnable runnable = new FixPseudoIdRunnable(subscriberConfig, patientSubset, hmOrgIds, hmPersonIds, done); Thread t = new Thread(runnable); t.start(); threadList.add(t); } while (true) { Thread.sleep(5000); boolean allDone = true; for (Thread t: threadList) { if (t.getState() != Thread.State.TERMINATED) { //if (!t.isAlive()) { allDone = false; break; } } if (allDone) { break; } } LOG.debug("Finished Fixing Pseudo IDs for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } } static class FixPseudoIdRunnable implements Runnable { private String subscriberConfig = null; private List<Long> patientIds = null; private Map<Long, Long> hmOrgIds = null; private Map<Long, Long> hmPersonIds = null; private AtomicInteger done = null; public FixPseudoIdRunnable(String subscriberConfig, List<Long> patientIds, Map<Long, Long> hmOrgIds, Map<Long, Long> hmPersonIds, AtomicInteger done) { this.subscriberConfig = subscriberConfig; this.patientIds = patientIds; this.hmOrgIds = hmOrgIds; this.hmPersonIds = hmPersonIds; this.done = done; } @Override public void run() { try { doRun(); } catch (Throwable t) { LOG.error("", t); } } private void doRun() throws Exception { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Statement statement = subscriberConnection.createStatement(); JsonNode saltNode = config.get("pseudonymisation"); ObjectMapper mapper = new ObjectMapper(); Object json = mapper.readValue(saltNode.toString(), Object.class); String linkDistributors = mapper.writeValueAsString(json); LinkDistributorConfig salt = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig.class); LinkDistributorConfig[] arr = null; JsonNode linkDistributorsNode = config.get("linkedDistributors"); if (linkDistributorsNode != null) { json = mapper.readValue(linkDistributorsNode.toString(), Object.class); linkDistributors = mapper.writeValueAsString(json); arr = ObjectMapperPool.getInstance().readValue(linkDistributors, LinkDistributorConfig[].class); } //PseudoIdDalI pseudoIdDal = DalProvider.factoryPseudoIdDal(subscriberConfig); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection 
subscriberTransformConnection = session.connection(); Statement subscriberTransformStatement = subscriberTransformConnection.createStatement(); String sql = null; ResultSet rs = null; for (Long patientId: patientIds) { Long orgId = hmOrgIds.get(patientId); Long personId = hmPersonIds.get(patientId); //find service ID sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); //find patient ID sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = subscriberTransformStatement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); if (!resourceType.equals("Patient")) { throw new Exception("Not a patient resource type for enterprise ID " + patientId); } //get patient Resource resource = null; try { resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.Patient, resourceId); } catch (Exception ex) { throw new Exception("Failed to get patient " + resourceId + " for service " + serviceId, ex); } if (resource == null) { LOG.error("Failed to find patient resource for " + ResourceType.Patient + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; //generate new pseudo ID String pseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, salt); //save to person if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE person" + " SET pseudo_id = null" + " WHERE id = " + personId; statement.executeUpdate(sql); } else { sql = "UPDATE person" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + personId; statement.executeUpdate(sql); } //save to patient if (Strings.isNullOrEmpty(pseudoId)) { sql = "UPDATE patient" + " SET pseudo_id = null" + " WHERE id = " + patientId; statement.executeUpdate(sql); } else { sql = "UPDATE patient" + " SET pseudo_id = '" + pseudoId + "'" + " WHERE id = " + patientId; statement.executeUpdate(sql); } //linked distributers if (arr != null) { for (LinkDistributorConfig linked: arr) { String linkedPseudoId = PatientTransformer.pseudonymiseUsingConfig(patient, linked); sql = "INSERT INTO link_distributor (source_skid, target_salt_key_name, target_skid) VALUES ('" + pseudoId + "', '" + linked.getSaltKeyName() + "', '" + linkedPseudoId + "')" + " ON DUPLICATE KEY UPDATE" + " target_salt_key_name = VALUES(target_salt_key_name)," + " target_skid = VALUES(target_skid)"; statement.executeUpdate(sql); } } //save to subscriber transform sql = "DELETE FROM pseudo_id_map WHERE patient_id = '" + resourceId + "'"; subscriberTransformStatement.executeUpdate(sql); if (!Strings.isNullOrEmpty(pseudoId)) { sql = "INSERT INTO pseudo_id_map (patient_id, pseudo_id) VALUES ('" + resourceId + "', '" + pseudoId + "')"; subscriberTransformStatement.executeUpdate(sql); } subscriberConnection.commit(); subscriberTransformConnection.commit(); int doneLocal = done.incrementAndGet(); if (doneLocal % 1000 == 0) { LOG.debug("Done " + doneLocal); } } statement.close(); 
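            //tidy up - the person/patient/link_distributor updates and the pseudo_id_map updates were already committed per patient above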
subscriberTransformStatement.close(); subscriberConnection.close(); subscriberTransformConnection.close(); } }*/ /*private static void fixDeceasedPatients(String subscriberConfig) { LOG.debug("Fixing Deceased Patients for " + subscriberConfig); try { JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfig, "db_subscriber"); Connection subscriberConnection = EnterpriseFiler.openConnection(config); Map<Long, Long> patientIds = new HashMap<>(); String sql = "SELECT id, organization_id FROM patient WHERE date_of_death IS NOT NULL"; Statement statement = subscriberConnection.createStatement(); ResultSet rs = statement.executeQuery(sql); while (rs.next()) { long patientId = rs.getLong(1); long orgId = rs.getLong(2); patientIds.put(new Long(patientId), new Long(orgId)); } rs.close(); statement.close(); EnterpriseAgeUpdaterlDalI dal = DalProvider.factoryEnterpriseAgeUpdaterlDal(subscriberConfig); EntityManager entityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfig); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection subscriberTransformConnection = session.connection(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); for (Long patientId: patientIds.keySet()) { Long orgId = patientIds.get(patientId); statement = subscriberTransformConnection.createStatement(); sql = "SELECT service_id FROM enterprise_organisation_id_map WHERE enterprise_id = " + orgId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find service iD for patient ID " + patientId + " and org ID " + orgId); } String serviceId = rs.getString(1); rs.close(); sql = "SELECT resource_type, resource_id FROM enterprise_id_map WHERE enterprise_id = " + patientId; rs = statement.executeQuery(sql); if (!rs.next()) { throw new Exception("Failed to find resource iD for patient ID " + patientId); } String resourceType = rs.getString(1); String resourceId = rs.getString(2); rs.close(); statement.close(); Resource resource = resourceDal.getCurrentVersionAsResource(UUID.fromString(serviceId), ResourceType.valueOf(resourceType), resourceId); if (resource == null) { LOG.error("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); continue; //throw new Exception("Failed to find patient resource for " + resourceType + " " + resourceId + ", service ID = " + serviceId + " and patient ID = " + patientId); } Patient patient = (Patient)resource; Date dob = patient.getBirthDate(); Date dod = patient.getDeceasedDateTimeType().getValue(); Integer[] ages = dal.calculateAgeValuesAndUpdateTable(patientId, dob, dod); updateEnterprisePatient(patientId, ages, subscriberConnection); updateEnterprisePerson(patientId, ages, subscriberConnection); } subscriberConnection.close(); subscriberTransformConnection.close(); LOG.debug("Finished Fixing Deceased Patients for " + subscriberConfig); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void updateEnterprisePatient(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception { //the enterprise patient database isn't managed using hibernate, so we need to simply write a simple update statement StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient SET "); sb.append("age_years = ?, "); sb.append("age_months = ?, "); sb.append("age_weeks = ? 
"); sb.append("WHERE id = ?"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); LOG.info("Updated patient " + enterprisePatientId + " to ages " + ages[EnterpriseAge.UNIT_YEARS] + " y, " + ages[EnterpriseAge.UNIT_MONTHS] + " m " + ages[EnterpriseAge.UNIT_WEEKS] + " wks"); } private static void updateEnterprisePerson(long enterprisePatientId, Integer[] ages, Connection connection) throws Exception { //update the age fields on the person table where the person is for our patient and their pseudo IDs match StringBuilder sb = new StringBuilder(); sb.append("UPDATE patient, person SET "); sb.append("person.age_years = ?, "); sb.append("person.age_months = ?, "); sb.append("person.age_weeks = ? "); sb.append("WHERE patient.id = ? "); sb.append("AND patient.person_id = person.id "); sb.append("AND patient.pseudo_id = person.pseudo_id"); PreparedStatement update = connection.prepareStatement(sb.toString()); if (ages[EnterpriseAge.UNIT_YEARS] == null) { update.setNull(1, Types.INTEGER); } else { update.setInt(1, ages[EnterpriseAge.UNIT_YEARS]); } if (ages[EnterpriseAge.UNIT_MONTHS] == null) { update.setNull(2, Types.INTEGER); } else { update.setInt(2, ages[EnterpriseAge.UNIT_MONTHS]); } if (ages[EnterpriseAge.UNIT_WEEKS] == null) { update.setNull(3, Types.INTEGER); } else { update.setInt(3, ages[EnterpriseAge.UNIT_WEEKS]); } update.setLong(4, enterprisePatientId); update.addBatch(); update.executeBatch(); connection.commit(); }*/ /*private static void testS3Read(String s3BucketName, String keyName, String start, String len) { LOG.debug("Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); try { AmazonS3ClientBuilder clientBuilder = AmazonS3ClientBuilder .standard() .withCredentials(DefaultAWSCredentialsProviderChain.getInstance()) .withRegion(Regions.EU_WEST_2); AmazonS3 s3Client = clientBuilder.build(); GetObjectRequest request = new GetObjectRequest(s3BucketName, keyName); long startInt = Long.parseLong(start); long lenInt = Long.parseLong(len); long endInt = startInt + lenInt; request.setRange(startInt, endInt); long startMs = System.currentTimeMillis(); S3Object object = s3Client.getObject(request); InputStream inputStream = object.getObjectContent(); InputStreamReader reader = new InputStreamReader(inputStream, Charset.defaultCharset()); StringBuilder sb = new StringBuilder(); char[] buf = new char[100]; while (true) { int read = reader.read(buf); if (read == -1 || sb.length() >= lenInt) { break; } sb.append(buf, 0, read); } reader.close(); long endMs = System.currentTimeMillis(); LOG.debug("Read " + sb.toString() + " in " + (endMs - startMs) + " ms"); LOG.debug("Finished Testing S3 Read from " + s3BucketName + " " + keyName + " from " + start + " " + len + " bytes"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void createTransforMap(UUID serviceId, String table, String outputFile) { LOG.debug("Creating transform map for " + serviceId + " from " + table); try { //retrieve from table 
EntityManager transformEntityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session2 = (SessionImpl)transformEntityManager.getDelegate(); Connection mappingConnection = session2.connection(); EntityManager ehrEntityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session3 = (SessionImpl)ehrEntityManager.getDelegate(); Connection ehrConnection = session3.connection(); String sql = "SELECT resource_type, resource_id, version FROM " + table; Statement statement = mappingConnection.createStatement(); statement.setFetchSize(1000); ResultSet rs = statement.executeQuery(sql); LOG.debug("Got resource IDs from DB"); Map<String, Map<String, List<String>>> hm = new HashMap<>(); int count = 0; //build map up per resource while (rs.next()) { String resourceType = rs.getString("resource_type"); String resourceId = rs.getString("resource_id"); String resourceVersion = rs.getString("version"); *//*sql = "SELECT * FROM resource_field_mappings WHERE version = 'a905db26-1357-4710-90ef-474f256567ed';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*//* *//*sql = "SELECT * FROM resource_field_mappings WHERE version = ?"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql);*//* sql = "SELECT * FROM resource_field_mappings WHERE resource_type = '" + resourceType + "' AND resource_id = '" + resourceId + "' AND version = '" + resourceVersion + "';"; PreparedStatement statement1 = mappingConnection.prepareStatement(sql); //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //sql = "SELECT * FROM resource_field_mappings WHERE resource_type = ? AND resource_id = ? AND version = ?"; //statement1.setString(1, resourceVersion); *//*statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion);*//* ResultSet rs1 = null; try { rs1 = statement1.executeQuery(sql); } catch (Exception ex) { LOG.error("" + statement1); throw ex; } rs1.next(); String jsonStr = rs1.getString("mappings_json"); rs1.close(); statement1.close(); sql = "SELECT * FROM resource_history WHERE resource_type = ? AND resource_id = ? 
AND version = ?"; statement1 = ehrConnection.prepareStatement(sql); statement1.setString(1, resourceType); statement1.setString(2, resourceId); statement1.setString(3, resourceVersion); rs1 = statement1.executeQuery(); if (!rs1.next()) { throw new Exception("Failed to find resource_history for " + statement1.toString()); } String s = rs1.getString("resource_data"); rs1.close(); statement1.close(); if (Strings.isNullOrEmpty(s)) { continue; } JsonNode resourceJson = ObjectMapperPool.getInstance().readTree(s); Map<String, List<String>> hmResourceType = hm.get(resourceType); if (hmResourceType == null) { hmResourceType = new HashMap<>(); hm.put(resourceType, hmResourceType); } JsonNode json = ObjectMapperPool.getInstance().readTree(jsonStr); for (int i=0; i<json.size(); i++) { JsonNode child = json.get(i); JsonNode idNode = child.get("auditId"); JsonNode colsNode = child.get("cols"); if (idNode == null) { throw new Exception("No ID node in " + jsonStr); } if (colsNode == null) { throw new Exception("No cols node in " + jsonStr); } long id = idNode.asLong(); //get source file ID sql = "SELECT * FROM source_file_record WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, id); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileId = rs1.getLong("source_file_id"); rs1.close(); statement1.close(); //get source file type sql = "SELECT * FROM source_file WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileId); rs1 = statement1.executeQuery(); rs1.next(); long sourceFileType = rs1.getLong("source_file_type_id"); rs1.close(); statement1.close(); //get the type desc sql = "SELECT * FROM source_file_type WHERE id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); rs1.next(); String fileTypeDesc = rs1.getString("description"); rs1.close(); statement1.close(); //get the cols Map<Integer, String> hmCols = new HashMap<>(); sql = "SELECT * FROM source_file_type_column WHERE source_file_type_id = ?"; statement1 = mappingConnection.prepareStatement(sql); statement1.setLong(1, sourceFileType); rs1 = statement1.executeQuery(); while (rs1.next()) { int index = rs1.getInt("column_index"); String name = rs1.getString("column_name"); hmCols.put(new Integer(index), name); } rs1.close(); statement1.close(); for (int j=0; j<colsNode.size(); j++) { JsonNode colNode = colsNode.get(j); int col = colNode.get("col").asInt(); String jsonField = colNode.get("field").asText(); int index = jsonField.indexOf("["); while (index > -1) { int endIndex = jsonField.indexOf("]", index); String prefix = jsonField.substring(0, index + 1); String suffix = jsonField.substring(endIndex); if (prefix.equals("extension[")) { String val = jsonField.substring(index+1, endIndex); int extensionIndex = Integer.parseInt(val); JsonNode extensionArray = resourceJson.get("extension"); JsonNode extensionRoot = extensionArray.get(extensionIndex); String extensionUrl = extensionRoot.get("url").asText(); extensionUrl = extensionUrl.replace("http://endeavourhealth.org/fhir/StructureDefinition/", ""); extensionUrl = extensionUrl.replace("http://hl7.org/fhir/StructureDefinition/", ""); jsonField = prefix + extensionUrl + suffix; } else { jsonField = prefix + "n" + suffix; } index = jsonField.indexOf("[", endIndex); } String colName = hmCols.get(new Integer(col)); String fileTypeAndCol = fileTypeDesc + ":" + colName; List<String> fieldNameMappings = hmResourceType.get(jsonField); if 
(fieldNameMappings == null) { fieldNameMappings = new ArrayList<>(); hmResourceType.put(jsonField, fieldNameMappings); } if (!fieldNameMappings.contains(fileTypeAndCol)) { fieldNameMappings.add(fileTypeAndCol); } } } count ++; if (count % 500 == 0) { LOG.debug("Done " + count); } } LOG.debug("Done " + count); rs.close(); ehrEntityManager.close(); //create output file List<String> lines = new ArrayList<>(); List<String> resourceTypes = new ArrayList<>(hm.keySet()); Collections.sort(resourceTypes, String.CASE_INSENSITIVE_ORDER); for (String resourceType: resourceTypes) { lines.add("============================================================"); lines.add(resourceType); lines.add("============================================================"); Map<String, List<String>> hmResourceType = hm.get(resourceType); List<String> fields = new ArrayList<>(hmResourceType.keySet()); Collections.sort(fields, String.CASE_INSENSITIVE_ORDER); for (String field: fields) { String linePrefix = field + " = "; List<String> sourceRecords = hmResourceType.get(field); for (String sourceRecord: sourceRecords) { lines.add(linePrefix + sourceRecord); linePrefix = Strings.repeat(" ", linePrefix.length()); } lines.add(""); } lines.add(""); } File f = new File(outputFile); Path p = f.toPath(); Files.write(p, lines, Charset.defaultCharset(), StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING); LOG.debug("Finished creating transform map from " + table); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixBartsPatients(UUID serviceId) { LOG.debug("Fixing Barts patients at service " + serviceId); try { EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)edsEntityManager.getDelegate(); Connection edsConnection = session.connection(); int checked = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); String sql = "SELECT patient_id FROM patient_search WHERE service_id = '" + serviceId + "';"; Statement s = edsConnection.createStatement(); s.setFetchSize(10000); //don't get all rows at once ResultSet rs = s.executeQuery(sql); LOG.info("Got raw results back"); while (rs.next()) { String patientId = rs.getString(1); ResourceWrapper wrapper = resourceDal.getCurrentVersion(serviceId, ResourceType.Patient.toString(), UUID.fromString(patientId)); if (wrapper == null) { LOG.error("Failed to get recource current for ID " + patientId); continue; } String oldJson = wrapper.getResourceData(); Patient patient = (Patient)FhirSerializationHelper.deserializeResource(oldJson); PatientBuilder patientBuilder = new PatientBuilder(patient); List<String> numbersFromCsv = new ArrayList<>(); if (patient.hasTelecom()) { for (ContactPoint contactPoint: patient.getTelecom()) { if (contactPoint.hasId()) { numbersFromCsv.add(contactPoint.getValue()); } } for (String numberFromCsv: numbersFromCsv) { PPPHOTransformer.removeExistingContactPointWithoutIdByValue(patientBuilder, numberFromCsv); } } List<HumanName> namesFromCsv = new ArrayList<>(); if (patient.hasName()) { for (HumanName name: patient.getName()) { if (name.hasId()) { namesFromCsv.add(name); } } for (HumanName name: namesFromCsv) { PPNAMTransformer.removeExistingNameWithoutIdByValue(patientBuilder, name); } } List<Address> addressesFromCsv = new ArrayList<>(); if (patient.hasAddress()) { for (Address address: patient.getAddress()) { if (address.hasId()) { addressesFromCsv.add(address); } } for (Address address: addressesFromCsv) { 
                        PPADDTransformer.removeExistingAddressWithoutIdByValue(patientBuilder, address);
                    }
                }

                String newJson = FhirSerializationHelper.serializeResource(patient);
                if (!newJson.equals(oldJson)) {
                    wrapper.setResourceData(newJson);
                    saveResourceWrapper(serviceId, wrapper);
                    fixed++;
                }

                checked++;
                if (checked % 1000 == 0) {
                    LOG.debug("Checked " + checked + " fixed " + fixed);
                }
            }
            LOG.debug("Checked " + checked + " fixed " + fixed);

            rs.close();
            s.close();
            edsEntityManager.close();

            LOG.debug("Finish Fixing Barts patients at service " + serviceId);

        } catch (Throwable t) {
            LOG.error("", t);
        }
    }*/

    private static void postToRabbit(String exchangeName, String srcFile, Integer throttle) {
        LOG.info("Posting to " + exchangeName + " from " + srcFile);
        if (throttle != null) {
            LOG.info("Throttled to " + throttle + " messages/second");
        }

        try {
            File src = new File(srcFile);

            //create file of ones done
            File dir = src.getParentFile();
            String name = "DONE" + src.getName();
            File dst = new File(dir, name);

            Set<UUID> hsAlreadyDone = new HashSet<>();
            if (dst.exists()) {
                List<String> lines = Files.readAllLines(dst.toPath());
                for (String line : lines) {
                    if (!Strings.isNullOrEmpty(line)) {
                        try {
                            UUID uuid = UUID.fromString(line);
                            hsAlreadyDone.add(uuid);
                        } catch (Exception ex) {
                            LOG.error("Skipping line " + line);
                        }
                    }
                }
                LOG.info("Already done " + hsAlreadyDone.size());
            }

            List<UUID> exchangeIds = new ArrayList<>();
            int countTotal = 0;

            List<String> lines = Files.readAllLines(src.toPath());
            for (String line : lines) {
                if (!Strings.isNullOrEmpty(line)) {
                    try {
                        UUID uuid = UUID.fromString(line);
                        countTotal++;
                        if (!hsAlreadyDone.contains(uuid)) {
                            exchangeIds.add(uuid);
                        }
                    } catch (Exception ex) {
                        LOG.error("Skipping line " + line);
                    }
                }
            }

            LOG.info("Found " + countTotal + " down to " + exchangeIds.size() + " skipping ones already done, to post to " + exchangeName);
            continueOrQuit();

            FileWriter fileWriter = new FileWriter(dst, true);
            PrintWriter printWriter = new PrintWriter(fileWriter);

            long startMs = System.currentTimeMillis();
            int doneThisSecond = 0;

            LOG.info("Posting " + exchangeIds.size() + " to " + exchangeName);
            for (int i = 0; i < exchangeIds.size(); i++) {
                UUID exchangeId = exchangeIds.get(i);
                List<UUID> tmp = new ArrayList<>();
                tmp.add(exchangeId);
                QueueHelper.postToExchange(tmp, exchangeName, null, true);

                printWriter.println(exchangeId.toString());
                printWriter.flush();

                if (i % 5000 == 0) {
                    LOG.debug("Done " + i + " / " + exchangeIds.size());
                }

                if (throttle != null) {
                    doneThisSecond++;
                    if (doneThisSecond > throttle.intValue()) {
                        long now = System.currentTimeMillis();
                        long sleep = 1000 - (now - startMs);
                        if (sleep > 0) {
                            Thread.sleep(sleep);
                        }
                        startMs = System.currentTimeMillis();
                        doneThisSecond = 0;
                    }
                }
            }

            printWriter.close();

            LOG.info("Finished Posting to " + exchangeName + " from " + srcFile);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private static void postExchangesToProtocol(String srcFile) {
        LOG.info("Posting to protocol from " + srcFile);

        try {
            List<UUID> exchangeIds = new ArrayList<>();
            List<String> lines = Files.readAllLines(new File(srcFile).toPath());
            for (String line: lines) {
                if (!Strings.isNullOrEmpty(line)) {
                    UUID uuid = UUID.fromString(line);
                    exchangeIds.add(uuid);
                }
            }

            LOG.info("Posting " + exchangeIds.size() + " to Protocol queue");
            QueueHelper.postToExchange(exchangeIds, "EdsProtocol", null, false);

            LOG.info("Finished Posting to protocol from " + srcFile);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    /*
    create table uprn_pseudo_map (
        uprn bigint,
        pseudo_uprn varchar(255),
        property_class varchar(10)
    );
    */
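    /*
     * Illustrative sketch only (not referenced anywhere else): shows, in plain JDBC, the
     * add-to-batch / execute-and-commit pattern that calculateUprnPseudoIds() below uses when
     * writing to the uprn_pseudo_map table described in the comment above. The table and column
     * names come from that comment; property_class is omitted for brevity, and the batch size is
     * passed in rather than read from TransformConfig. The same pattern (with a larger column
     * list) appears again in populateSubscriberUprnTable() further down.
     */
    private static void batchInsertPseudoUprnsExample(Connection connection, Map<Long, String> pseudoUprnsByUprn, int batchSize) throws Exception {
        String sql = "INSERT INTO uprn_pseudo_map (uprn, pseudo_uprn) VALUES (?, ?)";
        PreparedStatement ps = connection.prepareStatement(sql);
        try {
            int inBatch = 0;
            for (Map.Entry<Long, String> entry : pseudoUprnsByUprn.entrySet()) {
                ps.setLong(1, entry.getKey().longValue());
                ps.setString(2, entry.getValue());
                ps.addBatch();
                inBatch++;

                //flush and commit whenever a full batch has been accumulated
                if (inBatch >= batchSize) {
                    ps.executeBatch();
                    connection.commit();
                    inBatch = 0;
                }
            }

            //flush any remaining rows
            if (inBatch > 0) {
                ps.executeBatch();
                connection.commit();
            }
        } finally {
            ps.close();
        }
    }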
    private static void calculateUprnPseudoIds(String subscriberConfigName, String targetTable) throws Exception {
        LOG.info("Calculating UPRN Pseudo IDs " + subscriberConfigName);
        try {
            JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");
            JsonNode pseudoNode = config.get("pseudonymisation");
            if (pseudoNode == null) {
                LOG.error("No salt key found!");
                return;
            }
            JsonNode saltNode = pseudoNode.get("salt");
            String base64Salt = saltNode.asText();
            byte[] saltBytes = Base64.getDecoder().decode(base64Salt);

            EntityManager subscriberEntityManager = ConnectionManager.getSubscriberTransformEntityManager(subscriberConfigName);
            SessionImpl session = (SessionImpl) subscriberEntityManager.getDelegate();
            Connection subscriberConnection = session.connection();

            String upsertSql = "INSERT INTO " + targetTable + " (uprn, pseudo_uprn, property_class) VALUES (?, ?, ?)";
            PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql);
            int inBatch = 0;
            int done = 0;

            EntityManager referenceEntityManager = ConnectionManager.getReferenceEntityManager();
            session = (SessionImpl) referenceEntityManager.getDelegate();
            Connection referenceConnection = session.connection();

            String selectSql = "SELECT uprn, property_class FROM uprn_property_class";
            PreparedStatement psSelect = referenceConnection.prepareStatement(selectSql);
            psSelect.setFetchSize(2000);

            LOG.info("Starting query on EDS database");
            ResultSet rs = psSelect.executeQuery();
            LOG.info("Got raw results back");

            while (rs.next()) {
                long uprn = rs.getLong(1);
                String cls = rs.getString(2);

                //pseudonymise the UPRN using the subscriber's salt
                String pseudoUprn = null;
                TreeMap<String, String> keys = new TreeMap<>();
                keys.put("UPRN", "" + uprn);
                Crypto crypto = new Crypto();
                crypto.SetEncryptedSalt(saltBytes);
                pseudoUprn = crypto.GetDigest(keys);

                psUpsert.setLong(1, uprn);
                psUpsert.setString(2, pseudoUprn);
                psUpsert.setString(3, cls);
                psUpsert.addBatch();
                inBatch++;
                done++;

                if (inBatch >= TransformConfig.instance().getResourceSaveBatchSize()) {
                    psUpsert.executeBatch();
                    subscriberConnection.commit();
                    inBatch = 0;
                }

                if (done % 5000 == 0) {
                    LOG.debug("Done " + done);
                }
            }

            if (inBatch > 0) {
                psUpsert.executeBatch();
                subscriberConnection.commit();
            }
            LOG.debug("Done " + done);

            psUpsert.close();
            subscriberEntityManager.close();
            psSelect.close();
            referenceEntityManager.close();

            LOG.info("Finished Calculating UPRN Pseudo IDs " + subscriberConfigName);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    private static void populateSubscriberUprnTable(String subscriberConfigName, Integer overrideBatchSize, String specificPatientId) throws Exception {
        LOG.info("Populating Subscriber UPRN Table for " + subscriberConfigName);
        try {
            int saveBatchSize = TransformConfig.instance().getResourceSaveBatchSize();
            if (overrideBatchSize != null) {
                saveBatchSize = overrideBatchSize.intValue();
            }

            JsonNode config = ConfigManager.getConfigurationAsJson(subscriberConfigName, "db_subscriber");

            //changed the format of the JSON
            JsonNode pseudoNode = config.get("pseudonymisation");
            boolean pseudonymised = pseudoNode != null;
            byte[] saltBytes = null;
            if (pseudonymised) {
                JsonNode saltNode = pseudoNode.get("salt");
                String base64Salt = saltNode.asText();
                saltBytes = Base64.getDecoder().decode(base64Salt);
            }
            /*boolean pseudonymised = config.get("pseudonymised").asBoolean();
            byte[] saltBytes = null;
            if (pseudonymised) {
                JsonNode saltNode = config.get("salt");
                String base64Salt = saltNode.asText();
                saltBytes = Base64.getDecoder().decode(base64Salt);
            }*/

            List<EnterpriseConnector.ConnectionWrapper> connectionWrappers = EnterpriseConnector.openConnection(subscriberConfigName);
            for (EnterpriseConnector.ConnectionWrapper connectionWrapper: connectionWrappers) {
                Connection subscriberConnection = connectionWrapper.getConnection();
                LOG.info("Populating " + connectionWrapper);

                String upsertSql;
                if (pseudonymised) {
                    upsertSql = "INSERT INTO patient_uprn"
                            + " (patient_id, organization_id, person_id, lsoa_code, pseudo_uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode, property_class)"
                            + " VALUES"
                            + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
                            + " ON DUPLICATE KEY UPDATE"
                            + " organization_id = VALUES(organization_id),"
                            + " person_id = VALUES(person_id),"
                            + " lsoa_code = VALUES(lsoa_code),"
                            + " pseudo_uprn = VALUES(pseudo_uprn),"
                            + " qualifier = VALUES(qualifier),"
                            + " `algorithm` = VALUES(`algorithm`),"
                            + " `match` = VALUES(`match`),"
                            + " no_address = VALUES(no_address),"
                            + " invalid_address = VALUES(invalid_address),"
                            + " missing_postcode = VALUES(missing_postcode),"
                            + " invalid_postcode = VALUES(invalid_postcode),"
                            + " property_class = VALUES(property_class)";
                } else {
                    upsertSql = "INSERT INTO patient_uprn"
                            + " (patient_id, organization_id, person_id, lsoa_code, uprn, qualifier, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode, property_class)"
                            + " VALUES"
                            + " (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
                            + " ON DUPLICATE KEY UPDATE"
                            + " organization_id = VALUES(organization_id),"
                            + " person_id = VALUES(person_id),"
                            + " lsoa_code = VALUES(lsoa_code),"
                            + " uprn = VALUES(uprn),"
                            + " qualifier = VALUES(qualifier),"
                            + " `algorithm` = VALUES(`algorithm`),"
                            + " `match` = VALUES(`match`),"
                            + " no_address = VALUES(no_address),"
                            + " invalid_address = VALUES(invalid_address),"
                            + " missing_postcode = VALUES(missing_postcode),"
                            + " invalid_postcode = VALUES(invalid_postcode),"
                            + " property_class = VALUES(property_class)";
                }
                PreparedStatement psUpsert = subscriberConnection.prepareStatement(upsertSql);
                int inBatch = 0;

                EntityManager edsEntityManager = ConnectionManager.getEdsEntityManager();
                SessionImpl session = (SessionImpl) edsEntityManager.getDelegate();
                Connection edsConnection = session.connection();

                SubscriberResourceMappingDalI enterpriseIdDal = DalProvider.factorySubscriberResourceMappingDal(subscriberConfigName);
                PatientLinkDalI patientLinkDal = DalProvider.factoryPatientLinkDal();
                PostcodeDalI postcodeDal = DalProvider.factoryPostcodeDal();

                int checked = 0;
                int saved = 0;
                Map<String, Boolean> hmPermittedPublishers = new HashMap<>();

                //join to the property class table - this isn't the best way of doing it as it will only work while
                //the reference and eds databases are on the same server
                //String sql = "SELECT service_id, patient_id, uprn, qualifier, abp_address, `algorithm`, `match`, no_address, invalid_address, missing_postcode, invalid_postcode FROM patient_address_uprn";
                String sql = "SELECT a.service_id, a.patient_id, a.uprn, a.qualifier, a.abp_address, a.`algorithm`,"
                        + " a.`match`, a.no_address, a.invalid_address, a.missing_postcode, a.invalid_postcode, c.property_class"
                        + " FROM patient_address_uprn a"
                        + " LEFT OUTER JOIN reference.uprn_property_class c"
                        + " ON c.uprn = a.uprn";

                //support one patient at a time for debugging
                if (specificPatientId != null) {
                    sql += " WHERE a.patient_id = '" + specificPatientId + "'";
                    LOG.debug("Restricting to patient " + specificPatientId);
                }

                Statement s = edsConnection.createStatement();
                s.setFetchSize(2000); //don't get all rows at once

                LOG.info("Starting query on EDS database");
                ResultSet rs = s.executeQuery(sql);
                LOG.info("Got raw results back");

                while (rs.next()) {
                    int col = 1;
                    String serviceId = rs.getString(col++);
                    String patientId = rs.getString(col++);
                    Long uprn = rs.getLong(col++);
                    if (rs.wasNull()) {
                        uprn = null;
                    }
                    String qualifier = rs.getString(col++);
                    String abpAddress = rs.getString(col++);
                    String algorithm = rs.getString(col++);
                    String match = rs.getString(col++);
                    boolean noAddress = rs.getBoolean(col++);
                    boolean invalidAddress = rs.getBoolean(col++);
                    boolean missingPostcode = rs.getBoolean(col++);
                    boolean invalidPostcode = rs.getBoolean(col++);
                    String propertyClass = rs.getString(col++);

                    //because of past mistakes, we have Discovery->Enterprise mappings for patients that
                    //shouldn't, so we also need to check that the service ID is definitely a publisher to this subscriber
                    Boolean isPublisher = hmPermittedPublishers.get(serviceId);
                    if (isPublisher == null) {
                        List<LibraryItem> libraryItems = LibraryRepositoryHelper.getProtocolsByServiceId(serviceId, null); //passing null means don't filter on system ID
                        for (LibraryItem libraryItem : libraryItems) {
                            Protocol protocol = libraryItem.getProtocol();
                            if (protocol.getEnabled() != ProtocolEnabled.TRUE) {
                                continue;
                            }

                            //check to make sure that this service is actually a PUBLISHER to this protocol
                            boolean isProtocolPublisher = false;
                            for (ServiceContract serviceContract : protocol.getServiceContract()) {
                                if (serviceContract.getType().equals(ServiceContractType.PUBLISHER)
                                        && serviceContract.getService().getUuid().equals(serviceId)
                                        && serviceContract.getActive() == ServiceContractActive.TRUE) {
                                    isProtocolPublisher = true;
                                    break;
                                }
                            }
                            if (!isProtocolPublisher) {
                                continue;
                            }

                            //check to see if this subscriber config is a subscriber to this DB
                            for (ServiceContract serviceContract : protocol.getServiceContract()) {
                                if (serviceContract.getType().equals(ServiceContractType.SUBSCRIBER)
                                        && serviceContract.getActive() == ServiceContractActive.TRUE) {
                                    ServiceDalI serviceRepository = DalProvider.factoryServiceDal();
                                    UUID subscriberServiceId = UUID.fromString(serviceContract.getService().getUuid());
                                    UUID subscriberTechnicalInterfaceId = UUID.fromString(serviceContract.getTechnicalInterface().getUuid());
                                    Service subscriberService = serviceRepository.getById(subscriberServiceId);
                                    List<ServiceInterfaceEndpoint> serviceEndpoints = subscriberService.getEndpointsList();
                                    for (ServiceInterfaceEndpoint serviceEndpoint : serviceEndpoints) {
                                        if (serviceEndpoint.getTechnicalInterfaceUuid().equals(subscriberTechnicalInterfaceId)) {
                                            String protocolSubscriberConfigName = serviceEndpoint.getEndpoint();
                                            if (protocolSubscriberConfigName.equals(subscriberConfigName)) {
                                                isPublisher = new Boolean(true);
                                                break;
                                            }
                                        }
                                    }
                                }
                            }
                        }

                        if (isPublisher == null) {
                            isPublisher = new Boolean(false);
                        }
                        hmPermittedPublishers.put(serviceId, isPublisher);
                    }

                    if (specificPatientId != null) {
                        LOG.debug("Org is publisher = " + isPublisher);
                    }

                    if (!isPublisher.booleanValue()) {
                        continue;
                    }

                    //check if patient ID already exists in the subscriber DB
                    Long subscriberPatientId = enterpriseIdDal.findEnterpriseIdOldWay(ResourceType.Patient.toString(), patientId);
                    if (specificPatientId != null) {
                        LOG.debug("Got patient " + patientId + " with UPRN " + uprn + " and property class " + propertyClass + " and subscriber patient ID " + subscriberPatientId);
                    }

                    //if the patient doesn't exist on this subscriber DB, then don't transform this record
                    if (subscriberPatientId == null) {
                        continue;
                    }

                    //see if the patient actually exists in the subscriber DB (might not if the patient is deleted or confidential)
                    String checkSql = "SELECT id FROM patient WHERE id = ?";
                    Connection subscriberConnection2 = connectionWrapper.getConnection();
                    PreparedStatement psCheck = subscriberConnection2.prepareStatement(checkSql);
                    psCheck.setLong(1, subscriberPatientId);
                    ResultSet checkRs = psCheck.executeQuery();
                    boolean inSubscriberDb = checkRs.next();
                    psCheck.close();
                    subscriberConnection2.close();

                    if (!inSubscriberDb) {
                        LOG.info("Skipping patient " + patientId + " -> " + subscriberPatientId + " as not found in enterprise DB");
                        continue;
                    }

                    SubscriberOrgMappingDalI orgMappingDal = DalProvider.factorySubscriberOrgMappingDal(subscriberConfigName);
                    Long subscriberOrgId = orgMappingDal.findEnterpriseOrganisationId(serviceId);

                    String discoveryPersonId = patientLinkDal.getPersonId(patientId);
                    SubscriberPersonMappingDalI personMappingDal = DalProvider.factorySubscriberPersonMappingDal(subscriberConfigName);
                    Long subscriberPersonId = personMappingDal.findOrCreateEnterprisePersonId(discoveryPersonId);

                    String lsoaCode = null;
                    if (!Strings.isNullOrEmpty(abpAddress)) {
                        String[] toks = abpAddress.split(" ");
                        String postcode = toks[toks.length - 1];
                        PostcodeLookup postcodeReference = postcodeDal.getPostcodeReference(postcode);
                        if (postcodeReference != null) {
                            lsoaCode = postcodeReference.getLsoaCode();
                        }
                    }

                    col = 1;
                    psUpsert.setLong(col++, subscriberPatientId);
                    psUpsert.setLong(col++, subscriberOrgId);
                    psUpsert.setLong(col++, subscriberPersonId);
                    psUpsert.setString(col++, lsoaCode);
                    if (pseudonymised) {
                        String pseudoUprn = null;
                        if (uprn != null) {
                            TreeMap<String, String> keys = new TreeMap<>();
                            keys.put("UPRN", "" + uprn);
                            Crypto crypto = new Crypto();
                            crypto.SetEncryptedSalt(saltBytes);
                            pseudoUprn = crypto.GetDigest(keys);
                        }
                        psUpsert.setString(col++, pseudoUprn);
                    } else {
                        if (uprn != null) {
                            psUpsert.setLong(col++, uprn.longValue());
                        } else {
                            psUpsert.setNull(col++, Types.BIGINT);
                        }
                    }
                    psUpsert.setString(col++, qualifier);
                    psUpsert.setString(col++, algorithm);
                    psUpsert.setString(col++, match);
                    psUpsert.setBoolean(col++, noAddress);
                    psUpsert.setBoolean(col++, invalidAddress);
                    psUpsert.setBoolean(col++, missingPostcode);
                    psUpsert.setBoolean(col++, invalidPostcode);
                    psUpsert.setString(col++, propertyClass);

                    if (specificPatientId != null) {
                        LOG.debug("" + psUpsert);
                    }

                    psUpsert.addBatch();
                    inBatch++;
                    saved++;

                    if (inBatch >= saveBatchSize) {
                        try {
                            psUpsert.executeBatch();
                            subscriberConnection.commit();
                            inBatch = 0;
                        } catch (Exception ex) {
                            LOG.error("Error saving UPRN for " + patientId + " -> " + subscriberPatientId + " for org " + subscriberOrgId);
                            LOG.error("" + psUpsert);
                            throw ex;
                        }
                    }

                    checked++;
                    if (checked % 1000 == 0) {
                        LOG.info("Checked " + checked + " Saved " + saved);
                    }
                }

                if (inBatch > 0) {
                    psUpsert.executeBatch();
                    subscriberConnection.commit();
                }
                LOG.info("Checked " + checked + " Saved " + saved);

                psUpsert.close();
                subscriberConnection.close();
                edsEntityManager.close();
            }

            LOG.info("Finished Populating Subscriber UPRN Table for " + subscriberConfigName);
        } catch (Throwable t) {
            LOG.error("", t);
        }
    }

    /*private static void fixPersonsNoNhsNumber() {
        LOG.info("Fixing persons with no NHS number");

        try {
            ServiceDalI serviceDal = DalProvider.factoryServiceDal();
            List<Service> services = serviceDal.getAll();

            EntityManager entityManager = ConnectionManager.getEdsEntityManager();
            SessionImpl session = (SessionImpl)entityManager.getDelegate();
            Connection patientSearchConnection = session.connection();
            Statement patientSearchStatement = patientSearchConnection.createStatement();

            for (Service service: services)
{ LOG.info("Doing " + service.getName() + " " + service.getId()); int checked = 0; int fixedPersons = 0; int fixedSearches = 0; String sql = "SELECT patient_id, nhs_number FROM patient_search WHERE service_id = '" + service.getId() + "' AND (nhs_number IS NULL or CHAR_LENGTH(nhs_number) != 10)"; ResultSet rs = patientSearchStatement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String nhsNumber = rs.getString(2); //find matched person ID String personIdSql = "SELECT person_id FROM patient_link WHERE patient_id = '" + patientId + "'"; Statement s = patientSearchConnection.createStatement(); ResultSet rsPersonId = s.executeQuery(personIdSql); String personId = null; if (rsPersonId.next()) { personId = rsPersonId.getString(1); } rsPersonId.close(); s.close(); if (Strings.isNullOrEmpty(personId)) { LOG.error("Patient " + patientId + " has no person ID"); continue; } //see whether person ID used NHS number to match String patientLinkSql = "SELECT nhs_number FROM patient_link_person WHERE person_id = '" + personId + "'"; s = patientSearchConnection.createStatement(); ResultSet rsPatientLink = s.executeQuery(patientLinkSql); String matchingNhsNumber = null; if (rsPatientLink.next()) { matchingNhsNumber = rsPatientLink.getString(1); } rsPatientLink.close(); s.close(); //if patient link person has a record for this nhs number, update the person link if (!Strings.isNullOrEmpty(matchingNhsNumber)) { String newPersonId = UUID.randomUUID().toString(); SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); String createdAtStr = sdf.format(new Date()); s = patientSearchConnection.createStatement(); //new record in patient link history String patientHistorySql = "INSERT INTO patient_link_history VALUES ('" + patientId + "', '" + service.getId() + "', '" + createdAtStr + "', '" + newPersonId + "', '" + personId + "')"; //LOG.debug(patientHistorySql); s.execute(patientHistorySql); //update patient link String patientLinkUpdateSql = "UPDATE patient_link SET person_id = '" + newPersonId + "' WHERE patient_id = '" + patientId + "'"; s.execute(patientLinkUpdateSql); patientSearchConnection.commit(); s.close(); fixedPersons ++; } //if patient search has an invalid NHS number, update it if (!Strings.isNullOrEmpty(nhsNumber)) { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(service.getId(), ResourceType.Patient, patientId); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); patientSearchDal.update(service.getId(), patient); fixedSearches ++; } checked ++; if (checked % 50 == 0) { LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); } } LOG.info("Checked " + checked + ", FixedPersons = " + fixedPersons + ", FixedSearches = " + fixedSearches); rs.close(); } patientSearchStatement.close(); entityManager.close(); LOG.info("Finished fixing persons with no NHS number"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void checkDeletedObs(UUID serviceId, UUID systemId) { LOG.info("Checking Observations for " + serviceId); try { ResourceDalI resourceDal = DalProvider.factoryResourceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); 
potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); List<String> subscriberConfigs = new ArrayList<>(); subscriberConfigs.add("ceg_data_checking"); subscriberConfigs.add("ceg_enterprise"); subscriberConfigs.add("hurley_data_checking"); subscriberConfigs.add("hurley_deidentified"); Set<String> observationsNotDeleted = new HashSet<>(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); ExchangePayloadFile firstItem = payload.get(0); //String version = EmisCsvToFhirTransformer.determineVersion(payload); //if we've reached the point before we process data for this practice, break out try { if (!EmisCsvToFhirTransformer.shouldProcessPatientData(payload)) { break; } } catch (TransformException e) { LOG.info("Skipping exchange containing " + firstItem.getPath()); continue; } String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); Map<UUID, ExchangeBatch> hmBatchesByPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchange.getId()); for (ExchangeBatch batch : batches) { if (batch.getEdsPatientId() != null) { hmBatchesByPatient.put(batch.getEdsPatientId(), batch); } } for (ExchangePayloadFile item : payload) { String type = item.getType(); if (type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String deleted = record.get("Deleted"); String observationId = record.get("ObservationGuid"); if (deleted.equalsIgnoreCase("true")) { //if observation was reinstated at some point, skip it if (observationsNotDeleted.contains(observationId)) { continue; } String patientId = record.get("PatientGuid"); CsvCell patientCell = CsvCell.factoryDummyWrapper(patientId); CsvCell observationCell = CsvCell.factoryDummyWrapper(observationId); Set<ResourceType> resourceTypes = org.endeavourhealth.transform.emis.csv.transforms.careRecord.ObservationTransformer.findOriginalTargetResourceTypes(csvHelper, patientCell, observationCell); for (ResourceType resourceType: resourceTypes) { //will already have been done OK if (resourceType == ResourceType.Observation) { continue; } String sourceId = patientId + ":" + observationId; UUID uuid = IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { throw new Exception("Failed to find UUID for " + resourceType + " " + sourceId); } LOG.debug("Fixing " + resourceType + " " + uuid); //create file of IDs to delete for each subscriber DB for (String subscriberConfig : subscriberConfigs) { EnterpriseIdDalI subscriberDal = 
DalProvider.factoryEnterpriseIdDal(subscriberConfig); Long enterpriseId = subscriberDal.findEnterpriseId(resourceType.toString(), uuid.toString()); if (enterpriseId == null) { continue; } String sql = null; if (resourceType == ResourceType.AllergyIntolerance) { sql = "DELETE FROM allergy_intolerance WHERE id = " + enterpriseId; } else if (resourceType == ResourceType.ReferralRequest) { sql = "DELETE FROM referral_request WHERE id = " + enterpriseId; } else { sql = "DELETE FROM observation WHERE id = " + enterpriseId; } sql += "\n"; File f = new File(subscriberConfig + ".sql"); Files.write(f.toPath(), sql.getBytes(), StandardOpenOption.CREATE, StandardOpenOption.APPEND, StandardOpenOption.WRITE); } //delete resource if not already done ResourceWrapper resourceWrapper = resourceDal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (resourceWrapper != null && !resourceWrapper.isDeleted()) { ExchangeBatch batch = hmBatchesByPatient.get(resourceWrapper.getPatientId()); resourceWrapper.setDeleted(true); resourceWrapper.setResourceData(null); resourceWrapper.setResourceMetadata(""); resourceWrapper.setExchangeBatchId(batch.getBatchId()); resourceWrapper.setVersion(UUID.randomUUID()); resourceWrapper.setCreatedAt(new Date()); resourceWrapper.setExchangeId(exchange.getId()); resourceDal.delete(resourceWrapper); } } } else { observationsNotDeleted.add(observationId); } } parser.close(); } } } LOG.info("Finished Checking Observations for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void testBatchInserts(String url, String user, String pass, String num, String batchSizeStr) { LOG.info("Testing Batch Inserts"); try { int inserts = Integer.parseInt(num); int batchSize = Integer.parseInt(batchSizeStr); LOG.info("Openning Connection"); Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); //String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?);"; String sql = "INSERT INTO drewtest.insert_test VALUES (?, ?, ?)"; PreparedStatement ps = conn.prepareStatement(sql); if (batchSize == 1) { LOG.info("Testing non-batched inserts"); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.execute(); } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } else { LOG.info("Testing batched inserts with batch size " + batchSize); long start = System.currentTimeMillis(); for (int i = 0; i < inserts; i++) { int col = 1; ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, UUID.randomUUID().toString()); ps.setString(col++, randomStr()); ps.addBatch(); if ((i + 1) % batchSize == 0 || i + 1 >= inserts) { ps.executeBatch(); } } long end = System.currentTimeMillis(); LOG.info("Done " + inserts + " in " + (end - start) + " ms"); } ps.close(); conn.close(); LOG.info("Finished Testing Batch Inserts"); } catch (Exception ex) { LOG.error("", ex); } }*/ private static String randomStr() { StringBuffer sb = new StringBuffer(); Random r = new Random(System.currentTimeMillis()); while (sb.length() < 1100) { sb.append(r.nextLong()); } return sb.toString(); } /*private static void fixEmisProblems(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems for " + serviceId); try { Map<String, List<String>> hmReferences = new 
HashMap<>(); Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Caching problem links"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); //String version = EmisCsvToFhirTransformer.determineVersion(payload); ExchangePayloadFile firstItem = payload.get(0); String name = FilenameUtils.getBaseName(firstItem.getPath()); String[] toks = name.split("_"); String agreementId = toks[4]; LOG.info("Doing exchange containing " + firstItem.getPath()); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchange.getId(), agreementId, true); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("CareRecord_Observation")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("ObservationGuid"); String localId = patientId + ":" + observationId; ResourceType resourceType = ObservationTransformer.findOriginalTargetResourceType(filer, CsvCell.factoryDummyWrapper(patientId), CsvCell.factoryDummyWrapper(observationId)); Reference localReference = ReferenceHelper.createReference(resourceType, localId); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_DrugRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("DrugRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, localId); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String 
localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else if (type.equals("Prescribing_IssueRecord")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String parentProblemId = record.get("ProblemObservationGuid"); String patientId = record.get("PatientGuid"); patientIds.add(patientId); if (!Strings.isNullOrEmpty(parentProblemId)) { String observationId = record.get("IssueRecordGuid"); String localId = patientId + ":" + observationId; Reference localReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, localId); String localProblemId = patientId + ":" + parentProblemId; Reference localProblemReference = ReferenceHelper.createReference(ResourceType.Condition, localProblemId); Reference globalProblemReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localProblemReference, csvHelper); Reference globalReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localReference, csvHelper); String globalProblemId = ReferenceHelper.getReferenceId(globalProblemReference); List<String> problemChildren = hmReferences.get(globalProblemId); if (problemChildren == null) { problemChildren = new ArrayList<>(); hmReferences.put(globalProblemId, problemChildren); } problemChildren.add(globalReference.getReference()); } } parser.close(); } else { //no problem link } } } LOG.info("Finished caching problem links, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), ResourceType.Condition.toString()); for (ResourceWrapper wrapper: wrappers) { if (wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); Condition condition = (Condition)FhirSerializationHelper.deserializeResource(originalJson); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(condition, FhirExtensionUri.PROBLEM_LAST_REVIEWED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_LAST_REVIEWED__PERFORMER); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = 
IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } //sort out the contained list of children ContainedListBuilder listBuilder = new ContainedListBuilder(conditionBuilder); //remove any existing children listBuilder.removeContainedList(); //add all the new ones we've found List<String> localChildReferences = hmReferences.get(wrapper.getResourceId().toString()); if (localChildReferences != null) { for (String localChildReference: localChildReferences) { Reference reference = ReferenceHelper.createReference(localChildReference); listBuilder.addContainedListItem(reference); } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(condition); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed ++; } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEmisProblems3ForPublisher(String publisher, UUID systemId) { try { LOG.info("Doing fix for " + publisher); String[] done = new String[]{ "01fcfe94-5dfd-4951-b74d-129f874209b0", "07a267d3-189b-4968-b9b0-547de28edef5", "0b9601d1-f7ab-4f5d-9f77-1841050f75ab", "0fd2ff5d-2c25-4707-afe8-707e81a250b8", "14276da8-c344-4841-a36d-aa38940e78e7", "158251ca-0e1d-4471-8fae-250b875911e1", "160131e2-a5ff-49c8-b62e-ae499a096193", "16490f2b-62ce-44c6-9816-528146272340", "18fa1bed-b9a0-4d55-a0cc-dfc31831259a", "19cba169-d41e-424a-812f-575625c72305", "19ff6a03-25df-4e61-9ab1-4573cfd24729", "1b3d1627-f49e-4103-92d6-af6016476da3", "1e198fbb-c9cd-429a-9b50-0f124d0d825c", "20444fbe-0802-46fc-8203-339a36f52215", "21e27bf3-8071-48dd-924f-1d8d21f9216f", "23203e72-a3b0-4577-9942-30f7cdff358e", "23be1f4a-68ec-4a49-b2ec-aa9109c99dcd", "2b56033f-a9b4-4bab-bb53-c619bdb38895", "2ba26f2d-8068-4b77-8e62-431edfc2c2e2", "2ed89931-0ce7-49ea-88ac-7266b6c03be0", "3abf8ded-f1b1-495b-9a2d-5d0223e33fa7", "3b0f6720-2ffd-4f8a-afcd-7e3bb311212d", "415b509a-cf39-45bc-9acf-7f982a00e159", "4221276f-a3b0-4992-b426-ec2d8c7347f2", "49868211-d868-4b55-a201-5acac0be0cc0", "55fdcbd0-9b2d-493a-b874-865ccc93a156", "56124545-d266-4da9-ba1f-b3a16edc7f31", "6c11453b-dbf8-4749-a0ec-ab705920e316" }; ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> all = dal.getAll(); for (Service service: all) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { boolean alreadyDone = false; String idStr = service.getId().toString(); for (String doneId: done) { if (idStr.equalsIgnoreCase(doneId)) { alreadyDone = true; break; } } if (alreadyDone) { continue; } fixEmisProblems3(service.getId(), systemId); } } LOG.info("Done fix for " + publisher); } catch (Throwable t) { LOG.error("", t); } } private static void fixEmisProblems3(UUID serviceId, UUID systemId) { LOG.info("Fixing Emis Problems 3 for " + serviceId); try { Set<String> patientIds = new HashSet<>(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); FhirResourceFiler filer = new FhirResourceFiler(null, serviceId, systemId, null, null); LOG.info("Finding patients"); //Go through all files to work out problem children for every problem ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = 
exchangeDal.getExchangesByService(serviceId, systemId, Integer.MAX_VALUE); for (int i=exchanges.size()-1; i>=0; i--) { Exchange exchange = exchanges.get(i); List<ExchangePayloadFile> payload = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile item: payload) { String type = item.getType(); if (type.equals("Admin_Patient")) { InputStreamReader isr = FileHelper.readFileReaderFromSharedStorage(item.getPath()); CSVParser parser = new CSVParser(isr, EmisCsvToFhirTransformer.CSV_FORMAT); Iterator<CSVRecord> iterator = parser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientId = record.get("PatientGuid"); patientIds.add(patientId); } parser.close(); } } } LOG.info("Finished checking files, finding " + patientIds.size() + " patients"); int done = 0; int fixed = 0; for (String localPatientId: patientIds) { Reference localPatientReference = ReferenceHelper.createReference(ResourceType.Patient, localPatientId); Reference globalPatientReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(localPatientReference, filer); String patientUuid = ReferenceHelper.getReferenceId(globalPatientReference); List<ResourceType> potentialResourceTypes = new ArrayList<>(); potentialResourceTypes.add(ResourceType.Procedure); potentialResourceTypes.add(ResourceType.AllergyIntolerance); potentialResourceTypes.add(ResourceType.FamilyMemberHistory); potentialResourceTypes.add(ResourceType.Immunization); potentialResourceTypes.add(ResourceType.DiagnosticOrder); potentialResourceTypes.add(ResourceType.Specimen); potentialResourceTypes.add(ResourceType.DiagnosticReport); potentialResourceTypes.add(ResourceType.ReferralRequest); potentialResourceTypes.add(ResourceType.Condition); potentialResourceTypes.add(ResourceType.Observation); for (ResourceType resourceType: potentialResourceTypes) { List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, UUID.fromString(patientUuid), resourceType.toString()); for (ResourceWrapper wrapper : wrappers) { if (wrapper.isDeleted()) { continue; } String originalJson = wrapper.getResourceData(); DomainResource resource = (DomainResource)FhirSerializationHelper.deserializeResource(originalJson); //Also go through all observation records and any that have parent observations - these need fixing too??? 
Extension extension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PARENT_RESOURCE); if (extension != null) { Reference reference = (Reference)extension.getValue(); fixReference(serviceId, filer, reference, potentialResourceTypes); } if (resource instanceof Observation) { Observation obs = (Observation)resource; if (obs.hasRelated()) { for (Observation.ObservationRelatedComponent related: obs.getRelated()) { if (related.hasTarget()) { Reference reference = related.getTarget(); fixReference(serviceId, filer, reference, potentialResourceTypes); } } } } if (resource instanceof DiagnosticReport) { DiagnosticReport diag = (DiagnosticReport)resource; if (diag.hasResult()) { for (Reference reference: diag.getResult()) { fixReference(serviceId, filer, reference, potentialResourceTypes); } } } //Go through all patients, go through all problems, for any child that's Observation, find the true resource type then update and save if (resource instanceof Condition) { if (resource.hasContained()) { for (Resource contained: resource.getContained()) { if (contained.getId().equals("Items")) { List_ containedList = (List_)contained; if (containedList.hasEntry()) { for (List_.ListEntryComponent entry: containedList.getEntry()) { Reference reference = entry.getItem(); fixReference(serviceId, filer, reference, potentialResourceTypes); } } } } } //sort out the nested extension references Extension outerExtension = ExtensionConverter.findExtension(resource, FhirExtensionUri.PROBLEM_RELATED); if (outerExtension != null) { Extension innerExtension = ExtensionConverter.findExtension(outerExtension, FhirExtensionUri._PROBLEM_RELATED__TARGET); if (innerExtension != null) { Reference performerReference = (Reference)innerExtension.getValue(); String value = performerReference.getReference(); if (value.endsWith("}")) { Reference globalPerformerReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(performerReference, filer); innerExtension.setValue(globalPerformerReference); } } } } //save the updated condition String newJson = FhirSerializationHelper.serializeResource(resource); if (!newJson.equals(originalJson)) { wrapper.setResourceData(newJson); saveResourceWrapper(serviceId, wrapper); fixed++; } } } done ++; if (done % 1000 == 0) { LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); } } LOG.info("Done " + done + " patients and fixed " + fixed + " problems"); LOG.info("Finished Emis Problems 3 for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } } private static boolean fixReference(UUID serviceId, HasServiceSystemAndExchangeIdI csvHelper, Reference reference, List<ResourceType> potentialResourceTypes) throws Exception { //if it's already something other than observation, we're OK ReferenceComponents comps = ReferenceHelper.getReferenceComponents(reference); if (comps.getResourceType() != ResourceType.Observation) { return false; } Reference sourceReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, reference); String sourceId = ReferenceHelper.getReferenceId(sourceReference); String newReferenceValue = findTrueResourceType(serviceId, potentialResourceTypes, sourceId); if (newReferenceValue == null) { return false; } reference.setReference(newReferenceValue); return true; } private static String findTrueResourceType(UUID serviceId, List<ResourceType> potentials, String sourceId) throws Exception { ResourceDalI dal = DalProvider.factoryResourceDal(); for (ResourceType resourceType: potentials) { UUID uuid = 
IdHelper.getEdsResourceId(serviceId, resourceType, sourceId); if (uuid == null) { continue; } ResourceWrapper wrapper = dal.getCurrentVersion(serviceId, resourceType.toString(), uuid); if (wrapper != null) { return ReferenceHelper.createResourceReference(resourceType, uuid.toString()); } } return null; }*/ /*private static void convertExchangeBody(UUID systemUuid) { try { LOG.info("Converting exchange bodies for system " + systemUuid); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { List<Exchange> exchanges = exchangeDal.getExchangesByService(service.getId(), systemUuid, Integer.MAX_VALUE); if (exchanges.isEmpty()) { continue; } LOG.debug("doing " + service.getName() + " with " + exchanges.size() + " exchanges"); for (Exchange exchange: exchanges) { String exchangeBody = exchange.getBody(); try { //already done ExchangePayloadFile[] files = JsonSerializer.deserialize(exchangeBody, ExchangePayloadFile[].class); continue; } catch (JsonSyntaxException ex) { //if the JSON can't be parsed, then it'll be the old format of body that isn't JSON } List<ExchangePayloadFile> newFiles = new ArrayList<>(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (String file: files) { ExchangePayloadFile fileObj = new ExchangePayloadFile(); String fileWithoutSharedStorage = file.substring(TransformConfig.instance().getSharedStoragePath().length()+1); fileObj.setPath(fileWithoutSharedStorage); //size List<FileInfo> fileInfos = FileHelper.listFilesInSharedStorageWithInfo(file); for (FileInfo info: fileInfos) { if (info.getFilePath().equals(file)) { long size = info.getSize(); fileObj.setSize(new Long(size)); } } //type if (systemUuid.toString().equalsIgnoreCase("991a9068-01d3-4ff2-86ed-249bd0541fb3") //live || systemUuid.toString().equalsIgnoreCase("55c08fa5-ef1e-4e94-aadc-e3d6adc80774")) { //dev //emis String name = FilenameUtils.getName(file); String[] toks = name.split("_"); String first = toks[1]; String second = toks[2]; fileObj.setType(first + "_" + second); *//* } else if (systemUuid.toString().equalsIgnoreCase("e517fa69-348a-45e9-a113-d9b59ad13095") || systemUuid.toString().equalsIgnoreCase("b0277098-0b6c-4d9d-86ef-5f399fb25f34")) { //dev //cerner String name = FilenameUtils.getName(file); if (Strings.isNullOrEmpty(name)) { continue; } try { String type = BartsCsvToFhirTransformer.identifyFileType(name); fileObj.setType(type); } catch (Exception ex2) { throw new Exception("Failed to parse file name " + name + " on exchange " + exchange.getId()); }*//* } else { throw new Exception("Unknown system ID " + systemUuid); } newFiles.add(fileObj); } String json = JsonSerializer.serialize(newFiles); exchange.setBody(json); exchangeDal.save(exchange); } } LOG.info("Finished Converting exchange bodies for system " + systemUuid); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixBartsOrgs(String serviceId) { try { LOG.info("Fixing Barts orgs"); ResourceDalI dal = DalProvider.factoryResourceDal(); List<ResourceWrapper> wrappers = dal.getResourcesByService(UUID.fromString(serviceId), ResourceType.Organization.toString()); LOG.debug("Found " + wrappers.size() + " resources"); int done = 0; int fixed = 0; for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { List<ResourceWrapper> history = dal.getResourceHistory(UUID.fromString(serviceId), wrapper.getResourceType(), wrapper.getResourceId()); 
ResourceWrapper mostRecent = history.get(0); String json = mostRecent.getResourceData(); Organization org = (Organization)FhirSerializationHelper.deserializeResource(json); String odsCode = IdentifierHelper.findOdsCode(org); if (Strings.isNullOrEmpty(odsCode) && org.hasIdentifier()) { boolean hasBeenFixed = false; for (Identifier identifier: org.getIdentifier()) { if (identifier.getSystem().equals(FhirIdentifierUri.IDENTIFIER_SYSTEM_ODS_CODE) && identifier.hasId()) { odsCode = identifier.getId(); identifier.setValue(odsCode); identifier.setId(null); hasBeenFixed = true; } } if (hasBeenFixed) { String newJson = FhirSerializationHelper.serializeResource(org); mostRecent.setResourceData(newJson); LOG.debug("Fixed Organization " + org.getId()); *//*LOG.debug(json); LOG.debug(newJson);*//* saveResourceWrapper(UUID.fromString(serviceId), mostRecent); fixed ++; } } } done ++; if (done % 100 == 0) { LOG.debug("Done " + done + ", Fixed " + fixed); } } LOG.debug("Done " + done + ", Fixed " + fixed); LOG.info("Finished Barts orgs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void testPreparedStatements(String url, String user, String pass, String serviceId) { try { LOG.info("Testing Prepared Statements"); LOG.info("Url: " + url); LOG.info("user: " + user); LOG.info("pass: " + pass); //open connection Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", user); props.setProperty("password", pass); Connection conn = DriverManager.getConnection(url, props); String sql = "SELECT * FROM internal_id_map WHERE service_id = ? AND id_type = ? AND source_id = ?"; long start = System.currentTimeMillis(); for (int i=0; i<10000; i++) { PreparedStatement ps = null; try { ps = conn.prepareStatement(sql); ps.setString(1, serviceId); ps.setString(2, "MILLPERSIDtoMRN"); ps.setString(3, UUID.randomUUID().toString()); ResultSet rs = ps.executeQuery(); while (rs.next()) { //do nothing } } finally { if (ps != null) { ps.close(); } } } long end = System.currentTimeMillis(); LOG.info("Took " + (end-start) + " ms"); //close connection conn.close(); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixEncounters(String table) { LOG.info("Fixing encounters from " + table); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm"); Date cutoff = sdf.parse("2018-03-14 11:42"); EntityManager entityManager = ConnectionManager.getAdminEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); List<UUID> serviceIds = new ArrayList<>(); Map<UUID, UUID> hmSystems = new HashMap<>(); String sql = "SELECT service_id, system_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { UUID serviceId = UUID.fromString(rs.getString(1)); UUID systemId = UUID.fromString(rs.getString(2)); serviceIds.add(serviceId); hmSystems.put(serviceId, systemId); } rs.close(); statement.close(); entityManager.close(); for (UUID serviceId: serviceIds) { UUID systemId = hmSystems.get(serviceId); LOG.info("Doing service " + serviceId + " and system " + systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, systemId); List<UUID> exchangeIdsToProcess = new ArrayList<>(); for (UUID exchangeId: exchangeIds) { List<ExchangeTransformAudit> audits = 
exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit: audits) { Date d = audit.getStarted(); if (d.after(cutoff)) { exchangeIdsToProcess.add(exchangeId); break; } } } Map<String, ReferenceList> consultationNewChildMap = new HashMap<>(); Map<String, ReferenceList> observationChildMap = new HashMap<>(); Map<String, ReferenceList> newProblemChildren = new HashMap<>(); for (UUID exchangeId: exchangeIdsToProcess) { Exchange exchange = exchangeDal.getExchange(exchangeId); String[] files = ExchangeHelper.parseExchangeBodyIntoFileList(exchange.getBody()); String version = EmisCsvToFhirTransformer.determineVersion(files); List<String> interestingFiles = new ArrayList<>(); for (String file: files) { if (file.indexOf("CareRecord_Consultation") > -1 || file.indexOf("CareRecord_Observation") > -1 || file.indexOf("CareRecord_Diary") > -1 || file.indexOf("Prescribing_DrugRecord") > -1 || file.indexOf("Prescribing_IssueRecord") > -1 || file.indexOf("CareRecord_Problem") > -1) { interestingFiles.add(file); } } files = interestingFiles.toArray(new String[0]); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.createParsers(serviceId, systemId, exchangeId, files, version, parsers); String dataSharingAgreementGuid = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(parsers); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, systemId, exchangeId, dataSharingAgreementGuid, true); Consultation consultationParser = (Consultation)parsers.get(Consultation.class); while (consultationParser.nextRecord()) { CsvCell consultationGuid = consultationParser.getConsultationGuid(); CsvCell patientGuid = consultationParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); consultationNewChildMap.put(sourceId, new ReferenceList()); } Problem problemParser = (Problem)parsers.get(Problem.class); while (problemParser.nextRecord()) { CsvCell problemGuid = problemParser.getObservationGuid(); CsvCell patientGuid = problemParser.getPatientGuid(); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); newProblemChildren.put(sourceId, new ReferenceList()); } //run this pre-transformer to pre-cache some stuff in the csv helper, which //is needed when working out the resource type that each observation would be saved as ObservationPreTransformer.transform(version, parsers, null, csvHelper); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { CsvCell observationGuid = observationParser.getObservationGuid(); CsvCell patientGuid = observationParser.getPatientGuid(); String obSourceId = EmisCsvHelper.createUniqueId(patientGuid, observationGuid); CsvCell codeId = observationParser.getCodeId(); if (codeId.isEmpty()) { continue; } ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); UUID obUuid = IdHelper.getEdsResourceId(serviceId, resourceType, obSourceId); if (obUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + resourceType + " and source ID " + obSourceId); //resourceType = ObservationTransformer.getTargetResourceType(observationParser, csvHelper); } Reference obReference = ReferenceHelper.createReference(resourceType, obUuid.toString()); CsvCell consultationGuid = 
observationParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell problemGuid = observationParser.getProblemGuid(); if (!problemGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(obReference); } CsvCell parentObGuid = observationParser.getParentObservationGuid(); if (!parentObGuid.isEmpty()) { String sourceId = EmisCsvHelper.createUniqueId(patientGuid, parentObGuid); ReferenceList referenceList = observationChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); observationChildMap.put(sourceId, referenceList); } referenceList.add(obReference); } } Diary diaryParser = (Diary)parsers.get(Diary.class); while (diaryParser.nextRecord()) { CsvCell consultationGuid = diaryParser.getConsultationGuid(); if (!consultationGuid.isEmpty()) { CsvCell diaryGuid = diaryParser.getDiaryGuid(); CsvCell patientGuid = diaryParser.getPatientGuid(); String diarySourceId = EmisCsvHelper.createUniqueId(patientGuid, diaryGuid); UUID diaryUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.ProcedureRequest, diarySourceId); if (diaryUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.ProcedureRequest + " and source ID " + diarySourceId); } Reference diaryReference = ReferenceHelper.createReference(ResourceType.ProcedureRequest, diaryUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, consultationGuid); ReferenceList referenceList = consultationNewChildMap.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); consultationNewChildMap.put(sourceId, referenceList); } referenceList.add(diaryReference); } } IssueRecord issueRecordParser = (IssueRecord)parsers.get(IssueRecord.class); while (issueRecordParser.nextRecord()) { CsvCell problemGuid = issueRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { CsvCell issueRecordGuid = issueRecordParser.getIssueRecordGuid(); CsvCell patientGuid = issueRecordParser.getPatientGuid(); String issueRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, issueRecordGuid); UUID issueRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationOrder, issueRecordSourceId); if (issueRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationOrder + " and source ID " + issueRecordSourceId); } Reference issueRecordReference = ReferenceHelper.createReference(ResourceType.MedicationOrder, issueRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(issueRecordReference); } } DrugRecord drugRecordParser = (DrugRecord)parsers.get(DrugRecord.class); while (drugRecordParser.nextRecord()) { CsvCell problemGuid = drugRecordParser.getProblemObservationGuid(); if (!problemGuid.isEmpty()) { 
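NOTE (sketch): the repeated "look up the ReferenceList, create it if missing, then add" blocks used for
consultationNewChildMap, newProblemChildren and observationChildMap in this method could be collapsed with
Map.computeIfAbsent. A minimal sketch, assuming the same Map<String, ReferenceList> maps and the
ReferenceList.add(..) call already used here:

    private static void addChildReference(Map<String, ReferenceList> map, String sourceId, Reference child) {
        //creates the ReferenceList on first use, then appends the child reference
        map.computeIfAbsent(sourceId, k -> new ReferenceList()).add(child);
    }

    //e.g. addChildReference(consultationNewChildMap, sourceId, obReference);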
CsvCell drugRecordGuid = drugRecordParser.getDrugRecordGuid(); CsvCell patientGuid = drugRecordParser.getPatientGuid(); String drugRecordSourceId = EmisCsvHelper.createUniqueId(patientGuid, drugRecordGuid); UUID drugRecordUuid = IdHelper.getEdsResourceId(serviceId, ResourceType.MedicationStatement, drugRecordSourceId); if (drugRecordUuid == null) { continue; //LOG.error("Null observation UUID for resource type " + ResourceType.MedicationStatement + " and source ID " + drugRecordSourceId); } Reference drugRecordReference = ReferenceHelper.createReference(ResourceType.MedicationStatement, drugRecordUuid.toString()); String sourceId = EmisCsvHelper.createUniqueId(patientGuid, problemGuid); ReferenceList referenceList = newProblemChildren.get(sourceId); if (referenceList == null) { referenceList = new ReferenceList(); newProblemChildren.put(sourceId, referenceList); } referenceList.add(drugRecordReference); } } for (AbstractCsvParser parser : parsers.values()) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } ResourceDalI resourceDal = DalProvider.factoryResourceDal(); LOG.info("Found " + consultationNewChildMap.size() + " Encounters to fix"); for (String encounterSourceId: consultationNewChildMap.keySet()) { ReferenceList childReferences = consultationNewChildMap.get(encounterSourceId); //map to UUID UUID encounterId = IdHelper.getEdsResourceId(serviceId, ResourceType.Encounter, encounterSourceId); if (encounterId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Encounter.toString(), encounterId); if (history.isEmpty()) { continue; //throw new Exception("Empty history for Encounter " + encounterId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Encounter encounter = (Encounter) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Encounter encounter = (Encounter)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); EncounterBuilder encounterBuilder = new EncounterBuilder(encounter); ContainedListBuilder containedListBuilder = new ContainedListBuilder(encounterBuilder); containedListBuilder.addReferences(childReferences); String newJson = FhirSerializationHelper.serializeResource(encounter); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } LOG.info("Found " + observationChildMap.size() + " Parent Observations to fix"); 
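NOTE (sketch): each of these fix-up loops walks the resource history (returned most-recent-first) to find
the last version saved on or before the cutoff. That selection could live in one helper; a minimal sketch,
assuming only the ResourceWrapper.getCreatedAt() accessor already used above:

    private static ResourceWrapper findVersionAtOrBefore(List<ResourceWrapper> historyMostRecentFirst, Date cutoff) {
        for (ResourceWrapper wrapper: historyMostRecentFirst) {
            Date d = wrapper.getCreatedAt();
            if (!d.after(cutoff)) {
                //the first entry not after the cutoff is the version that was in force at that date
                return wrapper;
            }
        }
        //no version existed on or before the cutoff
        return null;
    }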
for (String sourceId: observationChildMap.keySet()) { ReferenceList childReferences = observationChildMap.get(sourceId); //map to UUID ResourceType resourceType = null; UUID resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.Observation, sourceId); if (resourceId != null) { resourceType = ResourceType.Observation; } else { resourceId = IdHelper.getEdsResourceId(serviceId, ResourceType.DiagnosticReport, sourceId); if (resourceId != null) { resourceType = ResourceType.DiagnosticReport; } else { continue; } } //get history, which is most recent FIRST List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, resourceType.toString(), resourceId); if (history.isEmpty()) { //throw new Exception("Empty history for " + resourceType + " " + resourceId); continue; } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (resourceType == ResourceType.Observation) { if (wrapper.getResourceData() != null) { Observation observation = (Observation) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (observation.hasRelated()) { for (Observation.ObservationRelatedComponent related : observation.getRelated()) { Reference reference = related.getTarget(); childReferences.add(reference); } } } } else { if (wrapper.getResourceData() != null) { DiagnosticReport report = (DiagnosticReport) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); if (report.hasResult()) { for (Reference reference : report.getResult()) { childReferences.add(reference); } } } } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Resource resource = FhirSerializationHelper.deserializeResource(currentState.getResourceData()); boolean changed = false; if (resourceType == ResourceType.Observation) { ObservationBuilder resourceBuilder = new ObservationBuilder((Observation)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addChildObservation(reference)) { changed = true; } } } else { DiagnosticReportBuilder resourceBuilder = new DiagnosticReportBuilder((DiagnosticReport)resource); for (int i=0; i<childReferences.size(); i++) { Reference reference = childReferences.getReference(i); if (resourceBuilder.addResult(reference)) { changed = true; } } } if (changed) { String newJson = FhirSerializationHelper.serializeResource(resource); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); }*//* } LOG.info("Found " + newProblemChildren.size() + " Problems to fix"); for (String sourceId: newProblemChildren.keySet()) { ReferenceList childReferences = newProblemChildren.get(sourceId); //map to UUID UUID conditionId = IdHelper.getEdsResourceId(serviceId, ResourceType.Condition, sourceId); if (conditionId == null) { continue; } //get history, which is most recent FIRST List<ResourceWrapper> history = 
resourceDal.getResourceHistory(serviceId, ResourceType.Condition.toString(), conditionId); if (history.isEmpty()) { continue; //throw new Exception("Empty history for Condition " + conditionId); } ResourceWrapper currentState = history.get(0); if (currentState.isDeleted()) { continue; } //find last instance prior to cutoff and get its linked children for (ResourceWrapper wrapper: history) { Date d = wrapper.getCreatedAt(); if (!d.after(cutoff)) { if (wrapper.getResourceData() != null) { Condition previousVersion = (Condition) FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(previousVersion); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); List<Reference> previousChildren = containedListBuilder.getContainedListItems(); childReferences.add(previousChildren); } break; } } if (childReferences.size() == 0) { continue; } String json = currentState.getResourceData(); Resource resource = FhirSerializationHelper.deserializeResource(json); String newJson = FhirSerializationHelper.serializeResource(resource); if (!json.equals(newJson)) { currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState); } *//*Condition condition = (Condition)FhirSerializationHelper.deserializeResource(currentState.getResourceData()); ConditionBuilder conditionBuilder = new ConditionBuilder(condition); ContainedListBuilder containedListBuilder = new ContainedListBuilder(conditionBuilder); containedListBuilder.addReferences(childReferences); String newJson = FhirSerializationHelper.serializeResource(condition); currentState.setResourceData(newJson); currentState.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); saveResourceWrapper(serviceId, currentState);*//* } //mark as done String updateSql = "UPDATE " + table + " SET done = 1 WHERE service_id = '" + serviceId + "';"; entityManager = ConnectionManager.getAdminEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); } */ /** * For each practice: * Go through all files processed since 14 March * Cache all links as above * Cache all Encounters saved too * <p> * For each Encounter referenced at all: * Retrieve latest version from resource current * Retrieve version prior to 14 March * Update current version with old references plus new ones * <p> * For each parent observation: * Retrieve latest version (could be observation or diagnostic report) * <p> * For each problem: * Retrieve latest version from resource current * Check if still a problem: * Retrieve version prior to 14 March * Update current version with old references plus new ones *//* LOG.info("Finished Fixing encounters from " + table); } catch (Throwable t) { LOG.error("", t); } }*/ private static void saveResourceWrapper(UUID serviceId, ResourceWrapper wrapper) throws Exception { if (wrapper.getVersion() == null) { throw new Exception("Can't update resource history without version UUID"); } if (wrapper.getResourceData() != null) { long checksum = FhirStorageService.generateChecksum(wrapper.getResourceData()); wrapper.setResourceChecksum(new Long(checksum)); } EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl) 
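// NOTE (sketch): the two UPDATE statements below are built by concatenating the resource JSON into the
// SQL, with hand-rolled escaping of quotes and backslashes. A parameterised statement avoids that escaping
// entirely; a minimal sketch of the resource_current update, assuming java.sql.PreparedStatement and the
// connection/patientId variables set up below (not a tested drop-in replacement for this method):
//
//   String sql = "UPDATE resource_current"
//           + " SET resource_data = ?, resource_checksum = ?"
//           + " WHERE service_id = ? AND patient_id = ? AND resource_type = ? AND resource_id = ?";
//   try (PreparedStatement ps = connection.prepareStatement(sql)) {
//       ps.setString(1, wrapper.getResourceData());
//       ps.setLong(2, wrapper.getResourceChecksum()); //checksum is set below when resource data is present
//       ps.setString(3, wrapper.getServiceId().toString());
//       ps.setString(4, patientId);
//       ps.setString(5, wrapper.getResourceType());
//       ps.setString(6, wrapper.getResourceId().toString());
//       ps.executeUpdate();
//   }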
entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); entityManager.getTransaction().begin(); String json = wrapper.getResourceData(); json = json.replace("'", "''"); json = json.replace("\\", "\\\\"); String patientId = ""; if (wrapper.getPatientId() != null) { patientId = wrapper.getPatientId().toString(); } String updateSql = "UPDATE resource_current" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE service_id = '" + wrapper.getServiceId() + "'" + " AND patient_id = '" + patientId + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" + " AND resource_id = '" + wrapper.getResourceId() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); //SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:SS"); //String createdAtStr = sdf.format(wrapper.getCreatedAt()); updateSql = "UPDATE resource_history" + " SET resource_data = '" + json + "'," + " resource_checksum = " + wrapper.getResourceChecksum() + " WHERE resource_id = '" + wrapper.getResourceId() + "'" + " AND resource_type = '" + wrapper.getResourceType() + "'" //+ " AND created_at = '" + createdAtStr + "'" + " AND version = '" + wrapper.getVersion() + "'"; statement.executeUpdate(updateSql); //LOG.debug(updateSql); entityManager.getTransaction().commit(); } /*private static void populateNewSearchTable(String table) { LOG.info("Populating New Search Table"); try { EntityManager entityManager = ConnectionManager.getEdsEntityManager(); SessionImpl session = (SessionImpl)entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); List<String> patientIds = new ArrayList<>(); Map<String, String> serviceIds = new HashMap<>(); String sql = "SELECT patient_id, service_id FROM " + table + " WHERE done = 0"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientId = rs.getString(1); String serviceId = rs.getString(2); patientIds.add(patientId); serviceIds.put(patientId, serviceId); } rs.close(); statement.close(); entityManager.close(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); LOG.info("Found " + patientIds.size() + " to do"); for (int i=0; i<patientIds.size(); i++) { String patientIdStr = patientIds.get(i); UUID patientId = UUID.fromString(patientIdStr); String serviceIdStr = serviceIds.get(patientIdStr); UUID serviceId = UUID.fromString(serviceIdStr); Patient patient = (Patient)resourceDal.getCurrentVersionAsResource(serviceId, ResourceType.Patient, patientIdStr); if (patient != null) { LOG.debug("Updating for patient " + patientIdStr); patientSearchDal.update(serviceId, patient); LOG.debug("Done"); } else { List<ResourceWrapper> history = resourceDal.getResourceHistory(serviceId, ResourceType.Patient.toString(), patientId); if (history.isEmpty()) { LOG.debug("No history found for patient " + patientIdStr); } else { ResourceWrapper first = history.get(0); if (!first.isDeleted()) { throw new Exception("Resource current null for " + ResourceType.Patient + " " + patientIdStr + " but not deleted in resource_history"); } //find first non-deleted instance and update for it, then delete for (ResourceWrapper historyItem: history) { if (!historyItem.isDeleted()) { patient = (Patient)FhirSerializationHelper.deserializeResource(historyItem.getResourceData()); LOG.debug("Patient is deleted, so updating for 
deleted patient " + patientIdStr); patientSearchDal.update(serviceId, patient); patientSearchDal.deletePatient(serviceId, patient); LOG.debug("Done"); break; } } } } //find episode of care //note, we don't have any current way to retrieve deleted episodes of care for a patient, so can only do this for non-deleted ones List<ResourceWrapper> wrappers = resourceDal.getResourcesByPatient(serviceId, patientId, ResourceType.EpisodeOfCare.toString()); for (ResourceWrapper wrapper: wrappers) { if (!wrapper.isDeleted()) { LOG.debug("Updating for episodeOfCare resource " + wrapper.getResourceId()); EpisodeOfCare episodeOfCare = (EpisodeOfCare)FhirSerializationHelper.deserializeResource(wrapper.getResourceData()); patientSearchDal.update(serviceId, episodeOfCare); LOG.debug("Done"); } else { LOG.debug("EpisodeOfCare " + wrapper.getResourceId() + " is deleted"); } } String updateSql = "UPDATE " + table + " SET done = 1 WHERE patient_id = '" + patientIdStr + "' AND service_id = '" + serviceIdStr + "';"; entityManager = ConnectionManager.getEdsEntityManager(); session = (SessionImpl)entityManager.getDelegate(); connection = session.connection(); statement = connection.createStatement(); entityManager.getTransaction().begin(); statement.executeUpdate(updateSql); entityManager.getTransaction().commit(); if (i % 5000 == 0) { LOG.info("Done " + (i+1) + " of " + patientIds.size()); } } entityManager.close(); LOG.info("Finished Populating New Search Table"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void createBartsSubset(String sourceDir, UUID serviceUuid, UUID systemUuid, String samplePatientsFile) { LOG.info("Creating Barts Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } createBartsSubsetForFile(sourceDir, serviceUuid, systemUuid, personIds); LOG.info("Finished Creating Barts Subset"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void createBartsSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { for (File sourceFile: sourceDir.listFiles()) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } LOG.info("Doing dir " + sourceFile); createBartsSubsetForFile(sourceFile, destFile, personIds); } else { //we have some bad partial files in, so ignore them String ext = FilenameUtils.getExtension(name); if (ext.equalsIgnoreCase("filepart")) { continue; } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String baseName = FilenameUtils.getBaseName(name); String fileType = BartsCsvToFhirTransformer.identifyFileType(baseName); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex ++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding 
personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i=0; i<expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if (personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex+1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } }*/ /*private static void createBartsSubsetForFile(String sourceDir, UUID serviceUuid, UUID systemUuid, Set<String> personIds) throws Exception { ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<Exchange> exchanges = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); for (Exchange exchange : exchanges) { List<ExchangePayloadFile> files = ExchangeHelper.parseExchangeBody(exchange.getBody()); for (ExchangePayloadFile fileObj : files) { String filePathWithoutSharedStorage = fileObj.getPath().substring(TransformConfig.instance().getSharedStoragePath().length() + 1); String sourceFilePath = FilenameUtils.concat(sourceDir, filePathWithoutSharedStorage); File sourceFile = new File(sourceFilePath); String destFilePath = fileObj.getPath(); File destFile = new File(destFilePath); File destDir = destFile.getParentFile(); if (!destDir.exists()) { destDir.mkdirs(); } //if the file is empty, we still need the empty file in the filtered directory, so just copy it if (sourceFile.length() == 0) { LOG.info("Copying empty file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } continue; } String fileType = fileObj.getType(); if (isCerner22File(fileType)) { LOG.info("Checking 2.2 file " + sourceFile); if (destFile.exists()) { destFile.delete(); } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); int lineIndex = -1; PrintWriter pw = null; int personIdColIndex = -1; int expectedCols = -1; while (true) { String line = br.readLine(); if (line == null) { break; } lineIndex++; if (lineIndex == 0) { if (fileType.equalsIgnoreCase("FAMILYHISTORY")) { //this file has no headers, so needs hard-coding personIdColIndex = 5; } else { //check headings for PersonID col String[] toks = line.split("\\|", -1); expectedCols = toks.length; for (int i = 0; i < expectedCols; i++) { String col = toks[i]; if (col.equalsIgnoreCase("PERSON_ID") || col.equalsIgnoreCase("#PERSON_ID")) { personIdColIndex = i; break; } } //if no person ID, then just copy the entire file if 
(personIdColIndex == -1) { br.close(); br = null; LOG.info(" Copying 2.2 file to " + destFile); copyFile(sourceFile, destFile); break; } else { LOG.info(" Filtering 2.2 file to " + destFile + ", person ID col at " + personIdColIndex); } } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); pw = new PrintWriter(bw); } else { //filter on personID String[] toks = line.split("\\|", -1); if (expectedCols != -1 && toks.length != expectedCols) { throw new Exception("Line " + (lineIndex + 1) + " has " + toks.length + " cols but expecting " + expectedCols); } else { String personId = toks[personIdColIndex]; if (!Strings.isNullOrEmpty(personId) //always carry over rows with empty person ID, as Cerner won't send the person ID for deletes && !personIds.contains(personId)) { continue; } } } pw.println(line); } if (br != null) { br.close(); } if (pw != null) { pw.flush(); pw.close(); } } else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); if (!destFile.exists()) { copyFile(sourceFile, destFile); } } } } }*/ private static void copyFile(File src, File dst) throws Exception { FileInputStream fis = new FileInputStream(src); BufferedInputStream bis = new BufferedInputStream(fis); Files.copy(bis, dst.toPath()); bis.close(); } private static boolean isCerner22File(String fileType) throws Exception { if (fileType.equalsIgnoreCase("PPATI") || fileType.equalsIgnoreCase("PPREL") || fileType.equalsIgnoreCase("CDSEV") || fileType.equalsIgnoreCase("PPATH") || fileType.equalsIgnoreCase("RTTPE") || fileType.equalsIgnoreCase("AEATT") || fileType.equalsIgnoreCase("AEINV") || fileType.equalsIgnoreCase("AETRE") || fileType.equalsIgnoreCase("OPREF") || fileType.equalsIgnoreCase("OPATT") || fileType.equalsIgnoreCase("EALEN") || fileType.equalsIgnoreCase("EALSU") || fileType.equalsIgnoreCase("EALOF") || fileType.equalsIgnoreCase("HPSSP") || fileType.equalsIgnoreCase("IPEPI") || fileType.equalsIgnoreCase("IPWDS") || fileType.equalsIgnoreCase("DELIV") || fileType.equalsIgnoreCase("BIRTH") || fileType.equalsIgnoreCase("SCHAC") || fileType.equalsIgnoreCase("APPSL") || fileType.equalsIgnoreCase("DIAGN") || fileType.equalsIgnoreCase("PROCE") || fileType.equalsIgnoreCase("ORDER") || fileType.equalsIgnoreCase("DOCRP") || fileType.equalsIgnoreCase("DOCREF") || fileType.equalsIgnoreCase("CNTRQ") || fileType.equalsIgnoreCase("LETRS") || fileType.equalsIgnoreCase("LOREF") || fileType.equalsIgnoreCase("ORGREF") || fileType.equalsIgnoreCase("PRSNLREF") || fileType.equalsIgnoreCase("CVREF") || fileType.equalsIgnoreCase("NOMREF") || fileType.equalsIgnoreCase("EALIP") || fileType.equalsIgnoreCase("CLEVE") || fileType.equalsIgnoreCase("ENCNT") || fileType.equalsIgnoreCase("RESREF") || fileType.equalsIgnoreCase("PPNAM") || fileType.equalsIgnoreCase("PPADD") || fileType.equalsIgnoreCase("PPPHO") || fileType.equalsIgnoreCase("PPALI") || fileType.equalsIgnoreCase("PPINF") || fileType.equalsIgnoreCase("PPAGP") || fileType.equalsIgnoreCase("SURCC") || fileType.equalsIgnoreCase("SURCP") || fileType.equalsIgnoreCase("SURCA") || fileType.equalsIgnoreCase("SURCD") || fileType.equalsIgnoreCase("PDRES") || fileType.equalsIgnoreCase("PDREF") || fileType.equalsIgnoreCase("ABREF") || fileType.equalsIgnoreCase("CEPRS") || fileType.equalsIgnoreCase("ORDDT") || fileType.equalsIgnoreCase("STATREF") || fileType.equalsIgnoreCase("STATA") || fileType.equalsIgnoreCase("ENCINF") || fileType.equalsIgnoreCase("SCHDETAIL") || 
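// NOTE (sketch): this long chain of equalsIgnoreCase checks could be replaced with a single Set lookup;
// a minimal sketch, assuming java.util.Set/HashSet/Arrays and the same 2.2 file-type names listed in this
// method (abbreviated here - the full list is the set of strings checked above and below):
//
//   private static final Set<String> CERNER_22_FILE_TYPES = new HashSet<>(Arrays.asList(
//           "PPATI", "PPREL", "CDSEV", "PPATH", "RTTPE", "AEATT", "FAMILYHISTORY")); //...plus the rest
//
//   private static boolean isCerner22FileViaSet(String fileType) {
//       return CERNER_22_FILE_TYPES.contains(fileType.toUpperCase());
//   }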
fileType.equalsIgnoreCase("SCHOFFER") || fileType.equalsIgnoreCase("PPGPORG") || fileType.equalsIgnoreCase("FAMILYHISTORY")) { return true; } else { return false; } } /*private static void fixSubscriberDbs() { LOG.info("Fixing Subscriber DBs"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-05-11"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info(" Posting exchange " + exchangeId + " with " + batches.size() + " batches"); List<UUID> batchIds = new ArrayList<>(); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); batchIds.add(batchId); } String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } LOG.info("Finished Fixing Subscriber DBs"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixReferralRequests() { LOG.info("Fixing Referral Requests"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDal = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID emisSystem = UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); Date dateError = new SimpleDateFormat("yyyy-MM-dd").parse("2018-04-24"); List<Service> services = 
serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); boolean needsFixing = false; Set<UUID> patientIdsToPost = new HashSet<>(); for (UUID exchangeId: exchangeIds) { if (!needsFixing) { List<ExchangeTransformAudit> transformAudits = exchangeDal.getAllExchangeTransformAudits(serviceId, endpointSystemId, exchangeId); for (ExchangeTransformAudit audit: transformAudits) { Date transfromStart = audit.getStarted(); if (!transfromStart.before(dateError)) { needsFixing = true; break; } } } if (!needsFixing) { continue; } List<ExchangeBatch> batches = exchangeBatchDal.retrieveForExchangeId(exchangeId); Exchange exchange = exchangeDal.getExchange(exchangeId); LOG.info("Checking exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } UUID batchId = batch.getBatchId(); List<ResourceWrapper> wrappers = resourceDal.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper wrapper: wrappers) { String resourceType = wrapper.getResourceType(); if (!resourceType.equals(ResourceType.ReferralRequest.toString()) || wrapper.isDeleted()) { continue; } String json = wrapper.getResourceData(); ReferralRequest referral = (ReferralRequest)FhirSerializationHelper.deserializeResource(json); *//*if (!referral.hasServiceRequested()) { continue; } CodeableConcept reason = referral.getServiceRequested().get(0); referral.setReason(reason); referral.getServiceRequested().clear();*//* if (!referral.hasReason()) { continue; } CodeableConcept reason = referral.getReason(); referral.setReason(null); referral.addServiceRequested(reason); json = FhirSerializationHelper.serializeResource(referral); wrapper.setResourceData(json); saveResourceWrapper(serviceId, wrapper); //add to the set of patients we know need sending on to the protocol queue patientIdsToPost.add(patientId); LOG.info("Fixed " + resourceType + " " + wrapper.getResourceId() + " in batch " + batchId); } //if our patient has just been fixed or was fixed before, post onto the protocol queue if (patientIdsToPost.contains(patientId)) { List<UUID> batchIds = new ArrayList<>(); batchIds.add(batchId); String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } } } } LOG.info("Finished Fixing Referral Requests"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void applyEmisAdminCaches() { LOG.info("Applying Emis Admin Caches"); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); UUID emisSystem = 
UUID.fromString("991a9068-01d3-4ff2-86ed-249bd0541fb3"); UUID emisSystemDev = UUID.fromString("55c08fa5-ef1e-4e94-aadc-e3d6adc80774"); List<Service> services = serviceDal.getAll(); for (Service service: services) { String endpointsJson = service.getEndpoints(); if (Strings.isNullOrEmpty(endpointsJson)) { continue; } UUID serviceId = service.getId(); LOG.info("Checking " + service.getName() + " " + serviceId); List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); if (!endpointSystemId.equals(emisSystem) && !endpointSystemId.equals(emisSystemDev)) { LOG.info(" Skipping system ID " + endpointSystemId + " as not Emis"); continue; } if (!exchangeDal.isServiceStarted(serviceId, endpointSystemId)) { LOG.info(" Service not started, so skipping"); continue; } //get exchanges List<UUID> exchangeIds = exchangeDal.getExchangeIdsForService(serviceId, endpointSystemId); if (exchangeIds.isEmpty()) { LOG.info(" No exchanges found, so skipping"); continue; } UUID firstExchangeId = exchangeIds.get(0); List<ExchangeEvent> events = exchangeDal.getExchangeEvents(firstExchangeId); boolean appliedAdminCache = false; for (ExchangeEvent event: events) { if (event.getEventDesc().equals("Applied Emis Admin Resource Cache")) { appliedAdminCache = true; } } if (appliedAdminCache) { LOG.info(" Have already applied admin cache, so skipping"); continue; } Exchange exchange = exchangeDal.getExchange(firstExchangeId); String body = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(body); if (files.length == 0) { LOG.info(" No files in exchange " + firstExchangeId + " so skipping"); continue; } String firstFilePath = files[0]; String name = FilenameUtils.getBaseName(firstFilePath); //file name without extension String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + firstFilePath); } String sharingAgreementGuid = toks[4]; List<UUID> batchIds = new ArrayList<>(); TransformError transformError = new TransformError(); FhirResourceFiler fhirResourceFiler = new FhirResourceFiler(firstExchangeId, serviceId, endpointSystemId, transformError, batchIds); EmisCsvHelper csvHelper = new EmisCsvHelper(fhirResourceFiler.getServiceId(), fhirResourceFiler.getSystemId(), fhirResourceFiler.getExchangeId(), sharingAgreementGuid, true); ExchangeTransformAudit transformAudit = new ExchangeTransformAudit(); transformAudit.setServiceId(serviceId); transformAudit.setSystemId(endpointSystemId); transformAudit.setExchangeId(firstExchangeId); transformAudit.setId(UUID.randomUUID()); transformAudit.setStarted(new Date()); LOG.info(" Going to apply admin resource cache"); csvHelper.applyAdminResourceCache(fhirResourceFiler); fhirResourceFiler.waitToFinish(); for (UUID batchId: batchIds) { LOG.info(" Created batch ID " + batchId + " for exchange " + firstExchangeId); } transformAudit.setEnded(new Date()); transformAudit.setNumberBatchesCreated(new Integer(batchIds.size())); boolean hadError = false; if (transformError.getError().size() > 0) { transformAudit.setErrorXml(TransformErrorSerializer.writeToXml(transformError)); hadError = true; } exchangeDal.save(transformAudit); //clear down the cache of reference mappings since they won't be of much use for the next Exchange IdHelper.clearCache(); if (hadError) { 
LOG.error(" <<<<<<Error applying resource cache!"); continue; } //add the event to say we've applied the cache AuditWriter.writeExchangeEvent(firstExchangeId, "Applied Emis Admin Resource Cache"); //post that ONE new batch ID onto the protocol queue String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchIds.toArray()); exchange.setHeader(HeaderKeys.BatchIdsJson, batchUuidsStr); PostMessageToExchangeConfig exchangeConfig = QueueHelper.findExchangeConfig("EdsProtocol"); PostMessageToExchange component = new PostMessageToExchange(exchangeConfig); component.process(exchange); } } LOG.info("Finished Applying Emis Admin Caches"); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixBartsEscapedFiles(String filePath) { LOG.info("Fixing Barts Escaped Files in " + filePath); try { fixBartsEscapedFilesInDir(new File(filePath)); LOG.info("Finished fixing Barts Escaped Files in " + filePath); } catch (Throwable t) { LOG.error("", t); } } /** * fixes Emis extract(s) when a practice was disabled then subsequently re-bulked, by * replacing the "delete" extracts with newly generated deltas that can be processed * before the re-bulk is done */ /*private static void fixDisabledEmisExtract(String serviceOdsCode, String systemId, String sharedStoragePath, String tempDirParent) { LOG.info("Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceOdsCode); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Service service = serviceDal.getByLocalIdentifier(serviceOdsCode); LOG.info("Service " + service.getId() + " " + service.getName() + " " + service.getLocalId()); *//*File tempDirLast = new File(tempDir, "last"); if (!tempDirLast.exists()) { if (!tempDirLast.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirLast); } tempDirLast.mkdirs(); } File tempDirEmpty = new File(tempDir, "empty"); if (!tempDirEmpty.exists()) { if (!tempDirEmpty.mkdirs()) { throw new Exception("Failed to create temp dir " + tempDirEmpty); } tempDirEmpty.mkdirs(); }*//* String tempDir = FilenameUtils.concat(tempDirParent, serviceOdsCode); File f = new File(tempDir); if (f.exists()) { FileUtils.deleteDirectory(f); } UUID serviceUuid = service.getId(); UUID systemUuid = UUID.fromString(systemId); ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); //get all the exchanges, which are returned in reverse order, most recent first List<Exchange> exchangesDesc = exchangeDal.getExchangesByService(serviceUuid, systemUuid, Integer.MAX_VALUE); Map<Exchange, List<String>> hmExchangeFiles = new HashMap<>(); Map<Exchange, List<String>> hmExchangeFilesWithoutStoragePrefix = new HashMap<>(); //reverse the exchange list and cache the files for each one List<Exchange> exchanges = new ArrayList<>(); for (int i = exchangesDesc.size() - 1; i >= 0; i--) { Exchange exchange = exchangesDesc.get(i); String exchangeBody = exchange.getBody(); String[] files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); //drop out and ignore any exchanges containing the singular bespoke reg status files if (files.length <= 1) { continue; } //drop out and ignore any exchanges for the left and dead extracts, since we don't //expect to receive re-bulked data for the dead patients String firstFile = files[0]; if (firstFile.indexOf("LEFT_AND_DEAD") > -1) { continue; } exchanges.add(exchange); //populate the map of the files with the shared storage prefix List<String> fileList = Lists.newArrayList(files); hmExchangeFiles.put(exchange, fileList); //populate a map of the same files 
without the prefix files = ExchangeHelper.parseExchangeBodyOldWay(exchangeBody); for (int j = 0; j < files.length; j++) { String file = files[j].substring(sharedStoragePath.length() + 1); files[j] = file; } fileList = Lists.newArrayList(files); hmExchangeFilesWithoutStoragePrefix.put(exchange, fileList); } *//*exchanges.sort((o1, o2) -> { Date d1 = o1.getTimestamp(); Date d2 = o2.getTimestamp(); return d1.compareTo(d2); });*//* LOG.info("Found " + exchanges.size() + " exchanges and cached their files"); int indexDisabled = -1; int indexRebulked = -1; int indexOriginallyBulked = -1; //go back through them to find the extract where the re-bulk is and when it was disabled (the list is in date order, so we're iterating most-recent first) for (int i = exchanges.size() - 1; i >= 0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { indexDisabled = i; } else { if (indexDisabled == -1) { indexRebulked = i; } else { //if we've found a non-disabled extract older than the disabled ones, //then we've gone far enough back break; } } } //go back from when disabled to find the previous bulk load (i.e. the first one or one after it was previously not disabled) for (int i = indexDisabled - 1; i >= 0; i--) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); boolean disabled = isDisabledInSharingAgreementFile(files); if (disabled) { break; } indexOriginallyBulked = i; } if (indexOriginallyBulked > -1) { Exchange exchangeOriginallyBulked = exchanges.get(indexOriginallyBulked); LOG.info("Originally bulked on " + findExtractDate(exchangeOriginallyBulked, hmExchangeFiles) + " " + exchangeOriginallyBulked.getId()); } if (indexDisabled > -1) { Exchange exchangeDisabled = exchanges.get(indexDisabled); LOG.info("Disabled on " + findExtractDate(exchangeDisabled, hmExchangeFiles) + " " + exchangeDisabled.getId()); } if (indexRebulked > -1) { Exchange exchangeRebulked = exchanges.get(indexRebulked); LOG.info("Rebulked on " + findExtractDate(exchangeRebulked, hmExchangeFiles) + " " + exchangeRebulked.getId()); } if (indexDisabled == -1 || indexRebulked == -1 || indexOriginallyBulked == -1) { throw new Exception("Failed to find exchanges for original bulk (" + indexOriginallyBulked + ") disabling (" + indexDisabled + ") or re-bulking (" + indexRebulked + ")"); } //continueOrQuit(); Exchange exchangeRebulked = exchanges.get(indexRebulked); List<String> rebulkFiles = hmExchangeFiles.get(exchangeRebulked); List<String> tempFilesCreated = new ArrayList<>(); Set<String> patientGuidsDeletedOrTooOld = new HashSet<>(); for (String rebulkFile : rebulkFiles) { String fileType = findFileType(rebulkFile); if (!isPatientFile(fileType)) { continue; } LOG.info("Doing " + fileType); String guidColumnName = getGuidColumnName(fileType); //find all the guids in the re-bulk Set<String> idsInRebulk = new HashSet<>(); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(rebulkFile); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); String[] headers = null; try { headers = CsvHelper.getHeaderMapAsArray(csvParser); Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); //get the patient and row guid out of the file and cache in our set String id = record.get("PatientGuid"); if (!Strings.isNullOrEmpty(guidColumnName)) { id += "//" + record.get(guidColumnName); } 
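NOTE (sketch): this read-loop-close pattern (repeated for the older extracts further down) could use
try-with-resources, since CSVParser is Closeable; a minimal sketch using the same reader, CSV format and
variables as above:

    try (CSVParser parser = new CSVParser(FileHelper.readFileReaderFromSharedStorage(rebulkFile), EmisCsvToFhirTransformer.CSV_FORMAT)) {
        for (CSVRecord record: parser) {
            String id = record.get("PatientGuid");
            if (!Strings.isNullOrEmpty(guidColumnName)) {
                id += "//" + record.get(guidColumnName);
            }
            idsInRebulk.add(id);
        }
    }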
idsInRebulk.add(id); } } finally { csvParser.close(); } LOG.info("Found " + idsInRebulk.size() + " IDs in re-bulk file: " + rebulkFile); //create a replacement file for the exchange the service was disabled String replacementDisabledFile = null; Exchange exchangeDisabled = exchanges.get(indexDisabled); List<String> disabledFiles = hmExchangeFilesWithoutStoragePrefix.get(exchangeDisabled); for (String s : disabledFiles) { String disabledFileType = findFileType(s); if (disabledFileType.equals(fileType)) { replacementDisabledFile = FilenameUtils.concat(tempDir, s); File dir = new File(replacementDisabledFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } tempFilesCreated.add(s); LOG.info("Created replacement file " + replacementDisabledFile); } } FileWriter fileWriter = new FileWriter(replacementDisabledFile); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); Set<String> pastIdsProcessed = new HashSet<>(); //now go through all files of the same type PRIOR to the service was disabled //to find any rows that we'll need to explicitly delete because they were deleted while //the extract was disabled for (int i = indexDisabled - 1; i >= indexOriginallyBulked; i--) { Exchange exchange = exchanges.get(i); String originalFile = null; List<String> files = hmExchangeFiles.get(exchange); for (String s : files) { String originalFileType = findFileType(s); if (originalFileType.equals(fileType)) { originalFile = s; break; } } if (originalFile == null) { continue; } LOG.info(" Reading " + originalFile); reader = FileHelper.readFileReaderFromSharedStorage(originalFile); csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); while (iterator.hasNext()) { CSVRecord record = iterator.next(); String patientGuid = record.get("PatientGuid"); //get the patient and row guid out of the file and cache in our set String uniqueId = patientGuid; if (!Strings.isNullOrEmpty(guidColumnName)) { uniqueId += "//" + record.get(guidColumnName); } //if we're already handled this record in a more recent extract, then skip it if (pastIdsProcessed.contains(uniqueId)) { continue; } pastIdsProcessed.add(uniqueId); //if this ID isn't deleted and isn't in the re-bulk then it means //it WAS deleted in Emis Web but we didn't receive the delete, because it was deleted //from Emis Web while the extract feed was disabled //if the record is deleted, then we won't expect it in the re-bulk boolean deleted = Boolean.parseBoolean(record.get("Deleted")); if (deleted) { //if it's the Patient file, stick the patient GUID in a set so we know full patient record deletes if (fileType.equals("Admin_Patient")) { patientGuidsDeletedOrTooOld.add(patientGuid); } continue; } //if it's not the patient file and we refer to a patient that we know //has been deleted, then skip this row, since we know we're deleting the entire patient record if (patientGuidsDeletedOrTooOld.contains(patientGuid)) { continue; } //if the re-bulk contains a record matching this one, then it's OK if (idsInRebulk.contains(uniqueId)) { continue; } //the rebulk won't contain any data for patients that are now too old (i.e. 
deducted or deceased > 2 yrs ago), //so any patient ID in the original files but not in the rebulk can be treated like this and any data for them can be skipped if (fileType.equals("Admin_Patient")) { //retrieve the Patient and EpisodeOfCare resource for the patient so we can confirm they are deceased or deducted ResourceDalI resourceDal = DalProvider.factoryResourceDal(); UUID patientUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.Patient, patientGuid); if (patientUuid == null) { throw new Exception("Failed to find patient UUID from GUID [" + patientGuid + "]"); } Patient patientResource = (Patient) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.Patient, patientUuid.toString()); if (patientResource.hasDeceased()) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } UUID episodeUuid = IdHelper.getEdsResourceId(serviceUuid, ResourceType.EpisodeOfCare, patientGuid); //we use the patient GUID for the episode too EpisodeOfCare episodeResource = (EpisodeOfCare) resourceDal.getCurrentVersionAsResource(serviceUuid, ResourceType.EpisodeOfCare, episodeUuid.toString()); if (episodeResource.hasPeriod() && !PeriodHelper.isActive(episodeResource.getPeriod())) { patientGuidsDeletedOrTooOld.add(patientGuid); continue; } } //create a new CSV record, carrying over the GUIDs from the original but marking as deleted String[] newRecord = new String[headers.length]; for (int j = 0; j < newRecord.length; j++) { String header = headers[j]; if (header.equals("PatientGuid") || header.equals("OrganisationGuid") || (!Strings.isNullOrEmpty(guidColumnName) && header.equals(guidColumnName))) { String val = record.get(header); newRecord[j] = val; } else if (header.equals("Deleted")) { newRecord[j] = "true"; } else { newRecord[j] = ""; } } csvPrinter.printRecord((Object[]) newRecord); csvPrinter.flush(); //log out the raw record that's missing from the original StringBuffer sb = new StringBuffer(); sb.append("Record not in re-bulk: "); for (int j = 0; j < record.size(); j++) { if (j > 0) { sb.append(","); } sb.append(record.get(j)); } LOG.info(sb.toString()); } } finally { csvParser.close(); } } csvPrinter.flush(); csvPrinter.close(); //also create a version of the CSV file with just the header and nothing else in for (int i = indexDisabled + 1; i < indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s : exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals(fileType)) { String emptyTempFile = FilenameUtils.concat(tempDir, s); File dir = new File(emptyTempFile).getParentFile(); if (!dir.exists()) { if (!dir.mkdirs()) { throw new Exception("Failed to create directory " + dir); } } fileWriter = new FileWriter(emptyTempFile); bufferedWriter = new BufferedWriter(fileWriter); csvPrinter = new CSVPrinter(bufferedWriter, EmisCsvToFhirTransformer.CSV_FORMAT.withHeader(headers)); csvPrinter.flush(); csvPrinter.close(); tempFilesCreated.add(s); LOG.info("Created empty file " + emptyTempFile); } } } } //we also need to copy the restored sharing agreement file to replace all the period it was disabled String rebulkedSharingAgreementFile = null; for (String s : rebulkFiles) { String fileType = findFileType(s); if (fileType.equals("Agreements_SharingOrganisation")) { rebulkedSharingAgreementFile = s; } } for (int i = indexDisabled; i < indexRebulked; i++) { Exchange ex = exchanges.get(i); List<String> exchangeFiles = hmExchangeFilesWithoutStoragePrefix.get(ex); for (String s : 
exchangeFiles) { String exchangeFileType = findFileType(s); if (exchangeFileType.equals("Agreements_SharingOrganisation")) { String replacementFile = FilenameUtils.concat(tempDir, s); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkedSharingAgreementFile); File replacementFileObj = new File(replacementFile); Files.copy(inputStream, replacementFileObj.toPath()); inputStream.close(); tempFilesCreated.add(s); } } } //create a script to copy the files into S3 List<String> copyScript = new ArrayList<>(); copyScript.add("#!/bin/bash"); copyScript.add(""); for (String s : tempFilesCreated) { String localFile = FilenameUtils.concat(tempDir, s); copyScript.add("sudo aws s3 cp " + localFile + " s3://discoverysftplanding/endeavour/" + s); } String scriptFile = FilenameUtils.concat(tempDir, "copy.sh"); FileUtils.writeLines(new File(scriptFile), copyScript); LOG.info("Finished - written files to " + tempDir); dumpFileSizes(new File(tempDir)); *//*continueOrQuit(); //back up every file where the service was disabled for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { //first download from S3 to the local temp dir InputStream inputStream = FileHelper.readFileFromSharedStorage(file); String fileName = FilenameUtils.getName(file); String tempPath = FilenameUtils.concat(tempDir, fileName); File downloadDestination = new File(tempPath); Files.copy(inputStream, downloadDestination.toPath()); //then write back to S3 in a sub-dir of the original file String backupPath = FilenameUtils.getPath(file); backupPath = FilenameUtils.concat(backupPath, "Original"); backupPath = FilenameUtils.concat(backupPath, fileName); FileHelper.writeFileToSharedStorage(backupPath, downloadDestination); LOG.info("Backed up " + file + " -> " + backupPath); //delete from temp dir downloadDestination.delete(); } } continueOrQuit(); //copy the new CSV files into the dir where it was disabled List<String> disabledFiles = hmExchangeFiles.get(exchangeDisabled); for (String disabledFile: disabledFiles) { String fileType = findFileType(disabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirLast.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected temp file " + f); } FileHelper.writeFileToSharedStorage(disabledFile, f); LOG.info("Copied " + tempFile + " -> " + disabledFile); } continueOrQuit(); //empty the patient files for any extracts while the service was disabled for (int i=indexDisabled+1; i<indexRebulked; i++) { Exchange otherExchangeDisabled = exchanges.get(i); List<String> otherDisabledFiles = hmExchangeFiles.get(otherExchangeDisabled); for (String otherDisabledFile: otherDisabledFiles) { String fileType = findFileType(otherDisabledFile); if (!isPatientFile(fileType)) { continue; } String tempFile = FilenameUtils.concat(tempDirEmpty.getAbsolutePath(), fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(otherDisabledFile, f); LOG.info("Copied " + tempFile + " -> " + otherDisabledFile); } } continueOrQuit(); //copy the content of the sharing agreement file from when it was re-bulked for (String rebulkFile: rebulkFiles) { String fileType = findFileType(rebulkFile); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = 
FilenameUtils.concat(tempDir, fileType + ".csv"); File downloadDestination = new File(tempFile); InputStream inputStream = FileHelper.readFileFromSharedStorage(rebulkFile); Files.copy(inputStream, downloadDestination.toPath()); tempFilesCreated.add(tempFile); } } //replace the sharing agreement file for all disabled extracts with the non-disabled one for (int i=indexDisabled; i<indexRebulked; i++) { Exchange exchange = exchanges.get(i); List<String> files = hmExchangeFiles.get(exchange); for (String file: files) { String fileType = findFileType(file); if (fileType.equals("Agreements_SharingOrganisation")) { String tempFile = FilenameUtils.concat(tempDir, fileType + ".csv"); File f = new File(tempFile); if (!f.exists()) { throw new Exception("Failed to find expected empty file " + f); } FileHelper.writeFileToSharedStorage(file, f); LOG.info("Copied " + tempFile + " -> " + file); } } } LOG.info("Finished Fixing Disabled Emis Extracts Prior to Re-bulk for service " + serviceId); continueOrQuit(); for (String tempFileCreated: tempFilesCreated) { File f = new File(tempFileCreated); if (f.exists()) { f.delete(); } }*//* } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void dumpFileSizes(File f) { if (f.isDirectory()) { for (File child : f.listFiles()) { dumpFileSizes(child); } } else { String totalSizeReadable = FileUtils.byteCountToDisplaySize(f.length()); LOG.info("" + f + " = " + totalSizeReadable); } }*/ /*private static String findExtractDate(Exchange exchange, Map<Exchange, List<String>> fileMap) throws Exception { List<String> files = fileMap.get(exchange); String file = findSharingAgreementFile(files); String name = FilenameUtils.getBaseName(file); String[] toks = name.split("_"); return toks[3]; }*/ private static boolean isDisabledInSharingAgreementFile(List<String> files) throws Exception { String file = findSharingAgreementFile(files); InputStreamReader reader = FileHelper.readFileReaderFromSharedStorage(file); CSVParser csvParser = new CSVParser(reader, EmisCsvToFhirTransformer.CSV_FORMAT); try { Iterator<CSVRecord> iterator = csvParser.iterator(); CSVRecord record = iterator.next(); String s = record.get("Disabled"); boolean disabled = Boolean.parseBoolean(s); return disabled; } finally { csvParser.close(); } } private static void continueOrQuit() throws Exception { LOG.info("Enter y to continue, anything else to quit"); byte[] bytes = new byte[10]; System.in.read(bytes); char c = (char) bytes[0]; if (c != 'y' && c != 'Y') { System.out.println("Read " + c); System.exit(1); } } private static String getGuidColumnName(String fileType) { if (fileType.equals("Admin_Patient")) { //patient file just has patient GUID, nothing extra return null; } else if (fileType.equals("CareRecord_Consultation")) { return "ConsultationGuid"; } else if (fileType.equals("CareRecord_Diary")) { return "DiaryGuid"; } else if (fileType.equals("CareRecord_Observation")) { return "ObservationGuid"; } else if (fileType.equals("CareRecord_Problem")) { //there is no separate problem GUID, as it's just a modified observation return "ObservationGuid"; } else if (fileType.equals("Prescribing_DrugRecord")) { return "DrugRecordGuid"; } else if (fileType.equals("Prescribing_IssueRecord")) { return "IssueRecordGuid"; } else { throw new IllegalArgumentException(fileType); } } private static String findFileType(String filePath) { String fileName = FilenameUtils.getName(filePath); String[] toks = fileName.split("_"); String domain = toks[1]; String name = toks[2]; return domain + "_" + name; } private 
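// NOTE (sketch): getGuidColumnName above maps each Emis file type to its GUID column with an if/else
// chain; a static map is an alternative. A minimal sketch, assuming java.util.Map/HashMap and the same
// type-to-column pairs listed above (Admin_Patient is simply absent, so the lookup returns null for it):
//
//   private static final Map<String, String> GUID_COLUMNS = new HashMap<>();
//   static {
//       GUID_COLUMNS.put("CareRecord_Consultation", "ConsultationGuid");
//       GUID_COLUMNS.put("CareRecord_Diary", "DiaryGuid");
//       GUID_COLUMNS.put("CareRecord_Observation", "ObservationGuid");
//       GUID_COLUMNS.put("CareRecord_Problem", "ObservationGuid"); //problems reuse the observation GUID
//       GUID_COLUMNS.put("Prescribing_DrugRecord", "DrugRecordGuid");
//       GUID_COLUMNS.put("Prescribing_IssueRecord", "IssueRecordGuid");
//   }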
    //returns true for the EMIS CSV file types that hold patient-level records (the files the
    //disabled-extract fix routines above empty or replace)
    private static boolean isPatientFile(String fileType) {
        if (fileType.equals("Admin_Patient")
                || fileType.equals("CareRecord_Consultation")
                || fileType.equals("CareRecord_Diary")
                || fileType.equals("CareRecord_Observation")
                || fileType.equals("CareRecord_Problem")
                || fileType.equals("Prescribing_DrugRecord")
                || fileType.equals("Prescribing_IssueRecord")) {
            //note the referral file doesn't have a Deleted column, so isn't in this list
            return true;

        } else {
            return false;
        }
    }

    //returns the sharing agreement file from a list of extract files, failing if there isn't one
    private static String findSharingAgreementFile(List<String> files) throws Exception {

        for (String file : files) {
            String fileType = findFileType(file);
            if (fileType.equals("Agreements_SharingOrganisation")) {
                return file;
            }
        }

        throw new Exception("Failed to find sharing agreement file in " + files.get(0));
    }

    /*private static void testSlack() {
        LOG.info("Testing slack");
        try {
            SlackHelper.sendSlackMessage(SlackHelper.Channel.QueueReaderAlerts, "Test Message from Queue Reader");
            LOG.info("Finished testing slack");

        } catch (Exception ex) {
            LOG.error("", ex);
        }
    }*/

    /*private static void postToInboundFromFile(UUID serviceId, UUID systemId, String filePath) {

        try {
            ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
            ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();

            Service service = serviceDalI.getById(serviceId);
            LOG.info("Posting to inbound exchange for " + service.getName() + " from file " + filePath);

            FileReader fr = new FileReader(filePath);
            BufferedReader br = new BufferedReader(fr);
            int count = 0;
            List<UUID> exchangeIdBatch = new ArrayList<>();

            while (true) {
                String line = br.readLine();
                if (line == null) {
                    break;
                }

                UUID exchangeId = UUID.fromString(line);

                //update the transform audit, so EDS UI knows we've re-queued this exchange
                ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);
                if (audit != null && !audit.isResubmitted()) {
                    audit.setResubmitted(true);
                    auditRepository.save(audit);
                }

                count ++;
                exchangeIdBatch.add(exchangeId);
                if (exchangeIdBatch.size() >= 1000) {
                    QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
                    exchangeIdBatch = new ArrayList<>();
                    LOG.info("Done " + count);
                }
            }

            if (!exchangeIdBatch.isEmpty()) {
                QueueHelper.postToExchange(exchangeIdBatch, "EdsInbound", null, false);
                LOG.info("Done " + count);
            }

            br.close();

        } catch (Exception ex) {
            LOG.error("", ex);
        }

        LOG.info("Finished Posting to inbound for " + serviceId);
    }*/

    /*private static void postToInbound(UUID serviceId, boolean all) {
        LOG.info("Posting to inbound for " + serviceId);

        try {
            ServiceDalI serviceDalI = DalProvider.factoryServiceDal();
            ExchangeDalI auditRepository = DalProvider.factoryExchangeDal();

            Service service = serviceDalI.getById(serviceId);

            List<UUID> systemIds = findSystemIds(service);
            UUID systemId = systemIds.get(0);

            ExchangeTransformErrorState errorState = auditRepository.getErrorState(serviceId, systemId);

            for (UUID exchangeId: errorState.getExchangeIdsInError()) {

                //update the transform audit, so EDS UI knows we've re-queued this exchange
                ExchangeTransformAudit audit = auditRepository.getMostRecentExchangeTransform(serviceId, systemId, exchangeId);

                //skip any exchange IDs we've already re-queued up to be processed again
                if (audit.isResubmitted()) {
                    LOG.debug("Not re-posting " + audit.getExchangeId() + " as it's already been resubmitted");
                    continue;
                }

                LOG.debug("Re-posting " + audit.getExchangeId());
                audit.setResubmitted(true);
                auditRepository.save(audit);

                //then re-submit the exchange to Rabbit MQ for the queue reader to pick up
QueueHelper.postToExchange(exchangeId, "EdsInbound", null, false); if (!all) { LOG.info("Posted first exchange, so stopping"); break; } } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Posting to inbound for " + serviceId); }*/ /*private static void fixPatientSearchAllServices(String filterSystemId) { LOG.info("Fixing patient search for all services and system " + filterSystemId); try { ServiceDalI serviceDal = DalProvider.factoryServiceDal(); List<Service> services = serviceDal.getAll(); for (Service service: services) { fixPatientSearch(service.getId().toString(), filterSystemId); } LOG.info("Finished Fixing patient search for all services and system " + filterSystemId); } catch (Throwable t) { LOG.error("", t); } } private static void fixPatientSearch(String serviceId, String filterSystemId) { LOG.info("Fixing patient search for service " + serviceId); try { UUID serviceUuid = UUID.fromString(serviceId); UUID filterSystemUuid = null; if (!Strings.isNullOrEmpty(filterSystemId)) { filterSystemUuid = UUID.fromString(filterSystemId); } ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); PatientSearchDalI patientSearchDal = DalProvider.factoryPatientSearchDal(); ServiceDalI serviceDal = DalProvider.factoryServiceDal(); Set<UUID> patientsDone = new HashSet<>(); Service service = serviceDal.getById(serviceUuid); List<UUID> systemIds = findSystemIds(service); for (UUID systemId: systemIds) { if (filterSystemUuid != null && !filterSystemUuid.equals(systemId)) { continue; } List<UUID> exchanges = exchangeDalI.getExchangeIdsForService(serviceUuid, systemId); LOG.info("Found " + exchanges.size() + " exchanges for system " + systemId); for (UUID exchangeId : exchanges) { List<ExchangeBatch> batches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); LOG.info("Found " + batches.size() + " batches in exchange " + exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId == null) { continue; } if (patientsDone.contains(patientId)) { continue; } patientsDone.add(patientId); ResourceWrapper wrapper = resourceDalI.getCurrentVersion(serviceUuid, ResourceType.Patient.toString(), patientId); if (wrapper != null) { String json = wrapper.getResourceData(); if (!Strings.isNullOrEmpty(json)) { Patient fhirPatient = (Patient)FhirSerializationHelper.deserializeResource(json); patientSearchDal.update(serviceUuid, fhirPatient); } } if (patientsDone.size() % 1000 == 0) { LOG.info("Done " + patientsDone.size()); } } } } LOG.info("Done " + patientsDone.size()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished fixing patient search for " + serviceId); }*/ private static void runSql(String host, String username, String password, String sqlFile) { LOG.info("Running SQL on " + host + " from " + sqlFile); Connection conn = null; Statement statement = null; try { File f = new File(sqlFile); if (!f.exists()) { LOG.error("" + f + " doesn't exist"); return; } List<String> lines = FileUtils.readLines(f); /*String combined = String.join("\n", lines); LOG.info("Going to run SQL"); LOG.info(combined);*/ //load driver Class.forName("com.mysql.cj.jdbc.Driver"); //create connection Properties props = new Properties(); props.setProperty("user", username); props.setProperty("password", password); conn = DriverManager.getConnection(host, props); LOG.info("Opened connection"); statement = 
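        //each non-comment, non-empty line of the file is executed as its own SQL statement below:
        //Statement.execute(..) says whether it produced a result set; if so, every result set is logged
        //(column headers then rows, stepping through multiple result sets with getMoreResults(..)),
        //otherwise the update count is logged, with timings recorded per statement and for the whole file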
conn.createStatement(); long totalStart = System.currentTimeMillis(); for (String sql : lines) { sql = sql.trim(); if (sql.startsWith("--") || sql.startsWith("/*") || Strings.isNullOrEmpty(sql)) { continue; } LOG.info(""); LOG.info(sql); long start = System.currentTimeMillis(); boolean hasResultSet = statement.execute(sql); long end = System.currentTimeMillis(); LOG.info("SQL took " + (end - start) + "ms"); if (hasResultSet) { while (true) { ResultSet rs = statement.getResultSet(); int cols = rs.getMetaData().getColumnCount(); List<String> colHeaders = new ArrayList<>(); for (int i = 0; i < cols; i++) { String header = rs.getMetaData().getColumnName(i + 1); colHeaders.add(header); } String colHeaderStr = String.join(", ", colHeaders); LOG.info(colHeaderStr); while (rs.next()) { List<String> row = new ArrayList<>(); for (int i = 0; i < cols; i++) { Object o = rs.getObject(i + 1); if (rs.wasNull()) { row.add("<null>"); } else { row.add(o.toString()); } } String rowStr = String.join(", ", row); LOG.info(rowStr); } if (!statement.getMoreResults()) { break; } } } else { int updateCount = statement.getUpdateCount(); LOG.info("Updated " + updateCount + " Row(s)"); } } long totalEnd = System.currentTimeMillis(); LOG.info(""); LOG.info("Total time taken " + (totalEnd - totalStart) + "ms"); } catch (Throwable t) { LOG.error("", t); } finally { if (statement != null) { try { statement.close(); } catch (Exception ex) { } } if (conn != null) { try { conn.close(); } catch (Exception ex) { } } LOG.info("Closed connection"); } LOG.info("Finished Testing DB Size Limit"); } /*private static void fixExchangeBatches() { LOG.info("Starting Fixing Exchange Batches"); try { ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); List<Service> services = serviceDalI.getAll(); for (Service service: services) { LOG.info("Doing " + service.getName()); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(service.getId()); for (UUID exchangeId: exchangeIds) { LOG.info(" Exchange " + exchangeId); List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } List<ResourceWrapper> resources = resourceDalI.getResourcesForBatch(exchangeBatch.getBatchId()); if (resources.isEmpty()) { continue; } ResourceWrapper first = resources.get(0); UUID patientId = first.getPatientId(); if (patientId != null) { exchangeBatch.setEdsPatientId(patientId); exchangeBatchDalI.save(exchangeBatch); LOG.info("Fixed batch " + exchangeBatch.getBatchId() + " -> " + exchangeBatch.getEdsPatientId()); } } } } LOG.info("Finished Fixing Exchange Batches"); } catch (Exception ex) { LOG.error("", ex); } }*/ /** * exports ADT Encounters for patients based on a CSV file produced using the below SQL * --USE EDS DATABASE * <p> * -- barts b5a08769-cbbe-4093-93d6-b696cd1da483 * -- homerton 962d6a9a-5950-47ac-9e16-ebee56f9507a * <p> * create table adt_patients ( * service_id character(36), * system_id character(36), * nhs_number character varying(10), * patient_id character(36) * ); * <p> * -- delete from adt_patients; * <p> * select * from patient_search limit 10; * select * from patient_link limit 10; * <p> * insert into adt_patients * select distinct ps.service_id, ps.system_id, ps.nhs_number, ps.patient_id * 
from patient_search ps * join patient_link pl * on pl.patient_id = ps.patient_id * join patient_link pl2 * on pl.person_id = pl2.person_id * join patient_search ps2 * on ps2.patient_id = pl2.patient_id * where * ps.service_id IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a') * and ps2.service_id NOT IN ('b5a08769-cbbe-4093-93d6-b696cd1da483', '962d6a9a-5950-47ac-9e16-ebee56f9507a'); * <p> * <p> * select count(1) from adt_patients limit 100; * select * from adt_patients limit 100; * <p> * <p> * <p> * <p> * ---MOVE TABLE TO HL7 RECEIVER DB * <p> * select count(1) from adt_patients; * <p> * -- top 1000 patients with messages * <p> * select * from mapping.resource_uuid where resource_type = 'Patient' limit 10; * <p> * select * from log.message limit 10; * <p> * create table adt_patient_counts ( * nhs_number character varying(100), * count int * ); * <p> * insert into adt_patient_counts * select pid1, count(1) * from log.message * where pid1 is not null * and pid1 <> '' * group by pid1; * <p> * select * from adt_patient_counts order by count desc limit 100; * <p> * alter table adt_patients * add count int; * <p> * update adt_patients * set count = adt_patient_counts.count * from adt_patient_counts * where adt_patients.nhs_number = adt_patient_counts.nhs_number; * <p> * select count(1) from adt_patients where nhs_number is null; * <p> * select * from adt_patients * where nhs_number is not null * and count is not null * order by count desc limit 1000; */ /*private static void exportHl7Encounters(String sourceCsvPath, String outputPath) { LOG.info("Exporting HL7 Encounters from " + sourceCsvPath + " to " + outputPath); try { File sourceFile = new File(sourceCsvPath); CSVParser csvParser = CSVParser.parse(sourceFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); //"service_id","system_id","nhs_number","patient_id","count" int count = 0; HashMap<UUID, List<UUID>> serviceAndSystemIds = new HashMap<>(); HashMap<UUID, Integer> patientIds = new HashMap<>(); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); count ++; String serviceId = csvRecord.get("service_id"); String systemId = csvRecord.get("system_id"); String patientId = csvRecord.get("patient_id"); UUID serviceUuid = UUID.fromString(serviceId); List<UUID> systemIds = serviceAndSystemIds.get(serviceUuid); if (systemIds == null) { systemIds = new ArrayList<>(); serviceAndSystemIds.put(serviceUuid, systemIds); } systemIds.add(UUID.fromString(systemId)); patientIds.put(UUID.fromString(patientId), new Integer(count)); } csvParser.close(); ExchangeDalI exchangeDalI = DalProvider.factoryExchangeDal(); ResourceDalI resourceDalI = DalProvider.factoryResourceDal(); ExchangeBatchDalI exchangeBatchDalI = DalProvider.factoryExchangeBatchDal(); ServiceDalI serviceDalI = DalProvider.factoryServiceDal(); ParserPool parser = new ParserPool(); Map<Integer, List<Object[]>> patientRows = new HashMap<>(); SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); for (UUID serviceId: serviceAndSystemIds.keySet()) { //List<UUID> systemIds = serviceAndSystemIds.get(serviceId); Service service = serviceDalI.getById(serviceId); String serviceName = service.getName(); LOG.info("Doing service " + serviceId + " " + serviceName); List<UUID> exchangeIds = exchangeDalI.getExchangeIdsForService(serviceId); LOG.info("Got " + exchangeIds.size() + " exchange IDs to scan"); int exchangeCount = 0; for (UUID exchangeId: exchangeIds) { 
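                //for each exchange: log progress every 1,000 exchanges, fetch its batches, and skip any batch
                //whose patient isn't in the list loaded from the source CSV; for the remaining batches the
                //non-deleted Encounter resources are loaded and flattened into one output row each (date,
                //episode ID, ADT message code and type, class, type, status, location and clinician), keyed
                //on the patient's row number from the CSV so the rows can be written out in that order later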
exchangeCount ++; if (exchangeCount % 1000 == 0) { LOG.info("Done " + exchangeCount + " exchanges"); } List<ExchangeBatch> exchangeBatches = exchangeBatchDalI.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch: exchangeBatches) { UUID patientId = exchangeBatch.getEdsPatientId(); if (patientId != null && !patientIds.containsKey(patientId)) { continue; } Integer patientIdInt = patientIds.get(patientId); //get encounters for exchange batch UUID batchId = exchangeBatch.getBatchId(); List<ResourceWrapper> resourceWrappers = resourceDalI.getResourcesForBatch(serviceId, batchId); for (ResourceWrapper resourceWrapper: resourceWrappers) { if (resourceWrapper.isDeleted()) { continue; } String resourceType = resourceWrapper.getResourceType(); if (!resourceType.equals(ResourceType.Encounter.toString())) { continue; } LOG.info("Processing " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId()); String json = resourceWrapper.getResourceData(); Encounter fhirEncounter = (Encounter)parser.parse(json); Date date = null; if (fhirEncounter.hasPeriod()) { Period period = fhirEncounter.getPeriod(); if (period.hasStart()) { date = period.getStart(); } } String episodeId = null; if (fhirEncounter.hasEpisodeOfCare()) { Reference episodeReference = fhirEncounter.getEpisodeOfCare().get(0); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(episodeReference); EpisodeOfCare fhirEpisode = (EpisodeOfCare)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirEpisode != null) { if (fhirEpisode.hasIdentifier()) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_BARTS_FIN_EPISODE_ID); if (Strings.isNullOrEmpty(episodeId)) { episodeId = IdentifierHelper.findIdentifierValue(fhirEpisode.getIdentifier(), FhirUri.IDENTIFIER_SYSTEM_HOMERTON_FIN_EPISODE_ID); } } } } String adtType = null; String adtCode = null; Extension extension = ExtensionConverter.findExtension(fhirEncounter, FhirExtensionUri.HL7_MESSAGE_TYPE); if (extension != null) { CodeableConcept codeableConcept = (CodeableConcept) extension.getValue(); Coding hl7MessageTypeCoding = CodeableConceptHelper.findCoding(codeableConcept, FhirUri.CODE_SYSTEM_HL7V2_MESSAGE_TYPE); if (hl7MessageTypeCoding != null) { adtType = hl7MessageTypeCoding.getDisplay(); adtCode = hl7MessageTypeCoding.getCode(); } } else { //for older formats of the transformed resources, the HL7 message type can only be found from the raw original exchange body try { Exchange exchange = exchangeDalI.getExchange(exchangeId); String exchangeBody = exchange.getBody(); Bundle bundle = (Bundle) FhirResourceHelper.deserialiseResouce(exchangeBody); for (Bundle.BundleEntryComponent entry: bundle.getEntry()) { if (entry.getResource() != null && entry.getResource() instanceof MessageHeader) { MessageHeader header = (MessageHeader)entry.getResource(); if (header.hasEvent()) { Coding coding = header.getEvent(); adtType = coding.getDisplay(); adtCode = coding.getCode(); } } } } catch (Exception ex) { //if the exchange body isn't a FHIR bundle, then we'll get an error by treating as such, so just ignore them } } String cls = null; if (fhirEncounter.hasClass_()) { Encounter.EncounterClass encounterClass = fhirEncounter.getClass_(); if (encounterClass == Encounter.EncounterClass.OTHER && fhirEncounter.hasClass_Element() && fhirEncounter.getClass_Element().hasExtension()) { for (Extension classExtension: fhirEncounter.getClass_Element().getExtension()) { if 
(classExtension.getUrl().equals(FhirExtensionUri.ENCOUNTER_CLASS)) { //not 100% of the type of the value, so just append to a String cls = "" + classExtension.getValue(); } } } if (Strings.isNullOrEmpty(cls)) { cls = encounterClass.toCode(); } } String type = null; if (fhirEncounter.hasType()) { //only seem to ever have one type CodeableConcept codeableConcept = fhirEncounter.getType().get(0); type = codeableConcept.getText(); } String status = null; if (fhirEncounter.hasStatus()) { Encounter.EncounterState encounterState = fhirEncounter.getStatus(); status = encounterState.toCode(); } String location = null; String locationType = null; if (fhirEncounter.hasLocation()) { //first location is always the current location Encounter.EncounterLocationComponent encounterLocation = fhirEncounter.getLocation().get(0); if (encounterLocation.hasLocation()) { Reference locationReference = encounterLocation.getLocation(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(locationReference); Location fhirLocation = (Location)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirLocation != null) { if (fhirLocation.hasName()) { location = fhirLocation.getName(); } if (fhirLocation.hasType()) { CodeableConcept typeCodeableConcept = fhirLocation.getType(); if (typeCodeableConcept.hasCoding()) { Coding coding = typeCodeableConcept.getCoding().get(0); locationType = coding.getDisplay(); } } } } } String clinician = null; if (fhirEncounter.hasParticipant()) { //first participant seems to be the interesting one Encounter.EncounterParticipantComponent encounterParticipant = fhirEncounter.getParticipant().get(0); if (encounterParticipant.hasIndividual()) { Reference practitionerReference = encounterParticipant.getIndividual(); ReferenceComponents comps = ReferenceHelper.getReferenceComponents(practitionerReference); Practitioner fhirPractitioner = (Practitioner)resourceDalI.getCurrentVersionAsResource(serviceId, comps.getResourceType(), comps.getId()); if (fhirPractitioner != null) { if (fhirPractitioner.hasName()) { HumanName name = fhirPractitioner.getName(); clinician = name.getText(); if (Strings.isNullOrEmpty(clinician)) { clinician = ""; for (StringType s: name.getPrefix()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getGiven()) { clinician += s.getValueNotNull(); clinician += " "; } for (StringType s: name.getFamily()) { clinician += s.getValueNotNull(); clinician += " "; } clinician = clinician.trim(); } } } } } Object[] row = new Object[12]; row[0] = serviceName; row[1] = patientIdInt.toString(); row[2] = sdfOutput.format(date); row[3] = episodeId; row[4] = adtCode; row[5] = adtType; row[6] = cls; row[7] = type; row[8] = status; row[9] = location; row[10] = locationType; row[11] = clinician; List<Object[]> rows = patientRows.get(patientIdInt); if (rows == null) { rows = new ArrayList<>(); patientRows.put(patientIdInt, rows); } rows.add(row); } } } } String[] outputColumnHeaders = new String[] {"Source", "Patient", "Date", "Episode ID", "ADT Message Code", "ADT Message Type", "Class", "Type", "Status", "Location", "Location Type", "Clinician"}; FileWriter fileWriter = new FileWriter(outputPath); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader(outputColumnHeaders) .withQuote('"'); CSVPrinter csvPrinter = new CSVPrinter(bufferedWriter, format); for (int i=0; i <= count; i++) { Integer patientIdInt = new Integer(i); List<Object[]> rows = 
patientRows.get(patientIdInt); if (rows != null) { for (Object[] row: rows) { csvPrinter.printRecord(row); } } } csvPrinter.close(); bufferedWriter.close(); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Exporting Encounters from " + sourceCsvPath + " to " + outputPath); }*/ /*private static void registerShutdownHook() { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { LOG.info(""); try { Thread.sleep(5000); } catch (Throwable ex) { LOG.error("", ex); } LOG.info("Done"); } }); }*/ /*private static void findEmisStartDates(String path, String outputPath) { LOG.info("Finding EMIS Start Dates in " + path + ", writing to " + outputPath); try { SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH.mm.ss"); Map<String, Date> startDates = new HashMap<>(); Map<String, String> servers = new HashMap<>(); Map<String, String> names = new HashMap<>(); Map<String, String> odsCodes = new HashMap<>(); Map<String, String> cdbNumbers = new HashMap<>(); Map<String, Set<String>> distinctPatients = new HashMap<>(); File root = new File(path); for (File sftpRoot : root.listFiles()) { LOG.info("Checking " + sftpRoot); Map<Date, File> extracts = new HashMap<>(); List<Date> extractDates = new ArrayList<>(); for (File extractRoot : sftpRoot.listFiles()) { Date d = sdf.parse(extractRoot.getName()); //LOG.info("" + extractRoot.getName() + " -> " + d); extracts.put(d, extractRoot); extractDates.add(d); } Collections.sort(extractDates); for (Date extractDate : extractDates) { File extractRoot = extracts.get(extractDate); LOG.info("Checking " + extractRoot); //read the sharing agreements file //e.g. 291_Agreements_SharingOrganisation_20150211164536_45E7CD20-EE37-41AB-90D6-DC9D4B03D102.csv File sharingAgreementsFile = null; for (File f : extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("agreements_sharingorganisation") > -1 && name.endsWith(".csv")) { sharingAgreementsFile = f; break; } } if (sharingAgreementsFile == null) { LOG.info("Null agreements file for " + extractRoot); continue; } CSVParser csvParser = CSVParser.parse(sharingAgreementsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String activated = csvRecord.get("IsActivated"); String disabled = csvRecord.get("Disabled"); servers.put(orgGuid, sftpRoot.getName()); if (activated.equalsIgnoreCase("true")) { if (disabled.equalsIgnoreCase("false")) { Date d = sdf.parse(extractRoot.getName()); Date existingDate = startDates.get(orgGuid); if (existingDate == null) { startDates.put(orgGuid, d); } } else { if (startDates.containsKey(orgGuid)) { startDates.put(orgGuid, null); } } } } } finally { csvParser.close(); } //go through orgs file to get name, ods and cdb codes File orgsFile = null; for (File f : extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_organisation_") > -1 && name.endsWith(".csv")) { orgsFile = f; break; } } csvParser = CSVParser.parse(orgsFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String name = csvRecord.get("OrganisationName"); String odsCode = csvRecord.get("ODSCode"); String cdb = csvRecord.get("CDB"); 
names.put(orgGuid, name); odsCodes.put(orgGuid, odsCode); cdbNumbers.put(orgGuid, cdb); } } finally { csvParser.close(); } //go through patients file to get count File patientFile = null; for (File f : extractRoot.listFiles()) { String name = f.getName().toLowerCase(); if (name.indexOf("admin_patient_") > -1 && name.endsWith(".csv")) { patientFile = f; break; } } csvParser = CSVParser.parse(patientFile, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); try { Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String orgGuid = csvRecord.get("OrganisationGuid"); String patientGuid = csvRecord.get("PatientGuid"); String deleted = csvRecord.get("Deleted"); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); if (distinctPatientSet == null) { distinctPatientSet = new HashSet<>(); distinctPatients.put(orgGuid, distinctPatientSet); } if (deleted.equalsIgnoreCase("true")) { distinctPatientSet.remove(patientGuid); } else { distinctPatientSet.add(patientGuid); } } } finally { csvParser.close(); } } } SimpleDateFormat sdfOutput = new SimpleDateFormat("yyyy-MM-dd"); StringBuilder sb = new StringBuilder(); sb.append("Name,OdsCode,CDB,OrgGuid,StartDate,Server,Patients"); for (String orgGuid : startDates.keySet()) { Date startDate = startDates.get(orgGuid); String server = servers.get(orgGuid); String name = names.get(orgGuid); String odsCode = odsCodes.get(orgGuid); String cdbNumber = cdbNumbers.get(orgGuid); Set<String> distinctPatientSet = distinctPatients.get(orgGuid); String startDateDesc = null; if (startDate != null) { startDateDesc = sdfOutput.format(startDate); } Long countDistinctPatients = null; if (distinctPatientSet != null) { countDistinctPatients = new Long(distinctPatientSet.size()); } sb.append("\n"); sb.append("\"" + name + "\""); sb.append(","); sb.append("\"" + odsCode + "\""); sb.append(","); sb.append("\"" + cdbNumber + "\""); sb.append(","); sb.append("\"" + orgGuid + "\""); sb.append(","); sb.append(startDateDesc); sb.append(","); sb.append("\"" + server + "\""); sb.append(","); sb.append(countDistinctPatients); } LOG.info(sb.toString()); FileUtils.writeStringToFile(new File(outputPath), sb.toString()); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Finding Start Dates in " + path + ", writing to " + outputPath); } private static void findEncounterTerms(String path, String outputPath) { LOG.info("Finding Encounter Terms from " + path); Map<String, Long> hmResults = new HashMap<>(); //source term, source term snomed ID, source term snomed term - count try { File root = new File(path); File[] files = root.listFiles(); for (File readerRoot : files) { //emis001 LOG.info("Finding terms in " + readerRoot); //first read in all the coding files to build up our map of codes Map<String, String> hmCodes = new HashMap<>(); for (File dateFolder : readerRoot.listFiles()) { LOG.info("Looking for codes in " + dateFolder); File f = findFile(dateFolder, "Coding_ClinicalCode"); if (f == null) { LOG.error("Failed to find coding file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String codeId = csvRecord.get("CodeId"); String term = csvRecord.get("Term"); String snomed = csvRecord.get("SnomedCTConceptId"); hmCodes.put(codeId, snomed + ",\"" + term + "\""); } 
csvParser.close(); } SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd"); Date cutoff = dateFormat.parse("2017-01-01"); //now process the consultation files themselves for (File dateFolder : readerRoot.listFiles()) { LOG.info("Looking for consultations in " + dateFolder); File f = findFile(dateFolder, "CareRecord_Consultation"); if (f == null) { LOG.error("Failed to find consultation file in " + dateFolder.getAbsolutePath()); continue; } CSVParser csvParser = CSVParser.parse(f, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader()); Iterator<CSVRecord> csvIterator = csvParser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String term = csvRecord.get("ConsultationSourceTerm"); String codeId = csvRecord.get("ConsultationSourceCodeId"); if (Strings.isNullOrEmpty(term) && Strings.isNullOrEmpty(codeId)) { continue; } String date = csvRecord.get("EffectiveDate"); if (Strings.isNullOrEmpty(date)) { continue; } Date d = dateFormat.parse(date); if (d.before(cutoff)) { continue; } String line = "\"" + term + "\","; if (!Strings.isNullOrEmpty(codeId)) { String codeLookup = hmCodes.get(codeId); if (codeLookup == null) { LOG.error("Failed to find lookup for codeID " + codeId); continue; } line += codeLookup; } else { line += ","; } Long count = hmResults.get(line); if (count == null) { count = new Long(1); } else { count = new Long(count.longValue() + 1); } hmResults.put(line, count); } csvParser.close(); } } //save results to file StringBuilder output = new StringBuilder(); output.append("\"consultation term\",\"snomed concept ID\",\"snomed term\",\"count\""); output.append("\r\n"); for (String line : hmResults.keySet()) { Long count = hmResults.get(line); String combined = line + "," + count; output.append(combined); output.append("\r\n"); } LOG.info("FInished"); LOG.info(output.toString()); FileUtils.writeStringToFile(new File(outputPath), output.toString()); LOG.info("written output to " + outputPath); } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished finding Encounter Terms from " + path); } private static File findFile(File root, String token) throws Exception { for (File f : root.listFiles()) { String s = f.getName(); if (s.indexOf(token) > -1) { return f; } } return null; }*/ /*private static void populateProtocolQueue(String serviceIdStr, String startingExchangeId) { LOG.info("Starting Populating Protocol Queue for " + serviceIdStr); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); if (serviceIdStr.equalsIgnoreCase("All")) { serviceIdStr = null; } try { List<Service> services = new ArrayList<>(); if (Strings.isNullOrEmpty(serviceIdStr)) { services = serviceRepository.getAll(); } else { UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); services.add(service); } for (Service service: services) { List<UUID> exchangeIds = auditRepository.getExchangeIdsForService(service.getId()); LOG.info("Found " + exchangeIds.size() + " exchangeIds for " + service.getName()); if (startingExchangeId != null) { UUID startingExchangeUuid = UUID.fromString(startingExchangeId); if (exchangeIds.contains(startingExchangeUuid)) { //if in the list, remove everything up to and including the starting exchange int index = exchangeIds.indexOf(startingExchangeUuid); LOG.info("Found starting exchange " + startingExchangeId + " at " + index + " so removing up to this point"); for (int i=index; i>=0; i--) { 
exchangeIds.remove(i); } startingExchangeId = null; } else { //if not in the list, skip all these exchanges LOG.info("List doesn't contain starting exchange " + startingExchangeId + " so skipping"); continue; } } QueueHelper.postToExchange(exchangeIds, "edsProtocol", null, true); } } catch (Exception ex) { LOG.error("", ex); } LOG.info("Finished Populating Protocol Queue for " + serviceIdStr); }*/ /*private static void findDeletedOrgs() { LOG.info("Starting finding deleted orgs"); ServiceDalI serviceRepository = DalProvider.factoryServiceDal(); ExchangeDalI auditRepository = DalProvider.factoryExchangeDal(); List<Service> services = new ArrayList<>(); try { for (Service service: serviceRepository.getAll()) { services.add(service); } } catch (Exception ex) { LOG.error("", ex); } services.sort((o1, o2) -> { String name1 = o1.getName(); String name2 = o2.getName(); return name1.compareToIgnoreCase(name2); }); for (Service service: services) { try { UUID serviceUuid = service.getId(); List<Exchange> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 1, new Date(0), new Date()); LOG.info("Service: " + service.getName() + " " + service.getLocalId()); if (exchangeByServices.isEmpty()) { LOG.info(" no exchange found!"); continue; } Exchange exchangeByService = exchangeByServices.get(0); UUID exchangeId = exchangeByService.getId(); Exchange exchange = auditRepository.getExchange(exchangeId); Map<String, String> headers = exchange.getHeaders(); String systemUuidStr = headers.get(HeaderKeys.SenderSystemUuid); UUID systemUuid = UUID.fromString(systemUuidStr); int batches = countBatches(exchangeId, serviceUuid, systemUuid); LOG.info(" Most recent exchange had " + batches + " batches"); if (batches > 1 && batches < 2000) { continue; } //go back until we find the FIRST exchange where it broke exchangeByServices = auditRepository.getExchangesByService(serviceUuid, 250, new Date(0), new Date()); for (int i=0; i<exchangeByServices.size(); i++) { exchangeByService = exchangeByServices.get(i); exchangeId = exchangeByService.getId(); batches = countBatches(exchangeId, serviceUuid, systemUuid); exchange = auditRepository.getExchange(exchangeId); Date timestamp = exchange.getTimestamp(); if (batches < 1 || batches > 2000) { LOG.info(" " + timestamp + " had " + batches); } if (batches > 1 && batches < 2000) { LOG.info(" " + timestamp + " had " + batches); break; } } } catch (Exception ex) { LOG.error("", ex); } } LOG.info("Finished finding deleted orgs"); }*/ private static int countBatches(UUID exchangeId, UUID serviceId, UUID systemId) throws Exception { int batches = 0; ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); List<ExchangeTransformAudit> audits = exchangeDal.getAllExchangeTransformAudits(serviceId, systemId, exchangeId); for (ExchangeTransformAudit audit : audits) { if (audit.getNumberBatchesCreated() != null) { batches += audit.getNumberBatchesCreated(); } } return batches; } /*private static void fixExchanges(UUID justThisService) { LOG.info("Fixing exchanges"); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId : exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String 
software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } boolean changed = false; String body = exchange.getBody(); String[] files = body.split("\n"); if (files.length == 0) { continue; } for (int i=0; i<files.length; i++) { String original = files[i]; //remove /r characters String trimmed = original.trim(); //add the new prefix if (!trimmed.startsWith("sftpreader/EMIS001/")) { trimmed = "sftpreader/EMIS001/" + trimmed; } if (!original.equals(trimmed)) { files[i] = trimmed; changed = true; } } if (changed) { LOG.info("Fixed exchange " + exchangeId); LOG.info(body); body = String.join("\n", files); exchange.setBody(body); AuditWriter.writeExchange(exchange); } } } LOG.info("Fixed exchanges"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void deleteDataForService(UUID serviceId) { Service dbService = new ServiceRepository().getById(serviceId); //the delete will take some time, so do the delete in a separate thread LOG.info("Deleting all data for service " + dbService.getName() + " " + dbService.getId()); FhirDeletionService deletor = new FhirDeletionService(dbService); try { deletor.deleteData(); LOG.info("Completed deleting all data for service " + dbService.getName() + " " + dbService.getId()); } catch (Exception ex) { LOG.error("Error deleting service " + dbService.getName() + " " + dbService.getId(), ex); } }*/ /*private static void fixProblems(UUID serviceId, String sharedStoragePath, boolean testMode) { LOG.info("Fixing problems for service " + serviceId); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); List<ExchangeByService> exchangeByServiceList = auditRepository.getExchangesByService(serviceId, Integer.MAX_VALUE); //go backwards as the most recent is first for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService exchangeByService = exchangeByServiceList.get(i); UUID exchangeId = exchangeByService.getExchangeId(); LOG.info("Doing exchange " + exchangeId); EmisCsvHelper helper = null; try { Exchange exchange = AuditWriter.readExchange(exchangeId); String exchangeBody = exchange.getBody(); String[] files = exchangeBody.split(java.lang.System.lineSeparator()); File orgDirectory = validateAndFindCommonDirectory(sharedStoragePath, files); Map<Class, AbstractCsvParser> allParsers = new HashMap<>(); String properVersion = null; String[] versions = new String[]{EmisCsvToFhirTransformer.VERSION_5_0, EmisCsvToFhirTransformer.VERSION_5_1, EmisCsvToFhirTransformer.VERSION_5_3, EmisCsvToFhirTransformer.VERSION_5_4}; for (String version: versions) { try { List<AbstractCsvParser> parsers = new ArrayList<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(Observation.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(DrugRecord.class, orgDirectory, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(IssueRecord.class, orgDirectory, version, true, parsers); for (AbstractCsvParser parser: parsers) { Class cls = parser.getClass(); allParsers.put(cls, parser); } properVersion = version; } catch (Exception ex) { //ignore } } if (allParsers.isEmpty()) { throw new Exception("Failed to open parsers for exchange " + exchangeId + " in folder " + orgDirectory); } UUID systemId = exchange.getHeaderAsUuid(HeaderKeys.SenderSystemUuid); //FhirResourceFiler dummyFiler = new 
FhirResourceFiler(exchangeId, serviceId, systemId, null, null, 10); if (helper == null) { helper = new EmisCsvHelper(findDataSharingAgreementGuid(new ArrayList<>(allParsers.values()))); } ObservationPreTransformer.transform(properVersion, allParsers, null, helper); IssueRecordPreTransformer.transform(properVersion, allParsers, null, helper); DrugRecordPreTransformer.transform(properVersion, allParsers, null, helper); Map<String, List<String>> problemChildren = helper.getProblemChildMap(); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (Map.Entry<String, List<String>> entry : problemChildren.entrySet()) { String patientLocallyUniqueId = entry.getKey().split(":")[0]; UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientLocallyUniqueId); if (edsPatientId == null) { throw new Exception("Failed to find edsPatientId for local Patient ID " + patientLocallyUniqueId + " in exchange " + exchangeId); } //find the batch ID for our patient UUID batchId = null; for (ExchangeBatch exchangeBatch: exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null && exchangeBatch.getEdsPatientId().equals(edsPatientId)) { batchId = exchangeBatch.getBatchId(); break; } } if (batchId == null) { throw new Exception("Failed to find batch ID for eds Patient ID " + edsPatientId + " in exchange " + exchangeId); } //find the EDS ID for our problem UUID edsProblemId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Condition, entry.getKey()); if (edsProblemId == null) { LOG.warn("No edsProblemId found for local ID " + entry.getKey() + " - assume bad data referring to non-existing problem?"); //throw new Exception("Failed to find edsProblemId for local Patient ID " + problemLocallyUniqueId + " in exchange " + exchangeId); } //convert our child IDs to EDS references List<Reference> references = new ArrayList<>(); HashSet<String> contentsSet = new HashSet<>(); contentsSet.addAll(entry.getValue()); for (String referenceValue : contentsSet) { Reference reference = ReferenceHelper.createReference(referenceValue); ReferenceComponents components = ReferenceHelper.getReferenceComponents(reference); String locallyUniqueId = components.getId(); ResourceType resourceType = components.getResourceType(); UUID edsResourceId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); Reference globallyUniqueReference = ReferenceHelper.createReference(resourceType, edsResourceId.toString()); references.add(globallyUniqueReference); } //find the resource for the problem itself ResourceByExchangeBatch problemResourceByExchangeBatch = null; List<ResourceByExchangeBatch> resources = resourceRepository.getResourcesForBatch(batchId, ResourceType.Condition.toString()); for (ResourceByExchangeBatch resourceByExchangeBatch: resources) { if (resourceByExchangeBatch.getResourceId().equals(edsProblemId)) { problemResourceByExchangeBatch = resourceByExchangeBatch; break; } } if (problemResourceByExchangeBatch == null) { throw new Exception("Problem not found for edsProblemId " + edsProblemId + " for exchange " + exchangeId); } if (problemResourceByExchangeBatch.getIsDeleted()) { LOG.warn("Problem " + edsProblemId + " is deleted, so not adding to it for exchange " + exchangeId); continue; } String json = problemResourceByExchangeBatch.getResourceData(); Condition fhirProblem = (Condition)PARSER_POOL.parse(json); //update the problems if (fhirProblem.hasContained()) { if (fhirProblem.getContained().size() > 1) { throw new 
Exception("Problem " + edsProblemId + " is has " + fhirProblem.getContained().size() + " contained resources for exchange " + exchangeId); } fhirProblem.getContained().clear(); } List_ list = new List_(); list.setId("Items"); fhirProblem.getContained().add(list); Extension extension = ExtensionConverter.findExtension(fhirProblem, FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE); if (extension == null) { Reference listReference = ReferenceHelper.createInternalReference("Items"); fhirProblem.addExtension(ExtensionConverter.createExtension(FhirExtensionUri.PROBLEM_ASSOCIATED_RESOURCE, listReference)); } for (Reference reference : references) { list.addEntry().setItem(reference); } String newJson = FhirSerializationHelper.serializeResource(fhirProblem); if (newJson.equals(json)) { LOG.warn("Skipping edsProblemId " + edsProblemId + " as JSON hasn't changed"); continue; } problemResourceByExchangeBatch.setResourceData(newJson); String resourceType = problemResourceByExchangeBatch.getResourceType(); UUID versionUuid = problemResourceByExchangeBatch.getVersion(); ResourceHistory problemResourceHistory = resourceRepository.getResourceHistoryByKey(edsProblemId, resourceType, versionUuid); problemResourceHistory.setResourceData(newJson); problemResourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(newJson)); ResourceByService problemResourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType, edsProblemId); if (problemResourceByService.getResourceData() == null) { problemResourceByService = null; LOG.warn("Not updating edsProblemId " + edsProblemId + " for exchange " + exchangeId + " as it's been subsequently delrted"); } else { problemResourceByService.setResourceData(newJson); } //save back to THREE tables if (!testMode) { resourceRepository.save(problemResourceByExchangeBatch); resourceRepository.save(problemResourceHistory); if (problemResourceByService != null) { resourceRepository.save(problemResourceByService); } LOG.info("Fixed edsProblemId " + edsProblemId + " for exchange Id " + exchangeId); } else { LOG.info("Would change edsProblemId " + edsProblemId + " to new JSON"); LOG.info(newJson); } } } catch (Exception ex) { LOG.error("Failed on exchange " + exchangeId, ex); break; } } LOG.info("Finished fixing problems for service " + serviceId); } private static String findDataSharingAgreementGuid(List<AbstractCsvParser> parsers) throws Exception { //we need a file name to work out the data sharing agreement ID, so just the first file we can find File f = parsers .iterator() .next() .getFile(); String name = Files.getNameWithoutExtension(f.getName()); String[] toks = name.split("_"); if (toks.length != 5) { throw new TransformException("Failed to extract data sharing agreement GUID from filename " + f.getName()); } return toks[4]; } private static void closeParsers(Collection<AbstractCsvParser> parsers) { for (AbstractCsvParser parser : parsers) { try { parser.close(); } catch (IOException ex) { //don't worry if this fails, as we're done anyway } } } private static File validateAndFindCommonDirectory(String sharedStoragePath, String[] files) throws Exception { String organisationDir = null; for (String file: files) { File f = new File(sharedStoragePath, file); if (!f.exists()) { LOG.error("Failed to find file {} in shared storage {}", file, sharedStoragePath); throw new FileNotFoundException("" + f + " doesn't exist"); } //LOG.info("Successfully found file {} in shared storage {}", file, sharedStoragePath); try { File orgDir = 
f.getParentFile(); if (organisationDir == null) { organisationDir = orgDir.getAbsolutePath(); } else { if (!organisationDir.equalsIgnoreCase(orgDir.getAbsolutePath())) { throw new Exception(); } } } catch (Exception ex) { throw new FileNotFoundException("" + f + " isn't in the expected directory structure within " + organisationDir); } } return new File(organisationDir); }*/ /*private static void testLogging() { while (true) { System.out.println("Checking logging at " + System.currentTimeMillis()); try { Thread.sleep(4000); } catch (Exception e) { e.printStackTrace(); } LOG.trace("trace logging"); LOG.debug("debug logging"); LOG.info("info logging"); LOG.warn("warn logging"); LOG.error("error logging"); } } */ /*private static void fixExchangeProtocols() { LOG.info("Fixing exchange protocols"); AuditRepository auditRepository = new AuditRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.Exchange LIMIT 1000;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); LOG.info("Processing exchange " + exchangeId); Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); List<String> newIds = new ArrayList<>(); String protocolJson = headers.get(HeaderKeys.Protocols); if (!headers.containsKey(HeaderKeys.Protocols)) { try { List<LibraryItem> libraryItemList = LibraryRepositoryHelper.getProtocolsByServiceId(serviceIdStr); // Get protocols where service is publisher newIds = libraryItemList.stream() .filter( libraryItem -> libraryItem.getProtocol().getServiceContract().stream() .anyMatch(sc -> sc.getType().equals(ServiceContractType.PUBLISHER) && sc.getService().getUuid().equals(serviceIdStr))) .map(t -> t.getUuid().toString()) .collect(Collectors.toList()); } catch (Exception e) { LOG.error("Failed to find protocols for exchange " + exchange.getExchangeId(), e); continue; } } else { try { JsonNode node = ObjectMapperPool.getInstance().readTree(protocolJson); for (int i = 0; i < node.size(); i++) { JsonNode libraryItemNode = node.get(i); JsonNode idNode = libraryItemNode.get("uuid"); String id = idNode.asText(); newIds.add(id); } } catch (Exception e) { LOG.error("Failed to read Json from " + protocolJson + " for exchange " + exchange.getExchangeId(), e); continue; } } try { if (newIds.isEmpty()) { headers.remove(HeaderKeys.Protocols); } else { String protocolsJson = ObjectMapperPool.getInstance().writeValueAsString(newIds.toArray()); headers.put(HeaderKeys.Protocols, protocolsJson); } } catch (JsonProcessingException e) { LOG.error("Unable to serialize protocols to JSON for exchange " + exchange.getExchangeId(), e); continue; } try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); } catch (JsonProcessingException e) { LOG.error("Failed to write exchange headers to Json for exchange " + exchange.getExchangeId(), e); 
continue; } auditRepository.save(exchange); } LOG.info("Finished fixing exchange protocols"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } if (headers.containsKey(HeaderKeys.SenderLocalIdentifier) && headers.containsKey(HeaderKeys.SenderOrganisationUuid)) { continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); Map<UUID, String> orgMap = service.getOrganisations(); if (orgMap.size() != 1) { LOG.error("Wrong number of orgs in service " + serviceId + " for exchange " + exchange.getExchangeId()); continue; } UUID orgId = orgMap .keySet() .stream() .collect(StreamExtension.firstOrNullCollector()); Organisation organisation = organisationRepository.getById(orgId); String odsCode = organisation.getNationalId(); headers.put(HeaderKeys.SenderLocalIdentifier, odsCode); headers.put(HeaderKeys.SenderOrganisationUuid, orgId.toString()); try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Creating exchange " + exchange.getExchangeId()); } LOG.info("Finished fixing exchange headers"); }*/ /*private static void fixExchangeHeaders() { LOG.info("Fixing exchange headers"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); OrganisationRepository organisationRepository = new OrganisationRepository(); LibraryRepository libraryRepository = new LibraryRepository(); List<Exchange> exchanges = new AuditRepository().getAllExchanges(); for (Exchange exchange: exchanges) { String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception ex) { LOG.error("Failed to parse headers for exchange " + exchange.getExchangeId(), ex); continue; } String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); if (Strings.isNullOrEmpty(serviceIdStr)) { LOG.error("Failed to find service ID for exchange " + exchange.getExchangeId()); continue; } boolean changed = false; UUID serviceId = UUID.fromString(serviceIdStr); Service service = serviceRepository.getById(serviceId); try { List<JsonServiceInterfaceEndpoint> endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint : endpoints) { UUID 
endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface : system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid())) { if (!headers.containsKey(HeaderKeys.SourceSystem)) { headers.put(HeaderKeys.SourceSystem, technicalInterface.getMessageFormat()); changed = true; } if (!headers.containsKey(HeaderKeys.SystemVersion)) { headers.put(HeaderKeys.SystemVersion, technicalInterface.getMessageFormatVersion()); changed = true; } if (!headers.containsKey(HeaderKeys.SenderSystemUuid)) { headers.put(HeaderKeys.SenderSystemUuid, endpointSystemId.toString()); changed = true; } } } } } catch (Exception e) { LOG.error("Failed to find endpoint details for " + exchange.getExchangeId()); continue; } if (changed) { try { headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setHeaders(headerJson); auditRepository.save(exchange); LOG.info("Fixed exchange " + exchange.getExchangeId()); } } LOG.info("Finished fixing exchange headers"); }*/ /*private static void testConnection(String configName) { try { JsonNode config = ConfigManager.getConfigurationAsJson(configName, "enterprise"); String driverClass = config.get("driverClass").asText(); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName(driverClass); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void testConnection() { try { JsonNode config = ConfigManager.getConfigurationAsJson("postgres", "enterprise"); String url = config.get("url").asText(); String username = config.get("username").asText(); String password = config.get("password").asText(); //force the driver to be loaded Class.forName("org.postgresql.Driver"); Connection conn = DriverManager.getConnection(url, username, password); conn.setAutoCommit(false); LOG.info("Connection ok"); conn.close(); } catch (Exception e) { LOG.error("", e); } }*/ /*private static void startEnterpriseStream(UUID serviceId, String configName, UUID exchangeIdStartFrom, UUID batchIdStartFrom) throws Exception { LOG.info("Starting Enterprise Streaming for " + serviceId + " using " + configName + " starting from exchange " + exchangeIdStartFrom + " and batch " + batchIdStartFrom); LOG.info("Testing database connection"); testConnection(configName); Service service = new ServiceRepository().getById(serviceId); List<UUID> orgIds = new ArrayList<>(service.getOrganisations().keySet()); UUID orgId = orgIds.get(0); List<ExchangeByService> exchangeByServiceList = new AuditRepository().getExchangesByService(serviceId, Integer.MAX_VALUE); for (int i=exchangeByServiceList.size()-1; i>=0; i--) { ExchangeByService 
exchangeByService = exchangeByServiceList.get(i); //for (ExchangeByService exchangeByService: exchangeByServiceList) { UUID exchangeId = exchangeByService.getExchangeId(); if (exchangeIdStartFrom != null) { if (!exchangeIdStartFrom.equals(exchangeId)) { continue; } else { //once we have a match, set to null so we don't skip any subsequent ones exchangeIdStartFrom = null; } } Exchange exchange = AuditWriter.readExchange(exchangeId); String senderOrgUuidStr = exchange.getHeader(HeaderKeys.SenderOrganisationUuid); UUID senderOrgUuid = UUID.fromString(senderOrgUuidStr); //this one had 90,000 batches and doesn't need doing again *//*if (exchangeId.equals(UUID.fromString("b9b93be0-afd8-11e6-8c16-c1d5a00342f3"))) { LOG.info("Skipping exchange " + exchangeId); continue; }*//* List<ExchangeBatch> exchangeBatches = new ExchangeBatchRepository().retrieveForExchangeId(exchangeId); LOG.info("Processing exchange " + exchangeId + " with " + exchangeBatches.size() + " batches"); for (int j=0; j<exchangeBatches.size(); j++) { ExchangeBatch exchangeBatch = exchangeBatches.get(j); UUID batchId = exchangeBatch.getBatchId(); if (batchIdStartFrom != null) { if (!batchIdStartFrom.equals(batchId)) { continue; } else { batchIdStartFrom = null; } } LOG.info("Processing exchange " + exchangeId + " and batch " + batchId + " " + (j+1) + "/" + exchangeBatches.size()); try { String outbound = FhirToEnterpriseCsvTransformer.transformFromFhir(senderOrgUuid, batchId, null); if (!Strings.isNullOrEmpty(outbound)) { EnterpriseFiler.file(outbound, configName); } } catch (Exception ex) { throw new PipelineException("Failed to process exchange " + exchangeId + " and batch " + batchId, ex); } } } }*/ /*private static void fixMissingExchanges() { LOG.info("Fixing missing exchanges"); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id, batch_id, inserted_at FROM ehr.exchange_batch LIMIT 600000;"); stmt.setFetchSize(100); Set<UUID> exchangeIdsDone = new HashSet<>(); AuditRepository auditRepository = new AuditRepository(); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); UUID batchId = row.get(1, UUID.class); Date date = row.getTimestamp(2); //LOG.info("Exchange " + exchangeId + " batch " + batchId + " date " + date); if (exchangeIdsDone.contains(exchangeId)) { continue; } if (auditRepository.getExchange(exchangeId) != null) { continue; } UUID serviceId = findServiceId(batchId, session); if (serviceId == null) { continue; } Exchange exchange = new Exchange(); ExchangeByService exchangeByService = new ExchangeByService(); ExchangeEvent exchangeEvent = new ExchangeEvent(); Map<String, String> headers = new HashMap<>(); headers.put(HeaderKeys.SenderServiceUuid, serviceId.toString()); String headersJson = null; try { headersJson = ObjectMapperPool.getInstance().writeValueAsString(headers); } catch (JsonProcessingException e) { //not throwing this exception further up, since it should never happen //and means we don't need to litter try/catches everywhere this is called from LOG.error("Failed to write exchange headers to Json", e); continue; } exchange.setBody("Body not available, as exchange re-created"); exchange.setExchangeId(exchangeId); exchange.setHeaders(headersJson); exchange.setTimestamp(date); exchangeByService.setExchangeId(exchangeId); exchangeByService.setServiceId(serviceId); exchangeByService.setTimestamp(date); exchangeEvent.setEventDesc("Created_By_Conversion"); 
exchangeEvent.setExchangeId(exchangeId); exchangeEvent.setTimestamp(new Date()); auditRepository.save(exchange); auditRepository.save(exchangeEvent); auditRepository.save(exchangeByService); exchangeIdsDone.add(exchangeId); LOG.info("Creating exchange " + exchangeId); } LOG.info("Finished exchange fix"); } private static UUID findServiceId(UUID batchId, Session session) { Statement stmt = new SimpleStatement("select resource_type, resource_id from ehr.resource_by_exchange_batch where batch_id = " + batchId + " LIMIT 1;"); ResultSet rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_by_exchange_batch for batch_id " + batchId); return null; } Row row = rs.one(); String resourceType = row.getString(0); UUID resourceId = row.get(1, UUID.class); stmt = new SimpleStatement("select service_id from ehr.resource_history where resource_type = '" + resourceType + "' and resource_id = " + resourceId + " LIMIT 1;"); rs = session.execute(stmt); if (rs.isExhausted()) { LOG.error("Failed to find resource_history for resource_type " + resourceType + " and resource_id " + resourceId); return null; } row = rs.one(); UUID serviceId = row.get(0, UUID.class); return serviceId; }*/ /*private static void fixExchangeEvents() { List<ExchangeEvent> events = new AuditRepository().getAllExchangeEvents(); for (ExchangeEvent event: events) { if (event.getEventDesc() != null) { continue; } String eventDesc = ""; int eventType = event.getEvent().intValue(); switch (eventType) { case 1: eventDesc = "Receive"; break; case 2: eventDesc = "Validate"; break; case 3: eventDesc = "Transform_Start"; break; case 4: eventDesc = "Transform_End"; break; case 5: eventDesc = "Send"; break; default: eventDesc = "??? " + eventType; } event.setEventDesc(eventDesc); new AuditRepository().save(null, event); } }*/ /*private static void fixExchanges() { AuditRepository auditRepository = new AuditRepository(); Map<UUID, Set<UUID>> existingOnes = new HashMap(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); List<Exchange> exchanges = auditRepository.getAllExchanges(); for (Exchange exchange: exchanges) { UUID exchangeUuid = exchange.getExchangeId(); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeUuid + " and Json " + headerJson); continue; } *//*String serviceId = headers.get(HeaderKeys.SenderServiceUuid); if (serviceId == null) { LOG.warn("No service ID found for exchange " + exchange.getExchangeId()); continue; } UUID serviceUuid = UUID.fromString(serviceId); Set<UUID> exchangeIdsDone = existingOnes.get(serviceUuid); if (exchangeIdsDone == null) { exchangeIdsDone = new HashSet<>(); List<ExchangeByService> exchangeByServices = auditRepository.getExchangesByService(serviceUuid, Integer.MAX_VALUE); for (ExchangeByService exchangeByService: exchangeByServices) { exchangeIdsDone.add(exchangeByService.getExchangeId()); } existingOnes.put(serviceUuid, exchangeIdsDone); } //create the exchange by service entity if (!exchangeIdsDone.contains(exchangeUuid)) { Date timestamp = exchange.getTimestamp(); ExchangeByService newOne = new ExchangeByService(); newOne.setExchangeId(exchangeUuid); newOne.setServiceId(serviceUuid); newOne.setTimestamp(timestamp); auditRepository.save(newOne); }*//* try { headers.remove(HeaderKeys.BatchIdsJson); String newHeaderJson = 
ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } if (!headers.containsKey(HeaderKeys.BatchIdsJson)) { //fix the batch IDs not being in the exchange List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeUuid); if (!batches.isEmpty()) { List<UUID> batchUuids = batches .stream() .map(t -> t.getBatchId()) .collect(Collectors.toList()); try { String batchUuidsStr = ObjectMapperPool.getInstance().writeValueAsString(batchUuids.toArray()); headers.put(HeaderKeys.BatchIdsJson, batchUuidsStr); String newHeaderJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(newHeaderJson); auditRepository.save(exchange, null); } catch (JsonProcessingException e) { LOG.error("Failed to populate batch IDs for exchange " + exchangeUuid, e); } } //} } }*/ /*private static UUID findSystemId(Service service, String software, String messageVersion) throws PipelineException { List<JsonServiceInterfaceEndpoint> endpoints = null; try { endpoints = ObjectMapperPool.getInstance().readValue(service.getEndpoints(), new TypeReference<List<JsonServiceInterfaceEndpoint>>() {}); for (JsonServiceInterfaceEndpoint endpoint: endpoints) { UUID endpointSystemId = endpoint.getSystemUuid(); String endpointInterfaceId = endpoint.getTechnicalInterfaceUuid().toString(); LibraryRepository libraryRepository = new LibraryRepository(); ActiveItem activeItem = libraryRepository.getActiveItemByItemId(endpointSystemId); Item item = libraryRepository.getItemByKey(endpointSystemId, activeItem.getAuditId()); LibraryItem libraryItem = QueryDocumentSerializer.readLibraryItemFromXml(item.getXmlContent()); System system = libraryItem.getSystem(); for (TechnicalInterface technicalInterface: system.getTechnicalInterface()) { if (endpointInterfaceId.equals(technicalInterface.getUuid()) && technicalInterface.getMessageFormat().equalsIgnoreCase(software) && technicalInterface.getMessageFormatVersion().equalsIgnoreCase(messageVersion)) { return endpointSystemId; } } } } catch (Exception e) { throw new PipelineException("Failed to process endpoints from service " + service.getId()); } return null; } */ /*private static void addSystemIdToExchangeHeaders() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if 
(Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid))) { LOG.info("Skipping exchange " + exchangeId + " as no service UUID"); continue; } if (!Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " as already got system UUID"); continue; } try { //work out service ID String serviceIdStr = headers.get(HeaderKeys.SenderServiceUuid); UUID serviceId = UUID.fromString(serviceIdStr); String software = headers.get(HeaderKeys.SourceSystem); String version = headers.get(HeaderKeys.SystemVersion); Service service = serviceRepository.getById(serviceId); UUID systemUuid = findSystemId(service, software, version); headers.put(HeaderKeys.SenderSystemUuid, systemUuid.toString()); //work out protocol IDs try { String newProtocolIdsJson = DetermineRelevantProtocolIds.getProtocolIdsForPublisherService(serviceIdStr); headers.put(HeaderKeys.ProtocolIds, newProtocolIdsJson); } catch (Exception ex) { LOG.error("Failed to recalculate protocols for " + exchangeId + ": " + ex.getMessage()); } //save to DB headerJson = ObjectMapperPool.getInstance().writeValueAsString(headers); exchange.setHeaders(headerJson); auditRepository.save(exchange); } catch (Exception ex) { LOG.error("Error with exchange " + exchangeId, ex); } } LOG.info("Finished populateExchangeBatchPatients"); }*/ /*private static void populateExchangeBatchPatients() throws Exception { LOG.info("populateExchangeBatchPatients"); AuditRepository auditRepository = new AuditRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); //ServiceRepository serviceRepository = new ServiceRepository(); //OrganisationRepository organisationRepository = new OrganisationRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT exchange_id FROM audit.exchange LIMIT 500;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID exchangeId = row.get(0, UUID.class); org.endeavourhealth.core.data.audit.models.Exchange exchange = auditRepository.getExchange(exchangeId); String headerJson = exchange.getHeaders(); HashMap<String, String> headers = null; try { headers = ObjectMapperPool.getInstance().readValue(headerJson, HashMap.class); } catch (Exception e) { LOG.error("Failed to read headers for exchange " + exchangeId + " and Json " + headerJson); continue; } if (Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderServiceUuid)) || Strings.isNullOrEmpty(headers.get(HeaderKeys.SenderSystemUuid))) { LOG.info("Skipping exchange " + exchangeId + " because no service or system in header"); continue; } try { UUID serviceId = UUID.fromString(headers.get(HeaderKeys.SenderServiceUuid)); UUID systemId = UUID.fromString(headers.get(HeaderKeys.SenderSystemUuid)); List<ExchangeBatch> exchangeBatches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch exchangeBatch : exchangeBatches) { if (exchangeBatch.getEdsPatientId() != null) { continue; } UUID batchId = exchangeBatch.getBatchId(); List<ResourceByExchangeBatch> resourceWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Patient.toString()); if (resourceWrappers.isEmpty()) { continue; } List<UUID> patientIds = new ArrayList<>(); for (ResourceByExchangeBatch resourceWrapper : resourceWrappers) { UUID patientId = resourceWrapper.getResourceId(); if (resourceWrapper.getIsDeleted()) { 
                        deleteEntirePatientRecord(patientId, serviceId, systemId, exchangeId, batchId);
                    }

                    if (!patientIds.contains(patientId)) {
                        patientIds.add(patientId);
                    }
                }

                if (patientIds.size() != 1) {
                    LOG.info("Skipping exchange " + exchangeId + " and batch " + batchId + " because found " + patientIds.size() + " patient IDs");
                    continue;
                }

                UUID patientId = patientIds.get(0);
                exchangeBatch.setEdsPatientId(patientId);

                exchangeBatchRepository.save(exchangeBatch);
            }

        } catch (Exception ex) {
            LOG.error("Error with exchange " + exchangeId, ex);
        }
    }

    LOG.info("Finished populateExchangeBatchPatients");
}

private static void deleteEntirePatientRecord(UUID patientId, UUID serviceId, UUID systemId, UUID exchangeId, UUID batchId) throws Exception {

    FhirStorageService storageService = new FhirStorageService(serviceId, systemId);
    ResourceRepository resourceRepository = new ResourceRepository();

    List<ResourceByPatient> resourceWrappers = resourceRepository.getResourcesByPatient(serviceId, systemId, patientId);
    for (ResourceByPatient resourceWrapper: resourceWrappers) {
        String json = resourceWrapper.getResourceData();
        Resource resource = new JsonParser().parse(json);

        storageService.exchangeBatchDelete(exchangeId, batchId, resource);
    }
}*/

    /*private static void convertPatientSearch() {
        LOG.info("Converting Patient Search");

        ResourceRepository resourceRepository = new ResourceRepository();

        try {
            Iterable<Service> iterable = new ServiceRepository().getAll();
            for (Service service : iterable) {
                UUID serviceId = service.getId();
                LOG.info("Doing service " + service.getName());

                for (UUID systemId : findSystemIds(service)) {

                    List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.EpisodeOfCare.toString());
                    for (ResourceByService resourceWrapper: resourceWrappers) {
                        if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) {
                            continue;
                        }

                        try {
                            EpisodeOfCare episodeOfCare = (EpisodeOfCare) new JsonParser().parse(resourceWrapper.getResourceData());
                            String patientId = ReferenceHelper.getReferenceId(episodeOfCare.getPatient());

                            ResourceHistory patientWrapper = resourceRepository.getCurrentVersion(ResourceType.Patient.toString(), UUID.fromString(patientId));
                            if (Strings.isNullOrEmpty(patientWrapper.getResourceData())) {
                                continue;
                            }

                            Patient patient = (Patient) new JsonParser().parse(patientWrapper.getResourceData());

                            PatientSearchHelper.update(serviceId, systemId, patient);
                            PatientSearchHelper.update(serviceId, systemId, episodeOfCare);
                        } catch (Exception ex) {
                            LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex);
                        }
                    }
                }
            }

            LOG.info("Converted Patient Search");

        } catch (Exception ex) {
            LOG.error("", ex);
        }
    }*/

    /**
     * Collects the system UUIDs from the given service's endpoint list.
     */
    private static List<UUID> findSystemIds(Service service) throws Exception {

        List<UUID> ret = new ArrayList<>();

        try {
            List<ServiceInterfaceEndpoint> endpoints = service.getEndpointsList();
            for (ServiceInterfaceEndpoint endpoint : endpoints) {
                UUID endpointSystemId = endpoint.getSystemUuid();
                ret.add(endpointSystemId);
            }
        } catch (Exception e) {
            //wrap and rethrow, keeping the original exception as the cause so the detail isn't lost
            throw new Exception("Failed to process endpoints from service " + service.getId(), e);
        }

        return ret;
    }

    /*private static void convertPatientLink() {
        LOG.info("Converting Patient Link");

        ResourceRepository resourceRepository = new ResourceRepository();

        try {
            Iterable<Service> iterable = new ServiceRepository().getAll();
            for (Service service : iterable) {
                UUID serviceId = service.getId();
                LOG.info("Doing service " + service.getName());

                for (UUID systemId : 
findSystemIds(service)) { List<ResourceByService> resourceWrappers = resourceRepository.getResourcesByService(serviceId, systemId, ResourceType.Patient.toString()); for (ResourceByService resourceWrapper: resourceWrappers) { if (Strings.isNullOrEmpty(resourceWrapper.getResourceData())) { continue; } try { Patient patient = (Patient)new JsonParser().parse(resourceWrapper.getResourceData()); PatientLinkHelper.updatePersonId(patient); } catch (Exception ex) { LOG.error("Failed on " + resourceWrapper.getResourceType() + " " + resourceWrapper.getResourceId(), ex); } } } } LOG.info("Converted Patient Link"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, ResourceHistory> resourcesFixed = new HashMap<>(); Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Set<UUID> batchIdsToPutInProtocolQueue = new HashSet<>(); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); ResourceFiler filer = new ResourceFiler(exchangeId, serviceId, systemId, null, null, 1); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); 
EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); ProblemPreTransformer.transform(version, parsers, filer, helper); ObservationPreTransformer.transform(version, parsers, filer, helper); DrugRecordPreTransformer.transform(version, parsers, filer, helper); IssueRecordPreTransformer.transform(version, parsers, filer, helper); DiaryPreTransformer.transform(version, parsers, filer, helper); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient)parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() && !patientParser.getDeleted()) { PatientTransformer.createResource(patientParser, filer, helper, version); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { ConsultationTransformer.createResource(consultationParser, filer, helper, version); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { ObservationTransformer.createResource(observationParser, filer, helper, version); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { DiaryTransformer.createResource(diaryParser, filer, helper, version); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && 
!drugRecordParser.getDeleted()) { DrugRecordTransformer.createResource(drugRecordParser, filer, helper, version); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord)parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { IssueRecordTransformer.createResource(issueRecordParser, filer, helper, version); } } issueRecordParser.close(); filer.waitToFinish(); //just to close the thread pool, even though it's not been used List<Resource> resources = filer.getNewResources(); for (Resource resource: resources) { String patientId = IdHelper.getPatientId(resource); UUID edsPatientId = UUID.fromString(patientId); ResourceType resourceType = resource.getResourceType(); UUID resourceId = UUID.fromString(resource.getId()); boolean foundResourceInDbBatch = false; List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds != null) { for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), resourceId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; } foundResourceInDbBatch = true; for (ResourceByExchangeBatch resourceByExchangeBatch : resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (!Strings.isNullOrEmpty(json)) { LOG.warn("JSON already in resource " + resourceType + " " + resourceId); } else { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); LOG.info("Saved resource by batch " + resourceType + " " + resourceId + " in batch " + batchId); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(resourceId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + resourceId + " and version " + versionUuid); } resourceHistory.setIsDeleted(false); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceHistory.setSchemaVersion("0.1"); resourceRepository.save(resourceByExchangeBatch); resourceRepository.save(resourceHistory); batchIdsToPutInProtocolQueue.add(batchId); String key = resourceType.toString() + ":" + resourceId; resourcesFixed.put(key, resourceHistory); } //if a patient became confidential, we will have deleted all resources for that //patient, so we need to undo that too //to undelete WHOLE patient record //1. if THIS resource is a patient //2. get all other deletes from the same exchange batch //3. delete those from resource_by_exchange_batch (the deleted ones only) //4. delete same ones from resource_history //5. retrieve most recent resource_history //6. 
if not deleted, add to resources fixed if (resourceType == ResourceType.Patient) { List<ResourceByExchangeBatch> resourcesInSameBatch = resourceRepository.getResourcesForBatch(batchId); LOG.info("Undeleting " + resourcesInSameBatch.size() + " resources for batch " + batchId); for (ResourceByExchangeBatch resourceInSameBatch: resourcesInSameBatch) { if (!resourceInSameBatch.getIsDeleted()) { continue; } //patient and episode resources will be restored by the above stuff, so don't try //to do it again if (resourceInSameBatch.getResourceType().equals(ResourceType.Patient.toString()) || resourceInSameBatch.getResourceType().equals(ResourceType.EpisodeOfCare.toString())) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(resourceInSameBatch.getResourceId(), resourceInSameBatch.getResourceType(), resourceInSameBatch.getVersion()); mapperResourceByExchangeBatch.delete(resourceInSameBatch); mapperResourceHistory.delete(deletedResourceHistory); batchIdsToPutInProtocolQueue.add(batchId); //check the most recent version of our resource, and if it's not deleted, add to the list to update the resource_by_service table ResourceHistory mostRecentDeletedResourceHistory = resourceRepository.getCurrentVersion(resourceInSameBatch.getResourceType(), resourceInSameBatch.getResourceId()); if (mostRecentDeletedResourceHistory != null && !mostRecentDeletedResourceHistory.getIsDeleted()) { String key2 = mostRecentDeletedResourceHistory.getResourceType().toString() + ":" + mostRecentDeletedResourceHistory.getResourceId(); resourcesFixed.put(key2, mostRecentDeletedResourceHistory); } } } } } } //if we didn't find records in the DB to update, then if (!foundResourceInDbBatch) { //we can't generate a back-dated time UUID, but we need one so the resource_history //table is in order. To get a suitable time UUID, we just pull out the first exchange batch for our exchange, //and the batch ID is actually a time UUID that was allocated around the right time ExchangeBatch firstBatch = exchangeBatchRepository.retrieveFirstForExchangeId(exchangeId); //if there was no batch for the exchange, then the exchange wasn't processed at all. 
So skip this exchange //and we'll pick up the same patient data in a following exchange if (firstBatch == null) { continue; } UUID versionUuid = firstBatch.getBatchId(); //find suitable batch ID UUID batchId = null; if (batchIds != null && batchIds.size() > 0) { batchId = batchIds.get(batchIds.size()-1); } else { //create new batch ID if not found ExchangeBatch exchangeBatch = new ExchangeBatch(); exchangeBatch.setBatchId(UUIDs.timeBased()); exchangeBatch.setExchangeId(exchangeId); exchangeBatch.setInsertedAt(new Date()); exchangeBatch.setEdsPatientId(edsPatientId); exchangeBatchRepository.save(exchangeBatch); batchId = exchangeBatch.getBatchId(); //add to map for next resource if (batchIds == null) { batchIds = new ArrayList<>(); } batchIds.add(batchId); batchesPerPatient.put(edsPatientId, batchIds); } String json = parserPool.composeString(resource); ResourceHistory resourceHistory = new ResourceHistory(); resourceHistory.setResourceId(resourceId); resourceHistory.setResourceType(resourceType.toString()); resourceHistory.setVersion(versionUuid); resourceHistory.setCreatedAt(new Date()); resourceHistory.setServiceId(serviceId); resourceHistory.setSystemId(systemId); resourceHistory.setIsDeleted(false); resourceHistory.setSchemaVersion("0.1"); resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); ResourceByExchangeBatch resourceByExchangeBatch = new ResourceByExchangeBatch(); resourceByExchangeBatch.setBatchId(batchId); resourceByExchangeBatch.setExchangeId(exchangeId); resourceByExchangeBatch.setResourceType(resourceType.toString()); resourceByExchangeBatch.setResourceId(resourceId); resourceByExchangeBatch.setVersion(versionUuid); resourceByExchangeBatch.setIsDeleted(false); resourceByExchangeBatch.setSchemaVersion("0.1"); resourceByExchangeBatch.setResourceData(json); resourceRepository.save(resourceHistory); resourceRepository.save(resourceByExchangeBatch); batchIdsToPutInProtocolQueue.add(batchId); } } if (!batchIdsToPutInProtocolQueue.isEmpty()) { exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchIdsToPutInProtocolQueue); } } //update the resource_by_service table (and the resource_by_patient view) for (ResourceHistory resourceHistory: resourcesFixed.values()) { UUID latestVersionUpdatedUuid = resourceHistory.getVersion(); ResourceHistory latestVersion = resourceRepository.getCurrentVersion(resourceHistory.getResourceType(), resourceHistory.getResourceId()); UUID latestVersionUuid = latestVersion.getVersion(); //if there have been subsequent updates to the resource, then skip it if (!latestVersionUuid.equals(latestVersionUpdatedUuid)) { continue; } Resource resource = parserPool.parse(resourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment)metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(resourceHistory.getServiceId()); resourceByService.setSystemId(resourceHistory.getSystemId()); resourceByService.setResourceType(resourceHistory.getResourceType()); resourceByService.setResourceId(resourceHistory.getResourceId()); resourceByService.setCurrentVersion(resourceHistory.getVersion()); resourceByService.setUpdatedAt(resourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(resourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); 
resourceByService.setResourceData(resourceHistory.getResourceData()); resourceRepository.save(resourceByService); //call out to our patient search and person matching services if (resource instanceof Patient) { PatientLinkHelper.updatePersonId((Patient)resource); PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (Patient)resource); } else if (resource instanceof EpisodeOfCare) { PatientSearchHelper.update(serviceId, resourceHistory.getSystemId(), (EpisodeOfCare)resource); } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Fixing Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixDeletedAppointments(String sharedStoragePath, boolean saveChanges, UUID justThisService) { LOG.info("Fixing Deleted Appointments using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); MappingManager mappingManager = CassandraConnector.getInstance().getMappingManager(); Mapper<ResourceHistory> mapperResourceHistory = mappingManager.mapper(ResourceHistory.class); Mapper<ResourceByExchangeBatch> mapperResourceByExchangeBatch = mappingManager.mapper(ResourceByExchangeBatch.class); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); 
Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class, dir, version, true, parsers); //find any deleted patients List<UUID> deletedPatientUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getDeleted()) { //find the EDS patient ID for this local guid String patientGuid = patientParser.getPatientGuid(); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } deletedPatientUuids.add(edsPatientId); } } patientParser.close(); //go through the appts file to find properly deleted appt GUIDS List<UUID> deletedApptUuids = new ArrayList<>(); org.endeavourhealth.transform.emis.csv.schema.appointment.Slot apptParser = (org.endeavourhealth.transform.emis.csv.schema.appointment.Slot) parsers.get(org.endeavourhealth.transform.emis.csv.schema.appointment.Slot.class); while (apptParser.nextRecord()) { if (apptParser.getDeleted()) { String patientGuid = apptParser.getPatientGuid(); String slotGuid = apptParser.getSlotGuid(); if (!Strings.isNullOrEmpty(patientGuid)) { String uniqueLocalId = EmisCsvHelper.createUniqueId(patientGuid, slotGuid); UUID edsApptId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Appointment, uniqueLocalId); deletedApptUuids.add(edsApptId); } } } apptParser.close(); for (UUID edsPatientId : deletedPatientUuids) { List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; } for (UUID batchId : batchIds) { List<ResourceByExchangeBatch> apptWrappers = resourceRepository.getResourcesForBatch(batchId, ResourceType.Appointment.toString()); for (ResourceByExchangeBatch apptWrapper : apptWrappers) { //ignore non-deleted appts if (!apptWrapper.getIsDeleted()) { continue; } //if the appt was deleted legitamately, then skip it UUID apptId = apptWrapper.getResourceId(); if (deletedApptUuids.contains(apptId)) { continue; } ResourceHistory deletedResourceHistory = resourceRepository.getResourceHistoryByKey(apptWrapper.getResourceId(), apptWrapper.getResourceType(), apptWrapper.getVersion()); if (saveChanges) { mapperResourceByExchangeBatch.delete(apptWrapper); mapperResourceHistory.delete(deletedResourceHistory); } LOG.info("Un-deleted 
" + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " in batch " + batchId + " patient " + edsPatientId); //now get the most recent instance of the appointment, and if it's NOT deleted, insert into the resource_by_service table ResourceHistory mostRecentResourceHistory = resourceRepository.getCurrentVersion(apptWrapper.getResourceType(), apptWrapper.getResourceId()); if (mostRecentResourceHistory != null && !mostRecentResourceHistory.getIsDeleted()) { Resource resource = parserPool.parse(mostRecentResourceHistory.getResourceData()); ResourceMetadata metadata = MetadataFactory.createMetadata(resource); UUID patientId = ((PatientCompartment) metadata).getPatientId(); ResourceByService resourceByService = new ResourceByService(); resourceByService.setServiceId(mostRecentResourceHistory.getServiceId()); resourceByService.setSystemId(mostRecentResourceHistory.getSystemId()); resourceByService.setResourceType(mostRecentResourceHistory.getResourceType()); resourceByService.setResourceId(mostRecentResourceHistory.getResourceId()); resourceByService.setCurrentVersion(mostRecentResourceHistory.getVersion()); resourceByService.setUpdatedAt(mostRecentResourceHistory.getCreatedAt()); resourceByService.setPatientId(patientId); resourceByService.setSchemaVersion(mostRecentResourceHistory.getSchemaVersion()); resourceByService.setResourceMetadata(JsonSerializer.serialize(metadata)); resourceByService.setResourceData(mostRecentResourceHistory.getResourceData()); if (saveChanges) { resourceRepository.save(resourceByService); } LOG.info("Restored " + apptWrapper.getResourceType() + " " + apptWrapper.getResourceId() + " to resource_by_service table"); } } } } } } LOG.info("Finished Deleted Appointments Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixSlotReferencesForPublisher(String publisher) { try { ServiceDalI dal = DalProvider.factoryServiceDal(); List<Service> services = dal.getAll(); for (Service service: services) { if (service.getPublisherConfigName() != null && service.getPublisherConfigName().equals(publisher)) { fixSlotReferences(service.getId()); } } } catch (Exception ex) { LOG.error("", ex); } } private static void fixSlotReferences(UUID serviceId) { LOG.info("Fixing Slot References in Appointments for " + serviceId); try { //get patient IDs from patient search List<UUID> patientIds = new ArrayList<>(); EntityManager entityManager = ConnectionManager.getPublisherTransformEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); Statement statement = connection.createStatement(); String sql = "SELECT eds_id FROM resource_id_map WHERE service_id = '" + serviceId + "' AND resource_type = '" + ResourceType.Patient + "';"; ResultSet rs = statement.executeQuery(sql); while (rs.next()) { String patientUuid = rs.getString(1); patientIds.add(UUID.fromString(patientUuid)); } rs.close(); statement.close(); connection.close(); LOG.debug("Found " + patientIds.size() + " patients"); int done = 0; int fixed = 0; ResourceDalI resourceDal = DalProvider.factoryResourceDal(); EmisCsvHelper csvHelper = new EmisCsvHelper(serviceId, null, null, null, true, null); //for each patient for (UUID patientUuid: patientIds) { //LOG.debug("Checking patient " + patientUuid); //get all appointment resources List<ResourceWrapper> appointmentWrappers = resourceDal.getResourcesByPatient(serviceId, patientUuid, ResourceType.Appointment.toString()); for (ResourceWrapper apptWrapper: 
appointmentWrappers) { //LOG.debug("Checking appointment " + apptWrapper.getResourceId()); List<ResourceWrapper> historyWrappers = resourceDal.getResourceHistory(serviceId, apptWrapper.getResourceType(), apptWrapper.getResourceId()); //the above returns most recent first, but we want to do them in order historyWrappers = Lists.reverse(historyWrappers); for (ResourceWrapper historyWrapper : historyWrappers) { if (historyWrapper.isDeleted()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " is deleted"); continue; } String json = historyWrapper.getResourceData(); Appointment appt = (Appointment) FhirSerializationHelper.deserializeResource(json); if (!appt.hasSlot()) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has no slot"); continue; } if (appt.getSlot().size() != 1) { throw new Exception("Appointment " + appt.getId() + " has " + appt.getSlot().size() + " slot refs"); } Reference slotRef = appt.getSlot().get(0); //test if slot reference exists Reference slotLocalRef = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, slotRef); String slotSourceId = ReferenceHelper.getReferenceId(slotLocalRef); if (slotSourceId.indexOf(":") > -1) { //LOG.debug("Appointment " + historyWrapper.getResourceId() + " has a valid slot"); continue; } //if not, correct slot reference Reference apptEdsReference = ReferenceHelper.createReference(appt.getResourceType(), appt.getId()); Reference apptLocalReference = IdHelper.convertEdsReferenceToLocallyUniqueReference(csvHelper, apptEdsReference); String sourceId = ReferenceHelper.getReferenceId(apptLocalReference); Reference slotLocalReference = ReferenceHelper.createReference(ResourceType.Slot, sourceId); Reference slotEdsReference = IdHelper.convertLocallyUniqueReferenceToEdsReference(slotLocalReference, csvHelper); String slotEdsReferenceValue = slotEdsReference.getReference(); String oldSlotRefValue = slotRef.getReference(); slotRef.setReference(slotEdsReferenceValue); //LOG.debug("Appointment " + historyWrapper.getResourceId() + " slot ref changed from " + oldSlotRefValue + " to " + slotEdsReferenceValue); //save appointment json = FhirSerializationHelper.serializeResource(appt); historyWrapper.setResourceData(json); saveResourceWrapper(serviceId, historyWrapper); fixed++; } } done ++; if (done % 1000 == 0) { LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); } } LOG.debug("Done " + done + " / " + patientIds.size() + " and fixed " + fixed + " appts"); LOG.info("Finished Fixing Slot References in Appointments for " + serviceId); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixReviews(String sharedStoragePath, UUID justThisService) { LOG.info("Fixing Reviews using path " + sharedStoragePath + " and service " + justThisService); ResourceRepository resourceRepository = new ResourceRepository(); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ParserPool parserPool = new ParserPool(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); Map<String, Long> problemCodes = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { 
Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); LOG.info("Doing Emis CSV exchange " + exchangeId + " with " + batches.size() + " batches"); for (ExchangeBatch batch: batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem problemParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation)parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (problemParser.nextRecord()) { String patientGuid = problemParser.getPatientGuid(); String observationGuid = problemParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (!problemCodes.containsKey(key)) { problemCodes.put(key, null); } } problemParser.close(); while (observationParser.nextRecord()) { String patientGuid = observationParser.getPatientGuid(); String observationGuid = observationParser.getObservationGuid(); String key = patientGuid + ":" + observationGuid; if (problemCodes.containsKey(key)) { Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } problemCodes.put(key, codeId); } } observationParser.close(); LOG.info("Found " + problemCodes.size() + " problem codes so far"); String dataSharingAgreementId = EmisCsvToFhirTransformer.findDataSharingAgreementGuid(f); EmisCsvHelper helper = new EmisCsvHelper(dataSharingAgreementId); while (observationParser.nextRecord()) { String problemGuid = observationParser.getProblemGuid(); if (!Strings.isNullOrEmpty(problemGuid)) { String patientGuid = observationParser.getPatientGuid(); Long codeId = observationParser.getCodeId(); if (codeId == null) { continue; } String key = patientGuid + ":" + problemGuid; Long problemCodeId = problemCodes.get(key); if (problemCodeId == null || problemCodeId.longValue() != codeId.longValue()) { continue; } //if here, our code is the same as the problem, so it's a review String locallyUniqueId = patientGuid + ":" + observationParser.getObservationGuid(); ResourceType resourceType = ObservationTransformer.getTargetResourceType(observationParser, helper); for (UUID systemId: systemIds) { UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, 
systemId, ResourceType.Patient, patientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + patientGuid); } UUID edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); if (edsObservationId == null) { //try observations as diagnostic reports, because it could be one of those instead if (resourceType == ResourceType.Observation) { resourceType = ResourceType.DiagnosticReport; edsObservationId = IdHelper.getEdsResourceId(serviceId, systemId, resourceType, locallyUniqueId); } if (edsObservationId == null) { throw new Exception("Failed to find observation ID for service " + serviceId + " system " + systemId + " resourceType " + resourceType + " local ID " + locallyUniqueId); } } List<UUID> batchIds = batchesPerPatient.get(edsPatientId); if (batchIds == null) { //if there are no batches for this patient, we'll be handling this data in another exchange continue; //throw new Exception("Failed to find batch ID for patient " + edsPatientId + " in exchange " + exchangeId + " for resource " + resourceType + " " + edsObservationId); } for (UUID batchId: batchIds) { List<ResourceByExchangeBatch> resourceByExchangeBatches = resourceRepository.getResourcesForBatch(batchId, resourceType.toString(), edsObservationId); if (resourceByExchangeBatches.isEmpty()) { //if we've deleted data, this will be null continue; //throw new Exception("No resources found for batch " + batchId + " resource type " + resourceType + " and resource id " + edsObservationId); } for (ResourceByExchangeBatch resourceByExchangeBatch: resourceByExchangeBatches) { String json = resourceByExchangeBatch.getResourceData(); if (Strings.isNullOrEmpty(json)) { throw new Exception("No JSON in resource " + resourceType + " " + edsObservationId + " in batch " + batchId); } Resource resource = parserPool.parse(json); if (addReviewExtension((DomainResource)resource)) { json = parserPool.composeString(resource); resourceByExchangeBatch.setResourceData(json); LOG.info("Changed " + resourceType + " " + edsObservationId + " to have extension in batch " + batchId); resourceRepository.save(resourceByExchangeBatch); UUID versionUuid = resourceByExchangeBatch.getVersion(); ResourceHistory resourceHistory = resourceRepository.getResourceHistoryByKey(edsObservationId, resourceType.toString(), versionUuid); if (resourceHistory == null) { throw new Exception("Failed to find resource history for " + resourceType + " " + edsObservationId + " and version " + versionUuid); } resourceHistory.setResourceData(json); resourceHistory.setResourceChecksum(FhirStorageService.generateChecksum(json)); resourceRepository.save(resourceHistory); ResourceByService resourceByService = resourceRepository.getResourceByServiceByKey(serviceId, systemId, resourceType.toString(), edsObservationId); if (resourceByService != null) { UUID serviceVersionUuid = resourceByService.getCurrentVersion(); if (serviceVersionUuid.equals(versionUuid)) { resourceByService.setResourceData(json); resourceRepository.save(resourceByService); } } } else { LOG.info("" + resourceType + " " + edsObservationId + " already has extension"); } } } } //1. find out resource type originall saved from //2. retrieve from resource_by_exchange_batch //3. update resource in resource_by_exchange_batch //4. retrieve from resource_history //5. update resource_history //6. retrieve record from resource_by_service //7. 
if resource_by_service version UUID matches the resource_history updated, then update that too } } observationParser.close(); } } LOG.info("Finished Fixing Reviews"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static boolean addReviewExtension(DomainResource resource) { if (ExtensionConverter.hasExtension(resource, FhirExtensionUri.IS_REVIEW)) { return false; } Extension extension = ExtensionConverter.createExtension(FhirExtensionUri.IS_REVIEW, new BooleanType(true)); resource.addExtension(extension); return true; }*/ /*private static void runProtocolsForConfidentialPatients(String sharedStoragePath, UUID justThisService) { LOG.info("Running Protocols for Confidential Patients using path " + sharedStoragePath + " and service " + justThisService); ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); try { Iterable<Service> iterable = new ServiceRepository().getAll(); for (Service service : iterable) { UUID serviceId = service.getId(); if (justThisService != null && !service.getId().equals(justThisService)) { LOG.info("Skipping service " + service.getName()); continue; } //once we match the servce, set this to null to do all other services justThisService = null; LOG.info("Doing service " + service.getName()); List<UUID> systemIds = findSystemIds(service); List<String> interestingPatientGuids = new ArrayList<>(); Map<UUID, Map<UUID, List<UUID>>> batchesPerPatientPerExchange = new HashMap<>(); List<UUID> exchangeIds = new AuditRepository().getExchangeIdsForService(serviceId); for (UUID exchangeId: exchangeIds) { Exchange exchange = AuditWriter.readExchange(exchangeId); String software = exchange.getHeader(HeaderKeys.SourceSystem); if (!software.equalsIgnoreCase(MessageFormat.EMIS_CSV)) { continue; } String body = exchange.getBody(); String[] files = body.split(java.lang.System.lineSeparator()); if (files.length == 0) { continue; } LOG.info("Doing Emis CSV exchange " + exchangeId); Map<UUID, List<UUID>> batchesPerPatient = new HashMap<>(); List<ExchangeBatch> batches = exchangeBatchRepository.retrieveForExchangeId(exchangeId); for (ExchangeBatch batch : batches) { UUID patientId = batch.getEdsPatientId(); if (patientId != null) { List<UUID> batchIds = batchesPerPatient.get(patientId); if (batchIds == null) { batchIds = new ArrayList<>(); batchesPerPatient.put(patientId, batchIds); } batchIds.add(batch.getBatchId()); } } batchesPerPatientPerExchange.put(exchangeId, batchesPerPatient); File f = new File(sharedStoragePath, files[0]); File dir = f.getParentFile(); String version = EmisCsvToFhirTransformer.determineVersion(dir); Map<Class, AbstractCsvParser> parsers = new HashMap<>(); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Problem.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class, dir, version, true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class, dir, version, 
true, parsers); EmisCsvToFhirTransformer.findFileAndOpenParser(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class, dir, version, true, parsers); org.endeavourhealth.transform.emis.csv.schema.admin.Patient patientParser = (org.endeavourhealth.transform.emis.csv.schema.admin.Patient) parsers.get(org.endeavourhealth.transform.emis.csv.schema.admin.Patient.class); while (patientParser.nextRecord()) { if (patientParser.getIsConfidential() || patientParser.getDeleted()) { interestingPatientGuids.add(patientParser.getPatientGuid()); } } patientParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation consultationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Consultation.class); while (consultationParser.nextRecord()) { if (consultationParser.getIsConfidential() && !consultationParser.getDeleted()) { interestingPatientGuids.add(consultationParser.getPatientGuid()); } } consultationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation observationParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Observation.class); while (observationParser.nextRecord()) { if (observationParser.getIsConfidential() && !observationParser.getDeleted()) { interestingPatientGuids.add(observationParser.getPatientGuid()); } } observationParser.close(); org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary diaryParser = (org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary) parsers.get(org.endeavourhealth.transform.emis.csv.schema.careRecord.Diary.class); while (diaryParser.nextRecord()) { if (diaryParser.getIsConfidential() && !diaryParser.getDeleted()) { interestingPatientGuids.add(diaryParser.getPatientGuid()); } } diaryParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord drugRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.DrugRecord.class); while (drugRecordParser.nextRecord()) { if (drugRecordParser.getIsConfidential() && !drugRecordParser.getDeleted()) { interestingPatientGuids.add(drugRecordParser.getPatientGuid()); } } drugRecordParser.close(); org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord issueRecordParser = (org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord) parsers.get(org.endeavourhealth.transform.emis.csv.schema.prescribing.IssueRecord.class); while (issueRecordParser.nextRecord()) { if (issueRecordParser.getIsConfidential() && !issueRecordParser.getDeleted()) { interestingPatientGuids.add(issueRecordParser.getPatientGuid()); } } issueRecordParser.close(); } Map<UUID, Set<UUID>> exchangeBatchesToPutInProtocolQueue = new HashMap<>(); for (String interestingPatientGuid: interestingPatientGuids) { if (systemIds.size() > 1) { throw new Exception("Multiple system IDs for service " + serviceId); } UUID systemId = systemIds.get(0); UUID edsPatientId = IdHelper.getEdsResourceId(serviceId, systemId, ResourceType.Patient, interestingPatientGuid); if (edsPatientId == null) { throw new Exception("Failed to find patient ID for service " + serviceId + " system " + systemId + " resourceType " + ResourceType.Patient + " local ID " + interestingPatientGuid); } for (UUID exchangeId: batchesPerPatientPerExchange.keySet()) { Map<UUID, List<UUID>> 
batchesPerPatient = batchesPerPatientPerExchange.get(exchangeId); List<UUID> batches = batchesPerPatient.get(edsPatientId); if (batches != null) { Set<UUID> batchesForExchange = exchangeBatchesToPutInProtocolQueue.get(exchangeId); if (batchesForExchange == null) { batchesForExchange = new HashSet<>(); exchangeBatchesToPutInProtocolQueue.put(exchangeId, batchesForExchange); } batchesForExchange.addAll(batches); } } } if (!exchangeBatchesToPutInProtocolQueue.isEmpty()) { //find the config for our protocol queue String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline = configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatchesToPutInProtocolQueue.keySet()) { Set<UUID> batchIds = exchangeBatchesToPutInProtocolQueue.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } } LOG.info("Finished Running Protocols for Confidential Patients"); } catch (Exception ex) { LOG.error("", ex); } }*/ /*private static void fixOrgs() { LOG.info("Posting orgs to protocol queue"); String[] orgIds = new String[]{ "332f31a2-7b28-47cb-af6f-18f65440d43d", "c893d66b-eb89-4657-9f53-94c5867e7ed9"}; ExchangeBatchRepository exchangeBatchRepository = new ExchangeBatchRepository(); ResourceRepository resourceRepository = new ResourceRepository(); Map<UUID, Set<UUID>> exchangeBatches = new HashMap<>(); for (String orgId: orgIds) { LOG.info("Doing org ID " + orgId); UUID orgUuid = UUID.fromString(orgId); try { //select batch_id from ehr.resource_by_exchange_batch where resource_type = 'Organization' and resource_id = 8f465517-729b-4ad9-b405-92b487047f19 LIMIT 1 ALLOW FILTERING; ResourceByExchangeBatch resourceByExchangeBatch = resourceRepository.getFirstResourceByExchangeBatch(ResourceType.Organization.toString(), orgUuid); UUID batchId = resourceByExchangeBatch.getBatchId(); //select exchange_id from ehr.exchange_batch where batch_id = 1a940e10-1535-11e7-a29d-a90b99186399 LIMIT 1 ALLOW FILTERING; ExchangeBatch exchangeBatch = exchangeBatchRepository.retrieveFirstForBatchId(batchId); UUID exchangeId = exchangeBatch.getExchangeId(); Set<UUID> list = exchangeBatches.get(exchangeId); if (list == null) { list = new HashSet<>(); exchangeBatches.put(exchangeId, list); } list.add(batchId); } catch (Exception ex) { LOG.error("", ex); break; } } try { //find the config for our protocol queue (which is in the inbound config) String configXml = ConfigManager.getConfiguration("inbound", "queuereader"); //the config XML may be one of two serialised classes, so we use a try/catch to safely try both if necessary QueueReaderConfiguration configuration = ConfigDeserialiser.deserialise(configXml); Pipeline pipeline 
= configuration.getPipeline(); PostMessageToExchangeConfig config = pipeline .getPipelineComponents() .stream() .filter(t -> t instanceof PostMessageToExchangeConfig) .map(t -> (PostMessageToExchangeConfig) t) .filter(t -> t.getExchange().equalsIgnoreCase("EdsProtocol")) .collect(StreamExtension.singleOrNullCollector()); //post to the protocol exchange for (UUID exchangeId : exchangeBatches.keySet()) { Set<UUID> batchIds = exchangeBatches.get(exchangeId); org.endeavourhealth.core.messaging.exchange.Exchange exchange = AuditWriter.readExchange(exchangeId); String batchIdString = ObjectMapperPool.getInstance().writeValueAsString(batchIds); exchange.setHeader(HeaderKeys.BatchIdsJson, batchIdString); LOG.info("Posting exchange " + exchangeId + " batch " + batchIdString); PostMessageToExchange component = new PostMessageToExchange(config); component.process(exchange); } } catch (Exception ex) { LOG.error("", ex); return; } LOG.info("Finished posting orgs to protocol queue"); }*/ /*private static void findCodes() { LOG.info("Finding missing codes"); AuditRepository auditRepository = new AuditRepository(); ServiceRepository serviceRepository = new ServiceRepository(); Session session = CassandraConnector.getInstance().getSession(); Statement stmt = new SimpleStatement("SELECT service_id, system_id, exchange_id, version FROM audit.exchange_transform_audit ALLOW FILTERING;"); stmt.setFetchSize(100); ResultSet rs = session.execute(stmt); while (!rs.isExhausted()) { Row row = rs.one(); UUID serviceId = row.get(0, UUID.class); UUID systemId = row.get(1, UUID.class); UUID exchangeId = row.get(2, UUID.class); UUID version = row.get(3, UUID.class); ExchangeTransformAudit audit = auditRepository.getExchangeTransformAudit(serviceId, systemId, exchangeId, version); String xml = audit.getErrorXml(); if (xml == null) { continue; } String codePrefix = "Failed to find clinical code CodeableConcept for codeId "; int codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " clinical code " + code + " from " + audit.getStarted()); continue; } codePrefix = "Failed to find medication CodeableConcept for codeId "; codeIndex = xml.indexOf(codePrefix); if (codeIndex > -1) { int startIndex = codeIndex + codePrefix.length(); int tagEndIndex = xml.indexOf("<", startIndex); String code = xml.substring(startIndex, tagEndIndex); Service service = serviceRepository.getById(serviceId); String name = service.getName(); LOG.info(name + " drug code " + code + " from " + audit.getStarted()); continue; } } LOG.info("Finished finding missing codes"); }*/ private static void createEmisSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Emis Subset"); try { Set<String> patientGuids = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } patientGuids.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createEmisSubsetForFile(sourceDir, destDir, patientGuids); LOG.info("Finished Creating Emis Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createEmisSubsetForFile(File sourceDir, 
File destDir, Set<String> patientGuids) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createEmisSubsetForFile(sourceFile, destFile, patientGuids); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } CSVFormat format = CSVFormat.DEFAULT.withHeader(); InputStreamReader reader = new InputStreamReader( new BufferedInputStream( new FileInputStream(sourceFile))); CSVParser parser = new CSVParser(reader, format); String filterColumn = null; Map<String, Integer> headerMap = parser.getHeaderMap(); if (headerMap.containsKey("PatientGuid")) { filterColumn = "PatientGuid"; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFile))); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientGuid = csvRecord.get(filterColumn); if (Strings.isNullOrEmpty(patientGuid) //if empty, carry over this record || patientGuids.contains(patientGuid)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createTppSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating TPP Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createTppSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating TPP Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createTppSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } //LOG.info("Doing dir " + sourceFile); createTppSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } Charset encoding = Charset.forName("CP1252"); InputStreamReader reader = new InputStreamReader( 
new BufferedInputStream( new FileInputStream(sourceFile)), encoding); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(); CSVParser parser = new CSVParser(reader, format); String filterColumn = null; Map<String, Integer> headerMap = parser.getHeaderMap(); if (headerMap.containsKey("IDPatient")) { filterColumn = "IDPatient"; } else if (name.equalsIgnoreCase("SRPatient.csv")) { filterColumn = "RowIdentifier"; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFile), encoding)); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); /*} else { //the 2.1 files are going to be a pain to split by patient, so just copy them over LOG.info("Copying 2.1 file " + sourceFile); copyFile(sourceFile, destFile); }*/ } } } private static void createVisionSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Vision Subset"); try { Set<String> personIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } personIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createVisionSubsetForFile(sourceDir, destDir, personIds); LOG.info("Finished Creating Vision Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createVisionSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createVisionSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; if (name.contains("encounter_data") || name.contains("journal_data") || name.contains("patient_data") || name.contains("referral_data")) { filterColumn = 0; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw 
= new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createHomertonSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Homerton Subset"); try { Set<String> PersonIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } PersonIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createHomertonSubsetForFile(sourceDir, destDir, PersonIds); LOG.info("Finished Creating Homerton Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createHomertonSubsetForFile(File sourceDir, File destDir, Set<String> personIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createHomertonSubsetForFile(sourceFile, destFile, personIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withHeader(); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //PersonId column at 1 if (name.contains("ENCOUNTER") || name.contains("PATIENT")) { filterColumn = 1; } else if (name.contains("DIAGNOSIS")) { //PersonId column at 13 filterColumn = 13; } else if (name.contains("ALLERGY")) { //PersonId column at 2 filterColumn = 2; } else if (name.contains("PROBLEM")) { //PersonId column at 4 filterColumn = 4; } else { //if no patient column, just copy the file (i.e. 
PROCEDURE) parser.close(); LOG.info("Copying file without PatientId " + sourceFile); copyFile(sourceFile, destFile); continue; } Map<String, Integer> headerMap = parser.getHeaderMap(); String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String patientId = csvRecord.get(filterColumn); if (personIds.contains(patientId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } private static void createAdastraSubset(String sourceDirPath, String destDirPath, String samplePatientsFile) { LOG.info("Creating Adastra Subset"); try { Set<String> caseIds = new HashSet<>(); List<String> lines = Files.readAllLines(new File(samplePatientsFile).toPath()); for (String line : lines) { line = line.trim(); //ignore comments if (line.startsWith("#")) { continue; } //adastra extract files are all keyed on caseId caseIds.add(line); } File sourceDir = new File(sourceDirPath); File destDir = new File(destDirPath); if (!destDir.exists()) { destDir.mkdirs(); } createAdastraSubsetForFile(sourceDir, destDir, caseIds); LOG.info("Finished Creating Adastra Subset"); } catch (Throwable t) { LOG.error("", t); } } private static void createAdastraSubsetForFile(File sourceDir, File destDir, Set<String> caseIds) throws Exception { File[] files = sourceDir.listFiles(); LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String name = sourceFile.getName(); File destFile = new File(destDir, name); if (sourceFile.isDirectory()) { if (!destFile.exists()) { destFile.mkdirs(); } createAdastraSubsetForFile(sourceFile, destFile, caseIds); } else { if (destFile.exists()) { destFile.delete(); } LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(name); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } FileReader fr = new FileReader(sourceFile); BufferedReader br = new BufferedReader(fr); //fully quote destination file to fix CRLF in columns CSVFormat format = CSVFormat.DEFAULT.withDelimiter('|'); CSVParser parser = new CSVParser(br, format); int filterColumn = -1; //CaseRef column at 0 if (name.contains("NOTES") || name.contains("CASEQUESTIONS") || name.contains("OUTCOMES") || name.contains("CONSULTATION") || name.contains("CLINICALCODES") || name.contains("PRESCRIPTIONS") || name.contains("PATIENT")) { filterColumn = 0; } else if (name.contains("CASE")) { //CaseRef column at 2 filterColumn = 2; } else if (name.contains("PROVIDER")) { //CaseRef column at 7 filterColumn = 7; } else { //if no patient column, just copy the file parser.close(); LOG.info("Copying non-patient file " + sourceFile); copyFile(sourceFile, destFile); continue; } PrintWriter fw = new PrintWriter(destFile); BufferedWriter bw = new BufferedWriter(fw); CSVPrinter printer = new CSVPrinter(bw, format); Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String caseId = csvRecord.get(filterColumn); if 
(caseIds.contains(caseId)) { printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); } } } /*private static void exportFhirToCsv(UUID serviceId, String destinationPath) { try { File dir = new File(destinationPath); if (dir.exists()) { dir.mkdirs(); } Map<String, CSVPrinter> hmPrinters = new HashMap<>(); EntityManager entityManager = ConnectionManager.getEhrEntityManager(serviceId); SessionImpl session = (SessionImpl) entityManager.getDelegate(); Connection connection = session.connection(); PreparedStatement ps = connection.prepareStatement("SELECT resource_id, resource_type, resource_data FROM resource_current"); LOG.debug("Running query"); ResultSet rs = ps.executeQuery(); LOG.debug("Got result set"); while (rs.next()) { String id = rs.getString(1); String type = rs.getString(2); String json = rs.getString(3); CSVPrinter printer = hmPrinters.get(type); if (printer == null) { String path = FilenameUtils.concat(dir.getAbsolutePath(), type + ".tsv"); FileWriter fileWriter = new FileWriter(new File(path)); BufferedWriter bufferedWriter = new BufferedWriter(fileWriter); CSVFormat format = CSVFormat.DEFAULT .withHeader("resource_id", "resource_json") .withDelimiter('\t') .withEscape((Character) null) .withQuote((Character) null) .withQuoteMode(QuoteMode.MINIMAL); printer = new CSVPrinter(bufferedWriter, format); hmPrinters.put(type, printer); } printer.printRecord(id, json); } for (String type : hmPrinters.keySet()) { CSVPrinter printer = hmPrinters.get(type); printer.flush(); printer.close(); } ps.close(); entityManager.close(); } catch (Throwable t) { LOG.error("", t); } }*/ /*private static void fixTPPNullOrgs(String sourceDir, String orgODS) throws Exception { final String COLUMN_ORG = "IDOrganisationVisibleTo"; File[] files = new File(sourceDir).listFiles(); if (files == null) return; LOG.info("Found " + files.length + " files in " + sourceDir); for (File sourceFile : files) { String sourceFileName = sourceFile.getName(); if (sourceFile.isDirectory()) { fixTPPNullOrgs(sourceFileName, orgODS); } else { LOG.info("Checking file " + sourceFile); //skip any non-CSV file String ext = FilenameUtils.getExtension(sourceFileName); if (!ext.equalsIgnoreCase("csv")) { LOG.info("Skipping as not a CSV file"); continue; } Charset encoding = Charset.forName("CP1252"); InputStreamReader reader = new InputStreamReader( new BufferedInputStream( new FileInputStream(sourceFile)), encoding); CSVFormat format = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL).withHeader(); CSVParser parser = new CSVParser(reader, format); Map<String, Integer> headerMap = parser.getHeaderMap(); if (!headerMap.containsKey(COLUMN_ORG)) { //if no COLUMN_ORG column, ignore LOG.info("Ignoring file with no " + COLUMN_ORG + " column: " + sourceFile); parser.close(); continue; } String[] columnHeaders = new String[headerMap.size()]; Iterator<String> headerIterator = headerMap.keySet().iterator(); while (headerIterator.hasNext()) { String headerName = headerIterator.next(); int headerIndex = headerMap.get(headerName); columnHeaders[headerIndex] = headerName; } String destFileName = sourceFileName.concat(".FIXED"); BufferedWriter bw = new BufferedWriter( new OutputStreamWriter( new FileOutputStream(destFileName), encoding)); CSVPrinter printer = new CSVPrinter(bw, format.withHeader(columnHeaders)); //iterate down the file and look at Org Column Iterator<CSVRecord> csvIterator = parser.iterator(); while (csvIterator.hasNext()) { CSVRecord csvRecord = csvIterator.next(); String fileOrgODS = 
csvRecord.get(COLUMN_ORG); //set the empty value to that orgODS value passed in if (Strings.isNullOrEmpty(fileOrgODS)) { Map <String, String> recordMap = csvRecord.toMap(); recordMap.put(COLUMN_ORG, String.valueOf(orgODS)); List<String> alteredCsvRecord = new ArrayList<String>(); for (String key : columnHeaders) { alteredCsvRecord.add(recordMap.get(key)); } printer.printRecord(alteredCsvRecord); printer.flush(); } else { if (!fileOrgODS.equalsIgnoreCase(orgODS)) { parser.close(); printer.flush(); printer.close(); throw new Exception("File contains different ODS codes to parameter value - aborting"); } //write the record back unchanged printer.printRecord(csvRecord); printer.flush(); } } parser.close(); printer.close(); //Finally, delete source file and rename the fixed destination file back to source sourceFile.delete(); new File (destFileName).renameTo(new File (sourceFileName)); } } }*/ } /*class ResourceFiler extends FhirResourceFiler { public ResourceFiler(UUID exchangeId, UUID serviceId, UUID systemId, TransformError transformError, List<UUID> batchIdsCreated, int maxFilingThreads) { super(exchangeId, serviceId, systemId, transformError, batchIdsCreated, maxFilingThreads); } private List<Resource> newResources = new ArrayList<>(); public List<Resource> getNewResources() { return newResources; } @Override public void saveAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling saveAdminResource"); } @Override public void deleteAdminResource(CsvCurrentState parserState, boolean mapIds, Resource... resources) throws Exception { throw new Exception("shouldn't be calling deleteAdminResource"); } @Override public void savePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... resources) throws Exception { for (Resource resource: resources) { if (mapIds) { IdHelper.mapIds(getServiceId(), getSystemId(), resource); } newResources.add(resource); } } @Override public void deletePatientResource(CsvCurrentState parserState, boolean mapIds, String patientId, Resource... 
resources) throws Exception { throw new Exception("shouldn't be calling deletePatientResource"); } }*/ /* class MoveToS3Runnable implements Runnable { private static final Logger LOG = LoggerFactory.getLogger(MoveToS3Runnable.class); private List<FileInfo> files = null; private AtomicInteger done = null; public MoveToS3Runnable(List<FileInfo> files, AtomicInteger done) { this.files = files; this.done = done; } @Override public void run() { try { doWork(); } catch (Exception ex) { LOG.error("", ex); } } private void doWork() throws Exception { SourceFileMappingDalI db = DalProvider.factorySourceFileMappingDal(); //write to database //Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>(); for (FileInfo info: files) { String path = info.getFilePath(); InputStream inputStream = FileHelper.readFileFromSharedStorage(path); ZipInputStream zis = new ZipInputStream(inputStream); ZipEntry entry = zis.getNextEntry(); if (entry == null) { throw new Exception("No entry in zip file " + path); } byte[] entryBytes = IOUtils.toByteArray(zis); String json = new String(entryBytes); inputStream.close(); ResourceFieldMappingAudit audit = ResourceFieldMappingAudit.readFromJson(json); ResourceWrapper wrapper = new ResourceWrapper(); String versionStr = FilenameUtils.getBaseName(path); wrapper.setVersion(UUID.fromString(versionStr)); Date d = info.getLastModified(); wrapper.setCreatedAt(d); File f = new File(path); f = f.getParentFile(); String resourceIdStr = f.getName(); wrapper.setResourceId(UUID.fromString(resourceIdStr)); f = f.getParentFile(); String resourceTypeStr = f.getName(); wrapper.setResourceType(resourceTypeStr); f = f.getParentFile(); String serviceIdStr = f.getName(); wrapper.setServiceId(UUID.fromString(serviceIdStr)); Map<ResourceWrapper, ResourceFieldMappingAudit> batch = new HashMap<>(); batch.put(wrapper, audit); try { db.saveResourceMappings(batch); } catch (Exception ex) { String msg = ex.getMessage(); if (msg.indexOf("Duplicate entry") == -1) { throw ex; } } */ /*if (batch.size() > 5) { db.saveResourceMappings(batch); batch.clear(); }*//* int nowDone = done.incrementAndGet(); if (nowDone % 1000 == 0) { LOG.debug("Done " + nowDone + " / " + files.size()); } } */ /*if (!batch.isEmpty()) { db.saveResourceMappings(batch); batch.clear(); }*//* } }*/ class PopulateDataDateCallable implements Callable { private static final Logger LOG = LoggerFactory.getLogger(PopulateDataDateCallable.class); private static ExchangeDalI exchangeDal = DalProvider.factoryExchangeDal(); private UUID exchangeId = null; private AtomicInteger fixed = null; public PopulateDataDateCallable(UUID exchangeId, AtomicInteger fixed) { this.exchangeId = exchangeId; this.fixed = fixed; } private void doWork() throws Exception { Exchange exchange = exchangeDal.getExchange(exchangeId); //check if already done String existingVal = exchange.getHeader(HeaderKeys.DataDate); String software = exchange.getHeader(HeaderKeys.SourceSystem); String version = exchange.getHeader(HeaderKeys.SystemVersion); if (!Strings.isNullOrEmpty(existingVal)) { LOG.info("Already done exchange " + exchange.getId() + " software " + software + " version " + version); markAsDone(); return; } String body = exchange.getBody(); if (body.equals("[]")) { LOG.error("Empty body found in exchange " + exchange.getId() + " software " + software + " version " + version); markAsDone(); return; } Date lastDataDate = OpenEnvelope.calculateLastDataDate(software, version, body); if (lastDataDate == null) { LOG.error("Failed to calculate data for exchange " 
+ exchange.getId() + " software " + software + " version " + version); markAsDone(); return; } exchange.setHeaderAsDate(HeaderKeys.DataDate, lastDataDate); exchangeDal.save(exchange); //mark as done markAsDone(); fixed.incrementAndGet(); } private void markAsDone() throws Exception { EntityManager auditEntityManager = ConnectionManager.getAuditEntityManager(); auditEntityManager.getTransaction().begin(); SessionImpl auditSession = (SessionImpl)auditEntityManager.getDelegate(); Connection auditConnection = auditSession.connection(); String sql = "UPDATE drewtest.exchange_ids SET done = 1 WHERE id = ?"; PreparedStatement ps = auditConnection.prepareStatement(sql); ps.setString(1, exchangeId.toString()); ps.executeUpdate(); auditEntityManager.getTransaction().commit(); ps.close(); auditEntityManager.close(); //LOG.debug("Marked as done using: " + sql); } @Override public Object call() throws Exception { try { doWork(); } catch (Throwable ex) { LOG.error("Error with " + exchangeId, ex); } return null; } } /* class TestRabbitConsumer extends DefaultConsumer { private static final Logger LOG = LoggerFactory.getLogger(TestRabbitConsumer.class); public TestRabbitConsumer(Channel channel) { super(channel); } @Override public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] bytes) throws IOException { long deliveryTag = envelope.getDeliveryTag(); String bodyStr = new String(bytes, "UTF-8"); LOG.info("Received exchange body: " + bodyStr); try { Thread.sleep(1000); } catch (Throwable t) { LOG.error("", t); } this.getChannel().basicAck(deliveryTag, false); } }*/
Adding a routine to detect exchanges for TPP publishers that contain bulk extracts of non-patient files that should not have been created
src/eds-queuereader/src/main/java/org/endeavourhealth/queuereader/Main.java
Adding a routine to detect exchanges for TPP publishers that contain bulk extracts of non-patient files that should not have been created
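The commit message above only names the new detection routine; the routine itself is not visible in the excerpt of Main.java shown in this record, so the following is a purely illustrative sketch rather than the project's implementation. It assumes, as the exchange-handling code elsewhere in Main.java does, that an exchange body is a newline-separated list of file paths, and it flags an exchange when the share of known non-patient TPP file names crosses a threshold. The class name, the marker list, the threshold, and the demo paths are all hypothetical.

import java.util.Arrays;
import java.util.List;

public class TppBulkCheckSketch {

    // File-name fragments treated here as TPP files with no per-patient rows.
    // The real routine would use the project's own list; these are examples only.
    private static final List<String> NON_PATIENT_MARKERS =
            Arrays.asList("SRMapping", "SRConfiguredList", "SRCtv3", "SROrganisation");

    /**
     * Returns true if the proportion of non-patient files in the exchange
     * file list meets or exceeds the given threshold (e.g. 0.9).
     */
    public static boolean looksLikeNonPatientBulk(String exchangeBody, double threshold) {
        String[] files = exchangeBody.split(System.lineSeparator());
        if (files.length == 0) {
            return false;
        }
        int nonPatientCount = 0;
        for (String file : files) {
            for (String marker : NON_PATIENT_MARKERS) {
                if (file.contains(marker)) {
                    nonPatientCount++;
                    break;
                }
            }
        }
        return ((double) nonPatientCount / files.length) >= threshold;
    }

    public static void main(String[] args) {
        String body = String.join(System.lineSeparator(),
                "sftpReader/TPP/SRMapping.csv", "sftpReader/TPP/SRCtv3.csv");
        System.out.println(looksLikeNonPatientBulk(body, 0.9)); // prints true
    }
}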
Java
apache-2.0
69cf702d273add11b6a5cf01e07fe3df7a6d41c6
0
subclipse/svnclientadapter
/******************************************************************************* * Copyright (c) 2003, 2006 svnClientAdapter project and others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Contributors: * svnClientAdapter project committers - initial API and implementation ******************************************************************************/ package org.tigris.subversion.svnclientadapter.javahl; import java.text.ParseException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.subversion.javahl.types.ChangePath; import org.apache.subversion.javahl.types.LogDate; import org.tigris.subversion.svnclientadapter.ISVNLogMessage; import org.tigris.subversion.svnclientadapter.ISVNLogMessageChangePath; import org.tigris.subversion.svnclientadapter.SVNRevision; /** * A JavaHL based implementation of {@link ISVNLogMessage}. * Actually just an adapter from {@link org.tigris.subversion.javahl.LogMessage} * * @author philip schatz */ public class JhlLogMessage implements ISVNLogMessage { private static final String EMPTY = ""; private List<ISVNLogMessage> children; private boolean hasChildren; private ISVNLogMessageChangePath[] changedPaths; private SVNRevision.Number revision; private Map<String, byte[]> revprops; private LogDate logDate; public JhlLogMessage(Set<ChangePath> changedPaths, long revision, Map<String, byte[]> revprops, boolean hasChildren) { this.changedPaths = JhlConverter.convertChangePaths(changedPaths); this.revision = new SVNRevision.Number(revision); this.revprops = revprops; if (this.revprops == null) { this.revprops = new HashMap<String, byte[]>(2); // avoid NullPointerErrors this.revprops.put(AUTHOR, EMPTY.getBytes()); this.revprops.put(MESSAGE, EMPTY.getBytes()); } this.hasChildren = hasChildren; try { logDate = new LogDate(new String(this.revprops.get(DATE))); } catch (ParseException e) { } } public void addChild(ISVNLogMessage msg) { if (children == null) children = new ArrayList<ISVNLogMessage>(); children.add(msg); } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getRevision() */ public SVNRevision.Number getRevision() { return revision; } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getAuthor() */ public String getAuthor() { byte[] author = revprops.get(AUTHOR); if (author == null) { return ""; } else { return new String(author); } } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getDate() */ public Date getDate() { if (logDate == null) return new Date(0L); return logDate.getDate(); } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getMessage() */ public String getMessage() { byte[] message = revprops.get(MESSAGE); if (message == null) { return ""; } else { return new String(message); } } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getChangedPaths() */ public ISVNLogMessageChangePath[] getChangedPaths() { 
return changedPaths; } /* (non-Javadoc) * @see java.lang.Object#toString() */ public String toString() { return getMessage(); } public ISVNLogMessage[] getChildMessages() { if (hasChildren && children != null) { ISVNLogMessage[] childArray = new JhlLogMessage[children.size()]; children.toArray(childArray); return childArray; } else return null; } public long getNumberOfChildren() { if (hasChildren && children != null) return children.size(); else return 0L; } public long getTimeMillis() { if (logDate == null) return 0L; return logDate.getTimeMillis(); } public long getTimeMicros() { if (logDate == null) return 0L; return logDate.getTimeMicros(); } public boolean hasChildren() { return hasChildren; } }
src/javahl/org/tigris/subversion/svnclientadapter/javahl/JhlLogMessage.java
/******************************************************************************* * Copyright (c) 2003, 2006 svnClientAdapter project and others. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Contributors: * svnClientAdapter project committers - initial API and implementation ******************************************************************************/ package org.tigris.subversion.svnclientadapter.javahl; import java.text.ParseException; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.subversion.javahl.types.ChangePath; import org.apache.subversion.javahl.types.LogDate; import org.tigris.subversion.svnclientadapter.ISVNLogMessage; import org.tigris.subversion.svnclientadapter.ISVNLogMessageChangePath; import org.tigris.subversion.svnclientadapter.SVNRevision; /** * A JavaHL based implementation of {@link ISVNLogMessage}. * Actually just an adapter from {@link org.tigris.subversion.javahl.LogMessage} * * @author philip schatz */ public class JhlLogMessage implements ISVNLogMessage { private static final String EMPTY = ""; private List<ISVNLogMessage> children; private boolean hasChildren; private ISVNLogMessageChangePath[] changedPaths; private SVNRevision.Number revision; private Map<String, byte[]> revprops; private LogDate logDate; public JhlLogMessage(Set<ChangePath> changedPaths, long revision, Map<String, byte[]> revprops, boolean hasChildren) { this.changedPaths = JhlConverter.convertChangePaths(changedPaths); this.revision = new SVNRevision.Number(revision); this.revprops = revprops; if (this.revprops == null) { this.revprops = new HashMap<String, byte[]>(2); // avoid NullPointerErrors this.revprops.put(AUTHOR, EMPTY.getBytes()); this.revprops.put(MESSAGE, EMPTY.getBytes()); } this.hasChildren = hasChildren; try { logDate = new LogDate(new String(this.revprops.get(DATE))); } catch (ParseException e) { } } public void addChild(ISVNLogMessage msg) { if (children == null) children = new ArrayList<ISVNLogMessage>(); children.add(msg); } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getRevision() */ public SVNRevision.Number getRevision() { return revision; } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getAuthor() */ public String getAuthor() { return new String(revprops.get(AUTHOR)); } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getDate() */ public Date getDate() { if (logDate == null) return new Date(0L); return logDate.getDate(); } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getMessage() */ public String getMessage() { return new String(revprops.get(MESSAGE)); } /* (non-Javadoc) * @see org.tigris.subversion.svnclientadapter.ISVNLogMessage#getChangedPaths() */ public ISVNLogMessageChangePath[] getChangedPaths() { return changedPaths; } /* (non-Javadoc) * @see java.lang.Object#toString() */ public String toString() { return getMessage(); } public 
ISVNLogMessage[] getChildMessages() { if (hasChildren && children != null) { ISVNLogMessage[] childArray = new JhlLogMessage[children.size()]; children.toArray(childArray); return childArray; } else return null; } public long getNumberOfChildren() { if (hasChildren && children != null) return children.size(); else return 0L; } public long getTimeMillis() { if (logDate == null) return 0L; return logDate.getTimeMillis(); } public long getTimeMicros() { if (logDate == null) return 0L; return logDate.getTimeMicros(); } public boolean hasChildren() { return hasChildren; } }
Fix NPE in String initialization when revprops author or message is null.
src/javahl/org/tigris/subversion/svnclientadapter/javahl/JhlLogMessage.java
Fix NPE in String initialization when revprops author or message is null.
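The fix recorded above replaces direct calls like new String(revprops.get(AUTHOR)) with a null guard that falls back to an empty string. A minimal, self-contained illustration of that pattern follows; it is not the JavaHL code itself, just the same guard applied to a plain map, with the key names made up for the demo.

import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.Map;

public class RevPropDemo {

    // Mirrors the fixed getAuthor()/getMessage(): a missing revprop yields ""
    // instead of letting new String((byte[]) null) throw a NullPointerException.
    static String propAsString(Map<String, byte[]> revprops, String key) {
        byte[] value = revprops.get(key);
        if (value == null) {
            return "";
        }
        return new String(value, StandardCharsets.UTF_8);
    }

    public static void main(String[] args) {
        Map<String, byte[]> revprops = new HashMap<>();
        revprops.put("svn:author", "alice".getBytes(StandardCharsets.UTF_8));
        System.out.println(propAsString(revprops, "svn:author")); // alice
        System.out.println(propAsString(revprops, "svn:log"));    // "" rather than an NPE
    }
}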
Java
apache-2.0
ea934c554e31e1b95f0d4452fd3951cbb17bb6de
0
yanagishima/yanagishima,yanagishima/yanagishima,yanagishima/yanagishima,yanagishima/yanagishima
package yanagishima.service; import com.facebook.presto.client.*; import com.google.common.collect.Lists; import io.airlift.http.client.HttpClientConfig; import io.airlift.http.client.jetty.JettyHttpClient; import io.airlift.json.JsonCodec; import io.airlift.units.DataSize; import io.airlift.units.Duration; import me.geso.tinyorm.TinyORM; import yanagishima.config.YanagishimaConfig; import yanagishima.exception.QueryErrorException; import yanagishima.result.PrestoQueryResult; import yanagishima.row.Query; import javax.inject.Inject; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.net.URI; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.SQLException; import java.time.ZonedDateTime; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static io.airlift.json.JsonCodec.jsonCodec; import static java.lang.String.format; import static java.util.concurrent.TimeUnit.MINUTES; public class PrestoServiceImpl implements PrestoService { private YanagishimaConfig yanagishimaConfig; private JettyHttpClient httpClient; @Inject private TinyORM db; @Inject public PrestoServiceImpl(YanagishimaConfig yanagishimaConfig) { this.yanagishimaConfig = yanagishimaConfig; HttpClientConfig httpClientConfig = new HttpClientConfig().setConnectTimeout(new Duration(10, TimeUnit.SECONDS)); this.httpClient = new JettyHttpClient(httpClientConfig); } @Override public PrestoQueryResult doQuery(String datasource, String query, String userName) throws QueryErrorException { long start = System.currentTimeMillis(); Duration queryMaxRunTime = new Duration(yanagishimaConfig.getQueryMaxRunTimeSeconds(), TimeUnit.SECONDS); try (StatementClient client = getStatementClient(datasource, query, userName)) { while (client.isValid() && (client.current().getData() == null)) { client.advance(); if(System.currentTimeMillis() - start > queryMaxRunTime.toMillis()) { throw new RuntimeException("Query exceeded maximum time limit of " + queryMaxRunTime); } } if ((!client.isFailed()) && (!client.isGone()) && (!client.isClosed())) { QueryResults results = client.isValid() ? 
client.current() : client.finalResults(); String queryId = results.getId(); if (results.getUpdateType() != null) { PrestoQueryResult prestoQueryResult = new PrestoQueryResult(); prestoQueryResult.setQueryId(queryId); prestoQueryResult.setUpdateType(results.getUpdateType()); insertQueryHistory(datasource, query, queryId); return prestoQueryResult; } else if (results.getColumns() == null) { throw new QueryErrorException(new SQLException(format("Query %s has no columns\n", results.getId()))); } else { PrestoQueryResult prestoQueryResult = new PrestoQueryResult(); prestoQueryResult.setQueryId(queryId); prestoQueryResult.setUpdateType(results.getUpdateType()); List<String> columns = Lists.transform(results.getColumns(), Column::getName); prestoQueryResult.setColumns(columns); List<List<String>> rowDataList = new ArrayList<List<String>>(); processData(client, datasource, queryId, prestoQueryResult, columns, rowDataList); prestoQueryResult.setRecords(rowDataList); insertQueryHistory(datasource, query, queryId); return prestoQueryResult; } } if (client.isClosed()) { throw new RuntimeException("Query aborted by user"); } else if (client.isGone()) { throw new RuntimeException("Query is gone (server restarted?)"); } else if (client.isFailed()) { QueryResults results = client.finalResults(); String queryId = results.getId(); db.insert(Query.class) .value("datasource", datasource) .value("query_id", queryId) .value("fetch_result_time_string", ZonedDateTime.now().toString()) .value("query_string", query) .execute(); Path dst = getResultFilePath(datasource, queryId, true); QueryError error = results.getError(); String message = format("Query failed (#%s): %s", results.getId(), error.getMessage()); try (BufferedWriter bw = Files.newBufferedWriter(dst, StandardCharsets.UTF_8)) { bw.write(message); } catch (IOException e) { throw new RuntimeException(e); } throw resultsException(results); } } throw new RuntimeException("should not reach"); } private void insertQueryHistory(String datasource, String query, String queryId) { if(!query.toLowerCase().startsWith("show") && !query.toLowerCase().startsWith("explain")) { db.insert(Query.class) .value("datasource", datasource) .value("query_id", queryId) .value("fetch_result_time_string", ZonedDateTime.now().toString()) .value("query_string", query) .execute(); } } private void processData(StatementClient client, String datasource, String queryId, PrestoQueryResult prestoQueryResult, List<String> columns, List<List<String>> rowDataList) { int limit = yanagishimaConfig.getSelectLimit(); Path dst = getResultFilePath(datasource, queryId, false); int lineNumber = 0; try (BufferedWriter bw = Files.newBufferedWriter(dst, StandardCharsets.UTF_8)) { bw.write(String.join("\t", columns)); bw.write("\n"); lineNumber++; while (client.isValid()) { Iterable<List<Object>> data = client.current().getData(); if (data != null) { for(List<Object> row : data) { List<String> columnDataList = new ArrayList<>(); List<Object> tmpColumnDataList = row.stream().collect(Collectors.toList()); for (Object tmpColumnData : tmpColumnDataList) { if (tmpColumnData instanceof Long) { columnDataList.add(((Long) tmpColumnData).toString()); } else { if (tmpColumnData == null) { columnDataList.add(null); } else { columnDataList.add(tmpColumnData.toString()); } } } try { bw.write(String.join("\t", columnDataList)); bw.write("\n"); lineNumber++; } catch (IOException e) { throw new RuntimeException(e); } if (client.getQuery().toLowerCase().startsWith("show") || rowDataList.size() < limit) { 
rowDataList.add(columnDataList); } else { prestoQueryResult.setWarningMessage(String.format("now fetch size is %d. This is more than %d. So, fetch operation stopped.", rowDataList.size(), limit)); } } } client.advance(); } } catch (IOException e) { throw new RuntimeException(e); } prestoQueryResult.setLineNumber(lineNumber); try { long size = Files.size(dst); DataSize rawDataSize = new DataSize(size, DataSize.Unit.BYTE); prestoQueryResult.setRawDataSize(rawDataSize.convertToMostSuccinctDataSize()); } catch (IOException e) { throw new RuntimeException(e); } } private Path getResultFilePath(String datasource, String queryId, boolean error) { String currentPath = new File(".").getAbsolutePath(); String yyyymmdd = queryId.substring(0, 8); File datasourceDir = new File(String.format("%s/result/%s", currentPath, datasource)); if (!datasourceDir.isDirectory()) { datasourceDir.mkdir(); } File yyyymmddDir = new File(String.format("%s/result/%s/%s", currentPath, datasource, yyyymmdd)); if (!yyyymmddDir.isDirectory()) { yyyymmddDir.mkdir(); } if(error) { return Paths.get(String.format("%s/result/%s/%s/%s.err", currentPath, datasource, yyyymmdd, queryId)); } else { return Paths.get(String.format("%s/result/%s/%s/%s.tsv", currentPath, datasource, yyyymmdd, queryId)); } } private StatementClient getStatementClient(String datasource, String query, String userName) { String prestoCoordinatorServer = yanagishimaConfig .getPrestoCoordinatorServer(datasource); String catalog = yanagishimaConfig.getCatalog(datasource); String schema = yanagishimaConfig.getSchema(datasource); String user = null; if(userName == null ) { user = yanagishimaConfig.getUser(); } else { user = userName; } String source = yanagishimaConfig.getSource(); JsonCodec<QueryResults> jsonCodec = jsonCodec(QueryResults.class); ClientSession clientSession = new ClientSession( URI.create(prestoCoordinatorServer), user, source, null, catalog, schema, TimeZone.getDefault().getID(), Locale.getDefault(), new HashMap<String, String>(), null, false, new Duration(2, MINUTES)); return new StatementClient(httpClient, jsonCodec, clientSession, query); } private QueryErrorException resultsException(QueryResults results) { QueryError error = results.getError(); String message = format("Query failed (#%s): %s", results.getId(), error.getMessage()); Throwable cause = (error.getFailureInfo() == null) ? null : error.getFailureInfo().toException(); return new QueryErrorException(results.getId(), error, new SQLException(message, error.getSqlState(), error.getErrorCode(), cause)); } }
src/main/java/yanagishima/service/PrestoServiceImpl.java
package yanagishima.service; import com.facebook.presto.client.*; import com.google.common.collect.Lists; import io.airlift.http.client.HttpClientConfig; import io.airlift.http.client.jetty.JettyHttpClient; import io.airlift.json.JsonCodec; import io.airlift.units.DataSize; import io.airlift.units.Duration; import me.geso.tinyorm.TinyORM; import yanagishima.config.YanagishimaConfig; import yanagishima.exception.QueryErrorException; import yanagishima.result.PrestoQueryResult; import yanagishima.row.Query; import javax.inject.Inject; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.net.URI; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.sql.SQLException; import java.time.ZonedDateTime; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import static io.airlift.json.JsonCodec.jsonCodec; import static java.lang.String.format; import static java.util.concurrent.TimeUnit.MINUTES; public class PrestoServiceImpl implements PrestoService { private YanagishimaConfig yanagishimaConfig; private JettyHttpClient httpClient; @Inject private TinyORM db; @Inject public PrestoServiceImpl(YanagishimaConfig yanagishimaConfig) { this.yanagishimaConfig = yanagishimaConfig; HttpClientConfig httpClientConfig = new HttpClientConfig().setConnectTimeout(new Duration(10, TimeUnit.SECONDS)); this.httpClient = new JettyHttpClient(httpClientConfig); } @Override public PrestoQueryResult doQuery(String datasource, String query, String userName) throws QueryErrorException { long start = System.currentTimeMillis(); Duration queryMaxRunTime = new Duration(yanagishimaConfig.getQueryMaxRunTimeSeconds(), TimeUnit.SECONDS); try (StatementClient client = getStatementClient(datasource, query, userName)) { while (client.isValid() && (client.current().getData() == null)) { client.advance(); if(System.currentTimeMillis() - start > queryMaxRunTime.toMillis()) { throw new RuntimeException("Query exceeded maximum time limit of " + queryMaxRunTime); } } if ((!client.isFailed()) && (!client.isGone()) && (!client.isClosed())) { QueryResults results = client.isValid() ? 
client.current() : client.finalResults(); String queryId = results.getId(); if (results.getUpdateType() != null) { PrestoQueryResult prestoQueryResult = new PrestoQueryResult(); prestoQueryResult.setQueryId(queryId); prestoQueryResult.setUpdateType(results.getUpdateType()); insertQueryHistory(datasource, query, queryId); return prestoQueryResult; } else if (results.getColumns() == null) { throw new QueryErrorException(new SQLException(format("Query %s has no columns\n", results.getId()))); } else { PrestoQueryResult prestoQueryResult = new PrestoQueryResult(); prestoQueryResult.setQueryId(queryId); prestoQueryResult.setUpdateType(results.getUpdateType()); List<String> columns = Lists.transform(results.getColumns(), Column::getName); prestoQueryResult.setColumns(columns); List<List<String>> rowDataList = new ArrayList<List<String>>(); processData(client, datasource, queryId, prestoQueryResult, columns, rowDataList); prestoQueryResult.setRecords(rowDataList); insertQueryHistory(datasource, query, queryId); return prestoQueryResult; } } if (client.isClosed()) { throw new RuntimeException("Query aborted by user"); } else if (client.isGone()) { throw new RuntimeException("Query is gone (server restarted?)"); } else if (client.isFailed()) { QueryResults results = client.finalResults(); String queryId = results.getId(); db.insert(Query.class) .value("datasource", datasource) .value("query_id", queryId) .value("fetch_result_time_string", ZonedDateTime.now().toString()) .value("query_string", query) .execute(); Path dst = getResultFilePath(datasource, queryId, true); QueryError error = results.getError(); String message = format("Query failed (#%s): %s", results.getId(), error.getMessage()); try { try (BufferedWriter bw = Files.newBufferedWriter(dst, StandardCharsets.UTF_8)) { bw.write(message); } } catch (IOException e) { throw new RuntimeException(e); } throw resultsException(results); } } throw new RuntimeException("should not reach"); } private void insertQueryHistory(String datasource, String query, String queryId) { if(!query.toLowerCase().startsWith("show") && !query.toLowerCase().startsWith("explain")) { db.insert(Query.class) .value("datasource", datasource) .value("query_id", queryId) .value("fetch_result_time_string", ZonedDateTime.now().toString()) .value("query_string", query) .execute(); } } private void processData(StatementClient client, String datasource, String queryId, PrestoQueryResult prestoQueryResult, List<String> columns, List<List<String>> rowDataList) { int limit = yanagishimaConfig.getSelectLimit(); Path dst = getResultFilePath(datasource, queryId, false); int lineNumber = 0; try (BufferedWriter bw = Files.newBufferedWriter(dst, StandardCharsets.UTF_8)) { bw.write(String.join("\t", columns)); bw.write("\n"); lineNumber++; while (client.isValid()) { Iterable<List<Object>> data = client.current().getData(); if (data != null) { for(List<Object> row : data) { List<String> columnDataList = new ArrayList<>(); List<Object> tmpColumnDataList = row.stream().collect(Collectors.toList()); for (Object tmpColumnData : tmpColumnDataList) { if (tmpColumnData instanceof Long) { columnDataList.add(((Long) tmpColumnData).toString()); } else { if (tmpColumnData == null) { columnDataList.add(null); } else { columnDataList.add(tmpColumnData.toString()); } } } try { bw.write(String.join("\t", columnDataList)); bw.write("\n"); lineNumber++; } catch (IOException e) { throw new RuntimeException(e); } if (client.getQuery().toLowerCase().startsWith("show") || rowDataList.size() < limit) { 
rowDataList.add(columnDataList); } else { prestoQueryResult.setWarningMessage(String.format("now fetch size is %d. This is more than %d. So, fetch operation stopped.", rowDataList.size(), limit)); } } } client.advance(); } } catch (IOException e) { throw new RuntimeException(e); } prestoQueryResult.setLineNumber(lineNumber); try { long size = Files.size(dst); DataSize rawDataSize = new DataSize(size, DataSize.Unit.BYTE); prestoQueryResult.setRawDataSize(rawDataSize.convertToMostSuccinctDataSize()); } catch (IOException e) { throw new RuntimeException(e); } } private Path getResultFilePath(String datasource, String queryId, boolean error) { String currentPath = new File(".").getAbsolutePath(); String yyyymmdd = queryId.substring(0, 8); File datasourceDir = new File(String.format("%s/result/%s", currentPath, datasource)); if (!datasourceDir.isDirectory()) { datasourceDir.mkdir(); } File yyyymmddDir = new File(String.format("%s/result/%s/%s", currentPath, datasource, yyyymmdd)); if (!yyyymmddDir.isDirectory()) { yyyymmddDir.mkdir(); } if(error) { return Paths.get(String.format("%s/result/%s/%s/%s.err", currentPath, datasource, yyyymmdd, queryId)); } else { return Paths.get(String.format("%s/result/%s/%s/%s.tsv", currentPath, datasource, yyyymmdd, queryId)); } } private StatementClient getStatementClient(String datasource, String query, String userName) { String prestoCoordinatorServer = yanagishimaConfig .getPrestoCoordinatorServer(datasource); String catalog = yanagishimaConfig.getCatalog(datasource); String schema = yanagishimaConfig.getSchema(datasource); String user = null; if(userName == null ) { user = yanagishimaConfig.getUser(); } else { user = userName; } String source = yanagishimaConfig.getSource(); JsonCodec<QueryResults> jsonCodec = jsonCodec(QueryResults.class); ClientSession clientSession = new ClientSession( URI.create(prestoCoordinatorServer), user, source, null, catalog, schema, TimeZone.getDefault().getID(), Locale.getDefault(), new HashMap<String, String>(), null, false, new Duration(2, MINUTES)); return new StatementClient(httpClient, jsonCodec, clientSession, query); } private QueryErrorException resultsException(QueryResults results) { QueryError error = results.getError(); String message = format("Query failed (#%s): %s", results.getId(), error.getMessage()); Throwable cause = (error.getFailureInfo() == null) ? null : error.getFailureInfo().toException(); return new QueryErrorException(results.getId(), error, new SQLException(message, error.getSqlState(), error.getErrorCode(), cause)); } }
remove unused try
src/main/java/yanagishima/service/PrestoServiceImpl.java
remove unused try
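The commit above is described only as "remove unused try" against yanagishima's PrestoServiceImpl, and the removed block itself falls outside this excerpt, so the following is just a generic sketch of that kind of cleanup with made-up method names: a try whose catch merely rethrows the same exception adds nothing and can be dropped.

import java.io.BufferedWriter;
import java.io.IOException;

class ResultWriterSketch {

    // Before: the try/catch only rethrows the same exception, so it is dead weight.
    static void writeLineWithRedundantTry(BufferedWriter bw, String line) throws IOException {
        try {
            bw.write(line);
            bw.write("\n");
        } catch (IOException e) {
            throw e;
        }
    }

    // After: identical behaviour without the wrapper.
    static void writeLine(BufferedWriter bw, String line) throws IOException {
        bw.write(line);
        bw.write("\n");
    }
}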
Java
apache-2.0
d6257cf52e2832228c040947b5cfb3c8a2b0af98
0
ThePreviousOne/tachiyomi,inorichi/mangafeed,NoodleMage/tachiyomi,inorichi/tachiyomi,NoodleMage/tachiyomi,ThePreviousOne/tachiyomi,inorichi/tachiyomi,icanit/mangafeed,NerdNumber9/TachiyomiEH,paronos/tachiyomi,inorichi/mangafeed,NoodleMage/tachiyomi,CarlosEsco/tachiyomi,paronos/tachiyomi,NerdNumber9/TachiyomiEH,icanit/mangafeed,paronos/tachiyomi
package eu.kanade.mangafeed.ui.manga.chapter; import android.content.Intent; import android.os.Bundle; import android.support.v4.content.ContextCompat; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.view.ActionMode; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.CheckBox; import android.widget.ImageView; import java.util.List; import butterknife.Bind; import butterknife.ButterKnife; import eu.kanade.mangafeed.R; import eu.kanade.mangafeed.data.database.models.Chapter; import eu.kanade.mangafeed.data.database.models.Manga; import eu.kanade.mangafeed.data.download.DownloadService; import eu.kanade.mangafeed.event.DownloadStatusEvent; import eu.kanade.mangafeed.ui.base.activity.BaseActivity; import eu.kanade.mangafeed.ui.base.fragment.BaseRxFragment; import eu.kanade.mangafeed.ui.decoration.DividerItemDecoration; import eu.kanade.mangafeed.ui.manga.MangaActivity; import eu.kanade.mangafeed.ui.reader.ReaderActivity; import eu.kanade.mangafeed.util.EventBusHook; import eu.kanade.mangafeed.util.ToastUtil; import nucleus.factory.RequiresPresenter; import rx.Observable; @RequiresPresenter(ChaptersPresenter.class) public class ChaptersFragment extends BaseRxFragment<ChaptersPresenter> implements ActionMode.Callback, ChaptersAdapter.OnItemClickListener { @Bind(R.id.chapter_list) RecyclerView chapters; @Bind(R.id.swipe_refresh) SwipeRefreshLayout swipeRefresh; @Bind(R.id.toolbar_bottom) Toolbar toolbarBottom; @Bind(R.id.action_sort) ImageView sortBtn; @Bind(R.id.action_next_unread) ImageView nextUnreadBtn; @Bind(R.id.action_show_unread) CheckBox readCb; @Bind(R.id.action_show_downloaded) CheckBox downloadedCb; private ChaptersAdapter adapter; private ActionMode actionMode; public static ChaptersFragment newInstance() { return new ChaptersFragment(); } @Override public void onCreate(Bundle savedState) { super.onCreate(savedState); setHasOptionsMenu(true); getPresenter().setIsCatalogueManga(isCatalogueManga()); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View view = inflater.inflate(R.layout.fragment_manga_chapters, container, false); ButterKnife.bind(this, view); // Init RecyclerView and adapter chapters.setLayoutManager(new LinearLayoutManager(getActivity())); chapters.addItemDecoration(new DividerItemDecoration(ContextCompat.getDrawable(this.getContext(), R.drawable.line_divider))); adapter = new ChaptersAdapter(this); chapters.setAdapter(adapter); // Set initial values setReadFilter(); setSortIcon(); // Init listeners swipeRefresh.setOnRefreshListener(this::onFetchChapters); readCb.setOnCheckedChangeListener((arg, isChecked) -> getPresenter().setReadFilter(isChecked)); sortBtn.setOnClickListener(v -> { getPresenter().revertSortOrder(); setSortIcon(); }); nextUnreadBtn.setOnClickListener(v -> { Chapter chapter = getPresenter().getNextUnreadChapter(); if (chapter != null) { openChapter(chapter); } else { ToastUtil.showShort(getContext(), R.string.no_next_chapter); } }); return view; } @Override public void onResume() { super.onResume(); registerForEvents(); } @Override public void onPause() { unregisterForEvents(); super.onPause(); } @Override public void 
onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.chapters, menu); super.onCreateOptionsMenu(menu, inflater); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_refresh: onFetchChapters(); break; } return super.onOptionsItemSelected(item); } public void onNextChapters(List<Chapter> chapters) { closeActionMode(); adapter.setItems(chapters); } public void onFetchChapters() { swipeRefresh.setRefreshing(true); getPresenter().fetchChapters(); } public void onFetchChaptersFinish() { swipeRefresh.setRefreshing(false); } public boolean isCatalogueManga() { return ((MangaActivity) getActivity()).isCatalogueManga(); } protected void openChapter(Chapter chapter) { getPresenter().onOpenChapter(chapter); Intent intent = ReaderActivity.newIntent(getActivity()); startActivity(intent); } @EventBusHook public void onEventMainThread(DownloadStatusEvent event) { Manga manga = getPresenter().getManga(); // If the download status is from another manga, don't bother if (manga != null && event.getChapter().manga_id != manga.id) return; Chapter chapter; for (int i = 0; i < adapter.getItemCount(); i++) { chapter = adapter.getItem(i); if (event.getChapter().id == chapter.id) { chapter.status = event.getStatus(); adapter.notifyItemChanged(i); break; } } } @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { mode.getMenuInflater().inflate(R.menu.chapter_selection, menu); adapter.setMode(ChaptersAdapter.MODE_MULTI); return true; } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { return false; } @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { switch (item.getItemId()) { case R.id.action_select_all: return onSelectAll(); case R.id.action_mark_as_read: return onMarkAsRead(getSelectedChapters()); case R.id.action_mark_as_unread: return onMarkAsUnread(getSelectedChapters()); case R.id.action_download: return onDownload(getSelectedChapters()); case R.id.action_delete: return onDelete(getSelectedChapters()); } return false; } @Override public void onDestroyActionMode(ActionMode mode) { adapter.setMode(ChaptersAdapter.MODE_SINGLE); adapter.clearSelection(); actionMode = null; } private Observable<Chapter> getSelectedChapters() { return Observable.from(adapter.getSelectedItems()) .map(adapter::getItem); } public void closeActionMode() { if (actionMode != null) actionMode.finish(); } protected boolean onSelectAll() { adapter.selectAll(); setContextTitle(adapter.getSelectedItemCount()); actionMode.invalidate(); return true; } protected boolean onMarkAsRead(Observable<Chapter> chapters) { getPresenter().markChaptersRead(chapters, true); return true; } protected boolean onMarkAsUnread(Observable<Chapter> chapters) { getPresenter().markChaptersRead(chapters, false); return true; } protected boolean onDownload(Observable<Chapter> chapters) { DownloadService.start(getActivity()); getPresenter().downloadChapters(chapters); closeActionMode(); return true; } protected boolean onDelete(Observable<Chapter> chapters) { getPresenter().deleteChapters(chapters); closeActionMode(); return true; } @Override public boolean onListItemClick(int position) { if (actionMode != null && adapter.getMode() == ChaptersAdapter.MODE_MULTI) { toggleSelection(position); return true; } else { openChapter(adapter.getItem(position)); return false; } } @Override public void onListItemLongClick(int position) { if (actionMode == null) actionMode = ((BaseActivity) 
getActivity()).startSupportActionMode(this); toggleSelection(position); } private void toggleSelection(int position) { adapter.toggleSelection(position, false); int count = adapter.getSelectedItemCount(); if (count == 0) { actionMode.finish(); } else { setContextTitle(count); actionMode.invalidate(); } } private void setContextTitle(int count) { actionMode.setTitle(getString(R.string.selected_chapters_title, count)); } public void setSortIcon() { if (sortBtn != null) { boolean aToZ = getPresenter().getSortOrder(); sortBtn.setImageResource(!aToZ ? R.drawable.ic_expand_less_white_36dp : R.drawable.ic_expand_more_white_36dp); } } public void setReadFilter() { if (readCb != null) { readCb.setChecked(getPresenter().getReadFilter()); } } }
app/src/main/java/eu/kanade/mangafeed/ui/manga/chapter/ChaptersFragment.java
package eu.kanade.mangafeed.ui.manga.chapter; import android.content.Intent; import android.os.Bundle; import android.support.v4.content.ContextCompat; import android.support.v4.widget.SwipeRefreshLayout; import android.support.v7.view.ActionMode; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.support.v7.widget.Toolbar; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.CheckBox; import android.widget.ImageView; import java.util.List; import butterknife.Bind; import butterknife.ButterKnife; import eu.kanade.mangafeed.R; import eu.kanade.mangafeed.data.database.models.Chapter; import eu.kanade.mangafeed.data.database.models.Manga; import eu.kanade.mangafeed.data.download.DownloadService; import eu.kanade.mangafeed.event.DownloadStatusEvent; import eu.kanade.mangafeed.ui.base.activity.BaseActivity; import eu.kanade.mangafeed.ui.base.fragment.BaseRxFragment; import eu.kanade.mangafeed.ui.decoration.DividerItemDecoration; import eu.kanade.mangafeed.ui.manga.MangaActivity; import eu.kanade.mangafeed.ui.reader.ReaderActivity; import eu.kanade.mangafeed.util.EventBusHook; import eu.kanade.mangafeed.util.ToastUtil; import nucleus.factory.RequiresPresenter; import rx.Observable; @RequiresPresenter(ChaptersPresenter.class) public class ChaptersFragment extends BaseRxFragment<ChaptersPresenter> implements ActionMode.Callback, ChaptersAdapter.OnItemClickListener { @Bind(R.id.chapter_list) RecyclerView chapters; @Bind(R.id.swipe_refresh) SwipeRefreshLayout swipeRefresh; @Bind(R.id.toolbar_bottom) Toolbar toolbarBottom; @Bind(R.id.action_sort) ImageView sortBtn; @Bind(R.id.action_next_unread) ImageView nextUnreadBtn; @Bind(R.id.action_show_unread) CheckBox readCb; @Bind(R.id.action_show_downloaded) CheckBox downloadedCb; private ChaptersAdapter adapter; private ActionMode actionMode; public static ChaptersFragment newInstance() { return new ChaptersFragment(); } @Override public void onCreate(Bundle savedState) { super.onCreate(savedState); setHasOptionsMenu(true); getPresenter().setIsCatalogueManga(isCatalogueManga()); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { // Inflate the layout for this fragment View view = inflater.inflate(R.layout.fragment_manga_chapters, container, false); ButterKnife.bind(this, view); // Init RecyclerView and adapter chapters.setLayoutManager(new LinearLayoutManager(getActivity())); chapters.addItemDecoration(new DividerItemDecoration(ContextCompat.getDrawable(this.getContext(), R.drawable.line_divider))); adapter = new ChaptersAdapter(this); chapters.setAdapter(adapter); // Set initial values setReadFilter(getPresenter().getReadFilter()); setSortIcon(getPresenter().getSortOrder()); // Init listeners swipeRefresh.setOnRefreshListener(this::onFetchChapters); readCb.setOnCheckedChangeListener((arg, isChecked) -> getPresenter().setReadFilter(isChecked)); sortBtn.setOnClickListener(v -> getPresenter().revertSortOrder()); nextUnreadBtn.setOnClickListener(v -> { Chapter chapter = getPresenter().getNextUnreadChapter(); if (chapter != null) { openChapter(chapter); } else { ToastUtil.showShort(getContext(), R.string.no_next_chapter); } }); return view; } @Override public void onResume() { super.onResume(); registerForEvents(); } @Override public void onPause() { unregisterForEvents(); 
super.onPause(); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.chapters, menu); super.onCreateOptionsMenu(menu, inflater); } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { case R.id.action_refresh: onFetchChapters(); break; } return super.onOptionsItemSelected(item); } public void onNextChapters(List<Chapter> chapters) { closeActionMode(); adapter.setItems(chapters); } public void onFetchChapters() { swipeRefresh.setRefreshing(true); getPresenter().fetchChapters(); } public void onFetchChaptersFinish() { swipeRefresh.setRefreshing(false); } public boolean isCatalogueManga() { return ((MangaActivity) getActivity()).isCatalogueManga(); } protected void openChapter(Chapter chapter) { getPresenter().onOpenChapter(chapter); Intent intent = ReaderActivity.newIntent(getActivity()); startActivity(intent); } @EventBusHook public void onEventMainThread(DownloadStatusEvent event) { Manga manga = getPresenter().getManga(); // If the download status is from another manga, don't bother if (manga != null && event.getChapter().manga_id != manga.id) return; Chapter chapter; for (int i = 0; i < adapter.getItemCount(); i++) { chapter = adapter.getItem(i); if (event.getChapter().id == chapter.id) { chapter.status = event.getStatus(); adapter.notifyItemChanged(i); break; } } } @Override public boolean onCreateActionMode(ActionMode mode, Menu menu) { mode.getMenuInflater().inflate(R.menu.chapter_selection, menu); adapter.setMode(ChaptersAdapter.MODE_MULTI); return true; } @Override public boolean onPrepareActionMode(ActionMode mode, Menu menu) { return false; } @Override public boolean onActionItemClicked(ActionMode mode, MenuItem item) { switch (item.getItemId()) { case R.id.action_select_all: return onSelectAll(); case R.id.action_mark_as_read: return onMarkAsRead(getSelectedChapters()); case R.id.action_mark_as_unread: return onMarkAsUnread(getSelectedChapters()); case R.id.action_download: return onDownload(getSelectedChapters()); case R.id.action_delete: return onDelete(getSelectedChapters()); } return false; } @Override public void onDestroyActionMode(ActionMode mode) { adapter.setMode(ChaptersAdapter.MODE_SINGLE); adapter.clearSelection(); actionMode = null; } private Observable<Chapter> getSelectedChapters() { return Observable.from(adapter.getSelectedItems()) .map(adapter::getItem); } public void closeActionMode() { if (actionMode != null) actionMode.finish(); } protected boolean onSelectAll() { adapter.selectAll(); setContextTitle(adapter.getSelectedItemCount()); actionMode.invalidate(); return true; } protected boolean onMarkAsRead(Observable<Chapter> chapters) { getPresenter().markChaptersRead(chapters, true); return true; } protected boolean onMarkAsUnread(Observable<Chapter> chapters) { getPresenter().markChaptersRead(chapters, false); return true; } protected boolean onDownload(Observable<Chapter> chapters) { DownloadService.start(getActivity()); getPresenter().downloadChapters(chapters); closeActionMode(); return true; } protected boolean onDelete(Observable<Chapter> chapters) { getPresenter().deleteChapters(chapters); closeActionMode(); return true; } @Override public boolean onListItemClick(int position) { if (actionMode != null && adapter.getMode() == ChaptersAdapter.MODE_MULTI) { toggleSelection(position); return true; } else { openChapter(adapter.getItem(position)); return false; } } @Override public void onListItemLongClick(int position) { if (actionMode == null) actionMode = 
((BaseActivity) getActivity()).startSupportActionMode(this); toggleSelection(position); } private void toggleSelection(int position) { adapter.toggleSelection(position, false); int count = adapter.getSelectedItemCount(); if (count == 0) { actionMode.finish(); } else { setContextTitle(count); actionMode.invalidate(); } } private void setContextTitle(int count) { actionMode.setTitle(getString(R.string.selected_chapters_title, count)); } public void setSortIcon(boolean aToZ) { if (sortBtn != null) { sortBtn.setImageResource(!aToZ ? R.drawable.ic_expand_less_white_36dp : R.drawable.ic_expand_more_white_36dp); } } public void setReadFilter(boolean onlyUnread) { if (readCb != null) readCb.setChecked(onlyUnread); } }
Fix order button
app/src/main/java/eu/kanade/mangafeed/ui/manga/chapter/ChaptersFragment.java
Fix order button
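In the "Fix order button" diff above, the new ChaptersFragment has the sort button's click listener call revertSortOrder() and then setSortIcon(), and setSortIcon() now reads the order from the presenter instead of taking a boolean parameter. The snippet below is a framework-free sketch of that pattern (toggle state in the presenter, then let the view re-read it); the class and field names are illustrative, not taken from the app.

class SortToggleSketch {

    private boolean ascending = true;   // stands in for the presenter's sort order

    // stands in for ChaptersPresenter#revertSortOrder()
    void revertSortOrder() {
        ascending = !ascending;
    }

    // stands in for ChaptersFragment#setSortIcon(): the view asks for the
    // current state rather than being handed a possibly stale boolean.
    void setSortIcon() {
        System.out.println(ascending ? "icon: expand_more" : "icon: expand_less");
    }

    // what the fixed click listener does: toggle, then refresh immediately
    void onSortButtonClicked() {
        revertSortOrder();
        setSortIcon();
    }
}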
Java
apache-2.0
dd28eb578d2012b221af55b3a35e6d2ec4eed0c9
0
mirkosertic/Bytecoder,mirkosertic/Bytecoder,mirkosertic/Bytecoder,mirkosertic/Bytecoder,mirkosertic/Bytecoder,mirkosertic/Bytecoder
/* * Copyright 2019 Mirko Sertic * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.mirkosertic.bytecoder.core; import de.mirkosertic.bytecoder.unittest.BytecoderUnitTestRunner; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(BytecoderUnitTestRunner.class) public class IfThenElseTest { @Test public void testIfThenElse() { int x = 1; int y = 2; if (y == 2) { for (int k = 0; k< 10; k++) { x = x + 1; } } else { x = x + 3; } y = 6; } private static boolean floatIsDifferent(float f1, float f2, float delta) { if (Float.compare(f1, f2) == 0) { return false; } else { return Math.abs(f1 - f2) > delta; } } private void failNotEquals(String message, float expected, float actual) { throw new AssertionError(); } @Test public void testIf() { String message = "Message"; float expected = 10f; float actual = 10f; float delta = 0; if (floatIsDifferent(expected, actual, delta)) { failNotEquals(message, expected, actual); } } }
core/src/test/java/de/mirkosertic/bytecoder/core/IfThenElseTest.java
/* * Copyright 2019 Mirko Sertic * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.mirkosertic.bytecoder.core; import de.mirkosertic.bytecoder.unittest.BytecoderUnitTestRunner; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(BytecoderUnitTestRunner.class) public class IfThenElseTest { @Test public void testIfThenElse() { int x = 1; int y = 2; if (y == 2) { for (int k = 0; k< 10; k++) { x = x + 1; } } else { x = x + 3; } y = 6; } }
More Tests for If/Then/Else Optimizations
core/src/test/java/de/mirkosertic/bytecoder/core/IfThenElseTest.java
More Tests for If/Then/Else Optimizations
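The new IfThenElseTest above adds a delta-based float comparison (floatIsDifferent) as extra input for the if/then/else optimization tests. Below is a self-contained copy of just that branch logic with a small main method for running it outside the Bytecoder test runner; the main method and its sample values are additions for illustration only.

class FloatCompareSketch {

    // Mirrors floatIsDifferent() from the test: equal per Float.compare means
    // "not different"; otherwise the difference must exceed delta.
    static boolean floatIsDifferent(float f1, float f2, float delta) {
        if (Float.compare(f1, f2) == 0) {
            return false;
        }
        return Math.abs(f1 - f2) > delta;
    }

    public static void main(String[] args) {
        System.out.println(floatIsDifferent(10f, 10f, 0f));     // false
        System.out.println(floatIsDifferent(10f, 10.5f, 0.1f)); // true
    }
}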
Java
apache-2.0
e962e4e065a29f08eb46bde0305368d8869e7749
0
PRIDE-Archive/web-service-model
package uk.ac.ebi.pride.archive.web.service.model.file; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.wordnik.swagger.annotations.ApiModel; import com.wordnik.swagger.annotations.ApiModelProperty; import uk.ac.ebi.pride.archive.dataprovider.file.ProjectFileSource; import uk.ac.ebi.pride.archive.dataprovider.file.ProjectFileType; import java.io.Serializable; import java.net.URL; /** * @author Florian Reisinger * @since 0.1.6 */ @ApiModel(value = "Details of a dataset file.", description = "Details for one of the dataset files.") @SuppressWarnings("UnusedDeclaration") @JsonIgnoreProperties(ignoreUnknown = true) public class FileDetail implements Serializable { @ApiModelProperty(dataType = "the project the file belongs to") private String projectAccession; @ApiModelProperty(dataType = "the assay the file belongs to") private String assayAccession; @ApiModelProperty(dataType = "string") private ProjectFileType fileType; @ApiModelProperty(value = "SUBMITTED (part of the original dataset) or GENERATED (added to the submission by PRIDE)", dataType = "string") private ProjectFileSource fileSource; @ApiModelProperty(value = "size in bytes") private long fileSize; @ApiModelProperty(value = "the name of the file") private String fileName; @ApiModelProperty(value = "public FTP download link", dataType = "string") private URL ftpDownloadLink; @ApiModelProperty(value = "public Aspera download link", dataType = "string") private String asperaDownloadLink; public String getProjectAccession() { return projectAccession; } public void setProjectAccession(String projectAccession) { this.projectAccession = projectAccession; } public String getAssayAccession() { return assayAccession; } public void setAssayAccession(String assayAccession) { this.assayAccession = assayAccession; } public ProjectFileType getFileType() { return fileType; } public void setFileType(ProjectFileType fileType) { this.fileType = fileType; } public ProjectFileSource getFileSource() { return fileSource; } public void setFileSource(ProjectFileSource fileSource) { this.fileSource = fileSource; } public long getFileSize() { return fileSize; } public void setFileSize(long fileSize) { this.fileSize = fileSize; } public String getFileName() { return fileName; } public void setFileName(String fileName) { this.fileName = fileName; } public URL getFtpDownloadLink() { return ftpDownloadLink; } public void setFtpDownloadLink(URL ftpDownloadLink) { this.ftpDownloadLink = ftpDownloadLink; } public String getAsperaDownloadLink() { return asperaDownloadLink; } public void setAsperaDownloadLink(String asperaDownloadLink) { this.asperaDownloadLink = asperaDownloadLink; } }
src/main/java/uk/ac/ebi/pride/archive/web/service/model/file/FileDetail.java
package uk.ac.ebi.pride.archive.web.service.model.file; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.wordnik.swagger.annotations.ApiModel; import com.wordnik.swagger.annotations.ApiModelProperty; import uk.ac.ebi.pride.archive.dataprovider.file.ProjectFileSource; import uk.ac.ebi.pride.archive.dataprovider.file.ProjectFileType; import java.io.Serializable; import java.net.URL; /** * @author Florian Reisinger * @since 0.1.6 */ @ApiModel(value = "Details of a dataset file.", description = "Details for one of the dataset files.") @SuppressWarnings("UnusedDeclaration") @JsonIgnoreProperties(ignoreUnknown = true) public class FileDetail implements Serializable { @ApiModelProperty(dataType = "the project the file belongs to") private String projectAccession; @ApiModelProperty(dataType = "the assay the file belongs to") private String assayAccession; @ApiModelProperty(dataType = "string") private ProjectFileType fileType; @ApiModelProperty(value = "SUBMITTED (part of the original dataset) or GENERATED (added to the submission by PRIDE)", dataType = "string") private ProjectFileSource fileSource; @ApiModelProperty(value = "size in bytes") private long fileSize; @ApiModelProperty(value = "the name of the file") private String fileName; @ApiModelProperty(value = "public FTP download link", dataType = "string") private URL ftpDownloadLink; @ApiModelProperty(value = "public Aspera download link", dataType = "string") private String asperaDownloadLink; public String getProjectAccession() { return projectAccession; } public void setProjectAccession(String projectAccession) { this.projectAccession = projectAccession; } public String getAssayAccession() { return assayAccession; } public void setAssayAccession(String assayAccession) { this.assayAccession = assayAccession; } public ProjectFileType getFileType() { return fileType; } public void setFileType(ProjectFileType fileType) { this.fileType = fileType; } public ProjectFileSource getFileSource() { return fileSource; } public void setFileSource(ProjectFileSource fileSource) { this.fileSource = fileSource; } public long getFileSize() { return fileSize; } public void setFileSize(long fileSize) { this.fileSize = fileSize; } public String getFileName() { return fileName; } public void setFileName(String fileName) { this.fileName = fileName; } public URL getFtpDownloadLink() { return ftpDownloadLink; } public void setFtpDownloadLink(URL ftpDownloadLink) { this.ftpDownloadLink = ftpDownloadLink; } public String getAsperaDownloadLink() { return asperaDownloadLink; } public void setAsperaDownloadLink(String asperaDownloadLink) { this.asperaDownloadLink = asperaDownloadLink.toLowerCase(); } }
remove converting aspera link to lower case
src/main/java/uk/ac/ebi/pride/archive/web/service/model/file/FileDetail.java
remove converting aspera link to lower case
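The FileDetail change above is a one-liner: setAsperaDownloadLink() now stores the value verbatim instead of lower-casing it. A plausible reason, not stated in the record, is that case can be significant in download paths, so normalising it risks breaking the link. The sketch below uses a made-up link purely for illustration.

class AsperaLinkSketch {

    private String asperaDownloadLink;

    public void setAsperaDownloadLink(String link) {
        this.asperaDownloadLink = link;   // store as received, no toLowerCase()
    }

    public static void main(String[] args) {
        AsperaLinkSketch detail = new AsperaLinkSketch();
        detail.setAsperaDownloadLink("era-fasp@fasp.example.org:pride/Archive/2014/PXD000001/README.txt");
        System.out.println(detail.asperaDownloadLink);
    }
}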
Java
apache-2.0
998a68e4d9b00888bb88b28ed8e578eef137477a
0
SoftTech2018/06_del2
package ftpMain; import java.io.IOException; import java.util.Scanner; import functionality.IZyboTransmitter; import functionality.ZyboTransmitter; public class Main { public static void main(String[] args) throws NumberFormatException, IOException { int port; String host, user, pass; if (args.length == 4) { host = args[0]; port = Integer.parseInt(args[1]); user = args[2]; pass = args[3]; } else { port = 21; host = "ftp.missekat.dk"; user = "missekat.dk"; pass = "jakobmedc"; } Scanner menuScan = new Scanner(System.in); IFTPclient ftpC = new FTPclient(); IMenu menu = new Menu(menuScan); IZyboTransmitter zbtr = new ZyboTransmitter(); IMenuController menuCon = new MenuController(menu, zbtr, ftpC, host, port, user, pass); } }
06_del2/ftpClient/ftpMain/Main.java
package ftpMain; import java.io.IOException; import java.util.Scanner; import functionality.IZyboTransmitter; import functionality.ZyboTransmitter; public class Main { public static void main(String[] args) throws NumberFormatException, IOException { // int port; // String host; // // if (args.length == 2){ // port = Integer.parseInt(args[1]); // host = args[0]; // } // else { // port = 8000; // host = "localhost"; // } Scanner menuScan = new Scanner(System.in); System.out.println("Indtast host (FTP-server)"); String host = menuScan.nextLine(); System.out.println("Indtast port"); String stringport = menuScan.nextLine(); int port = Integer.parseInt(stringport); System.out.println("Indtast bruger: "); String user = menuScan.nextLine(); System.out.println("Indtast password: "); String pass = menuScan.nextLine(); IFTPclient ftpC = new FTPclient(); IMenu menu = new Menu(menuScan); IZyboTransmitter zbtr = new ZyboTransmitter(); IMenuController menuCon = new MenuController(menu, zbtr, ftpC, host, port, user, pass); } }
Addition: Can accept arguments
06_del2/ftpClient/ftpMain/Main.java
Addition: Can accept arguments
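The commit above replaces the interactive Scanner prompts in Main with command-line arguments plus hard-coded defaults when no arguments are supplied. The sketch below condenses that argument handling; the fallback values here are neutral placeholders rather than the credentials hard-coded in the repository.

class FtpArgsSketch {

    public static void main(String[] args) {
        String host;
        int port;
        String user;
        String pass;

        if (args.length == 4) {           // host port user pass
            host = args[0];
            port = Integer.parseInt(args[1]);
            user = args[2];
            pass = args[3];
        } else {                          // no arguments: fall back to defaults
            host = "localhost";
            port = 21;
            user = "anonymous";
            pass = "";
        }

        System.out.printf("connecting to %s:%d as %s%n", host, port, user);
    }
}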
Java
apache-2.0
0b3c7a1325b1c777971a540718479ddc21d5501a
0
danc86/jena-core,danc86/jena-core
/****************************************************************** * File: ReasonerVocabulary.java * Created by: Dave Reynolds * Created on: 04-Jun-2003 * * (c) Copyright 2003, Hewlett-Packard Company, all rights reserved. * [See end of file] * $Id: ReasonerVocabulary.java,v 1.12 2003-08-25 20:58:53 der Exp $ *****************************************************************/ package com.hp.hpl.jena.vocabulary; import com.hp.hpl.jena.rdf.model.*; import com.hp.hpl.jena.reasoner.ReasonerRegistry; import com.hp.hpl.jena.reasoner.rulesys.RDFSRuleReasoner; /** * A collection of RDF terms used in driving or configuring some of the * builtin reasoners. * * @author <a href="mailto:der@hplb.hpl.hp.com">Dave Reynolds</a> * @version $Revision: 1.12 $ on $Date: 2003-08-25 20:58:53 $ */ public class ReasonerVocabulary { /** The namespace used for system level descriptive properties of any reasoner */ public static String JenaReasonerNS = "http://jena.hpl.hp.com/2003/JenaReasoner#"; /** The RDF class to which all Reasoners belong */ public static Resource ReasonerClass = ResourceFactory.createResource(JenaReasonerNS + "ReasonerClass"); /** Reasoner description property: name of the reasoner */ public static Property nameP; /** Reasoner description property: text description of the reasoner */ public static Property descriptionP; /** Reasoner description property: version of the reasoner */ public static Property versionP; /** Reasoner description property: a schema property supported by the reasoner */ public static Property supportsP; /** Reasoner description property: a configuration property supported by the reasoner */ public static Property configurationP; /** The property that represents the direct/minimal version of the subClassOf relationship */ public static Property directSubClassOf; /** The property that represents the direct/minimal version of the subPropertyOf relationship */ public static Property directSubPropertyOf; /** Base URI used for configuration properties for rule reasoners */ public static final String PropURI = "http://jena.hpl.hp.com/2003/RuleReasoner"; /** Property used to configure the derivation logging behaviour of a reasoner. * Set to "true" to enable logging of derivations. */ public static Property PROPderivationLogging; /** Property used to configure the tracing behaviour of a reasoner. * Set to "true" to enable internal trace message to be sent to Logger.info . */ public static Property PROPtraceOn; /** Property used to set the mode of a generic rule reasoner. * Valid values are the strings "forward", "backward" or "hybrid" */ public static Property PROPruleMode; /** Property used to attach a file a rules to a generic rule reasoner. * Value should a URI giving the rule set to use. */ public static Property PROPruleSet; /** Property used to switch on/off OWL schema translation on a generic rule reasoner. * Value should be "true" to enable OWL translation */ public static Property PROPenableOWLTranslation; /** Property used to switch on/off use of the dedicated subclass/subproperty * caching in a generic rule reasoner. Set to "true" to enable caching. */ public static Property PROPenableTGCCaching; /** Property used to switch on/off scanning of data for container membership * properties in RDFS preprocessing. */ public static Property PROPenableCMPScan; /** Property used to switch to different RDFS processing levles. The * legal levels are "default", "simple", and "full". 
*/ public static Property PROPsetRDFSLevel; /** Constant for PROPsetRDFSLevel - default behaviour */ public static String RDFS_DEFAULT = RDFSRuleReasoner.DEFAULT_RULES; /** Constant for PROPsetRDFSLevel - fullest implementation supported. */ public static String RDFS_FULL = RDFSRuleReasoner.FULL_RULES; /** Constant for PROPsetRDFSLevel - simplified, higher performance rules. */ public static String RDFS_SIMPLE = RDFSRuleReasoner.SIMPLE_RULES; /** A namespace used for Rubrik specific properties */ public static final String RBNamespace = "urn:x-hp-jena:rubrik/"; /** Property used to switch on validation in owl ruleset */ public static final Property RB_VALIDATION = ResourceFactory.createProperty(RBNamespace, "validation"); /** Property used for validation reports in owl ruleset */ public static final Property RB_VALIDATION_REPORT = ResourceFactory.createProperty(RBNamespace, "violation"); // -------------------------------------------------------------------- // Method versions of key namespaces which are more initializer friendly /** Return namespace used for Rubric specific properties */ public static final String getRBNamespace() { return RBNamespace; } /** Return namespace used for system level descriptive properties of any reasoner */ public static final String getJenaReasonerNS() { return JenaReasonerNS; } // -------------------------------------------------------------------- // Initializers static { try { nameP = ResourceFactory.createProperty(JenaReasonerNS, "name"); descriptionP = ResourceFactory.createProperty(JenaReasonerNS, "description"); versionP = ResourceFactory.createProperty(JenaReasonerNS, "version"); supportsP = ResourceFactory.createProperty(JenaReasonerNS, "supports"); configurationP = ResourceFactory.createProperty(JenaReasonerNS, "configurationProperty"); directSubClassOf = ResourceFactory.createProperty(ReasonerRegistry.makeDirect(RDFS.subClassOf.getNode()).getURI()); directSubPropertyOf = ResourceFactory.createProperty(ReasonerRegistry.makeDirect(RDFS.subPropertyOf.getNode()).getURI()); PROPderivationLogging = ResourceFactory.createProperty(PropURI+"#", "derivationLogging"); PROPtraceOn = ResourceFactory.createProperty(PropURI+"#", "traceOn"); PROPruleMode = ResourceFactory.createProperty(PropURI+"#", "ruleMode"); PROPruleSet = ResourceFactory.createProperty(PropURI+"#", "ruleSet"); PROPenableOWLTranslation = ResourceFactory.createProperty(PropURI+"#", "enableOWLTranslation"); PROPenableTGCCaching = ResourceFactory.createProperty(PropURI+"#", "enableTGCCaching"); PROPenableCMPScan = ResourceFactory.createProperty(PropURI+"#", "enableCMPScan"); PROPsetRDFSLevel = ResourceFactory.createProperty(PropURI+"#", "setRDFSLevel"); } catch (Exception e) { System.err.println("Initialization error: " + e); e.printStackTrace(System.err); } } } /* (c) Copyright Hewlett-Packard Company 2003 All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
src/com/hp/hpl/jena/vocabulary/ReasonerVocabulary.java
/****************************************************************** * File: ReasonerVocabulary.java * Created by: Dave Reynolds * Created on: 04-Jun-2003 * * (c) Copyright 2003, Hewlett-Packard Company, all rights reserved. * [See end of file] * $Id: ReasonerVocabulary.java,v 1.11 2003-08-24 21:20:19 der Exp $ *****************************************************************/ package com.hp.hpl.jena.vocabulary; import com.hp.hpl.jena.rdf.model.*; import com.hp.hpl.jena.reasoner.ReasonerRegistry; import com.hp.hpl.jena.reasoner.rulesys.RDFSRuleReasoner; /** * A collection of RDF terms used in driving or configuring some of the * builtin reasoners. * * @author <a href="mailto:der@hplb.hpl.hp.com">Dave Reynolds</a> * @version $Revision: 1.11 $ on $Date: 2003-08-24 21:20:19 $ */ public class ReasonerVocabulary { /** The namespace used for system level descriptive properties of any reasoner */ public static String JenaReasonerNS = "http://jena.hpl.hp.com/2003/JenaReasoner#"; /** The RDF class to which all Reasoners belong */ public static Resource ReasonerClass = ResourceFactory.createResource(JenaReasonerNS + "ReasonerClass"); /** Reasoner description property: name of the reasoner */ public static Property nameP; /** Reasoner description property: text description of the reasoner */ public static Property descriptionP; /** Reasoner description property: version of the reasoner */ public static Property versionP; /** Reasoner description property: a schema property supported by the reasoner */ public static Property supportsP; /** Reasoner description property: a configuration property supported by the reasoner */ public static Property configurationP; /** The property that represents the direct/minimal version of the subClassOf relationship */ public static Property directSubClassOf; /** The property that represents the direct/minimal version of the subPropertyOf relationship */ public static Property directSubPropertyOf; /** Base URI used for configuration properties for rule reasoners */ public static final String PropURI = "http://jena.hpl.hp.com/2003/RuleReasoner"; /** Property used to configure the derivation logging behaviour of a reasoner. * Set to "true" to enable logging of derivations. */ public static Property PROPderivationLogging; /** Property used to configure the tracing behaviour of a reasoner. * Set to "true" to enable internal trace message to be sent to Logger.info . */ public static Property PROPtraceOn; /** Property used to set the mode of a generic rule reasoner. * Valid values are the strings "forward", "backward" or "hybrid" */ public static Property PROPruleMode; /** Property used to attach a file a rules to a generic rule reasoner. * Value should a URI giving the rule set to use. */ public static Property PROPruleSet; /** Property used to switch on/off OWL schema translation on a generic rule reasoner. * Value should be "true" to enable OWL translation */ public static Property PROPenableOWLTranslation; /** Property used to switch on/off use of the dedicated subclass/subproperty * caching in a generic rule reasoner. Set to "true" to enable caching. */ public static Property PROPenableTGCCaching; /** Property used to switch on/off scanning of data for container membership * properties in RDFS preprocessing. */ public static Property PROPenableCMPScan; /** Property used to switch to different RDFS processing levles. The * legal levels are "default", "simple", and "full". 
*/ public static Property PROPsetRDFSLevel; /** Constant for PROPsetRDFSLevel default */ public static String RDFS_DEFAULT = RDFSRuleReasoner.DEFAULT_RULES; /** Constant for PROPsetRDFSLevel default */ public static String RDFS_FULL = RDFSRuleReasoner.FULL_RULES; /** Constant for PROPsetRDFSLevel default */ public static String RDFS_SIMPLE = RDFSRuleReasoner.SIMPLE_RULES; /** A namespace used for Rubric specific properties */ public static final String RBNamespace = "urn:x-hp-jena:rubrik/"; /** Property used to switch on validation in owl ruleset */ public static final Property RB_VALIDATION = ResourceFactory.createProperty(RBNamespace, "validation"); /** Property used for validation reports in owl ruleset */ public static final Property RB_VALIDATION_REPORT = ResourceFactory.createProperty(RBNamespace, "violation"); // -------------------------------------------------------------------- // Method versions of key namespaces which are more initializer friendly /** Return namespace used for Rubric specific properties */ public static final String getRBNamespace() { return RBNamespace; } /** Return namespace used for system level descriptive properties of any reasoner */ public static final String getJenaReasonerNS() { return JenaReasonerNS; } // -------------------------------------------------------------------- // Initializers static { try { nameP = ResourceFactory.createProperty(JenaReasonerNS, "name"); descriptionP = ResourceFactory.createProperty(JenaReasonerNS, "description"); versionP = ResourceFactory.createProperty(JenaReasonerNS, "version"); supportsP = ResourceFactory.createProperty(JenaReasonerNS, "supports"); configurationP = ResourceFactory.createProperty(JenaReasonerNS, "configurationProperty"); directSubClassOf = ResourceFactory.createProperty(ReasonerRegistry.makeDirect(RDFS.subClassOf.getNode()).getURI()); directSubPropertyOf = ResourceFactory.createProperty(ReasonerRegistry.makeDirect(RDFS.subPropertyOf.getNode()).getURI()); PROPderivationLogging = ResourceFactory.createProperty(PropURI+"#", "derivationLogging"); PROPtraceOn = ResourceFactory.createProperty(PropURI+"#", "traceOn"); PROPruleMode = ResourceFactory.createProperty(PropURI+"#", "ruleMode"); PROPruleSet = ResourceFactory.createProperty(PropURI+"#", "ruleSet"); PROPenableOWLTranslation = ResourceFactory.createProperty(PropURI+"#", "enableOWLTranslation"); PROPenableTGCCaching = ResourceFactory.createProperty(PropURI+"#", "enableTGCCaching"); PROPenableCMPScan = ResourceFactory.createProperty(PropURI+"#", "enableCMPScan"); PROPsetRDFSLevel = ResourceFactory.createProperty(PropURI+"#", "setRDFSLevel"); } catch (Exception e) { System.err.println("Initialization error: " + e); e.printStackTrace(System.err); } } } /* (c) Copyright Hewlett-Packard Company 2003 All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
Type fix git-svn-id: 227c23bb629cf7bef445105b977924772e49ae4f@1110732 13f79535-47bb-0310-9956-ffa450edef68
src/com/hp/hpl/jena/vocabulary/ReasonerVocabulary.java
Type fix
Java
apache-2.0
c75de9960ec905aa9f01c97e30cd20063776a37d
0
biezhi/blade,biezhi/blade
/** * Copyright (c) 2018, biezhi 王爵 nice (biezhi.me@gmail.com) * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.blade.server.netty; import com.blade.mvc.handler.ExceptionHandler; import com.blade.mvc.http.HttpRequest; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.handler.codec.http.*; import lombok.extern.slf4j.Slf4j; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * Merge Netty HttpObject as {@link HttpRequest} * * @author biezhi * 2018/10/15 */ @Slf4j public class MergeRequestHandler extends SimpleChannelInboundHandler<HttpObject> { private HttpRequest httpRequest; @Override protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) { if (msg instanceof io.netty.handler.codec.http.HttpRequest) { httpRequest = new HttpRequest(); httpRequest.setNettyRequest((io.netty.handler.codec.http.HttpRequest) msg); return; } if (null != httpRequest && msg instanceof HttpContent) { httpRequest.appendContent((HttpContent) msg); } if (msg instanceof LastHttpContent) { if (null != httpRequest) { ctx.fireChannelRead(httpRequest); } else { ctx.fireChannelRead(msg); } } } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { if (!ExceptionHandler.isResetByPeer(cause)) { log.error(cause.getMessage(), cause); FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, HttpResponseStatus.valueOf(500)); ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE); } } }
src/main/java/com/blade/server/netty/MergeRequestHandler.java
/** * Copyright (c) 2018, biezhi 王爵 nice (biezhi.me@gmail.com) * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.blade.server.netty; import com.blade.mvc.handler.ExceptionHandler; import com.blade.mvc.http.HttpRequest; import io.netty.channel.ChannelFutureListener; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.SimpleChannelInboundHandler; import io.netty.handler.codec.http.*; import lombok.extern.slf4j.Slf4j; import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1; /** * Merge Netty HttpObject as {@link HttpRequest} * * @author biezhi * 2018/10/15 */ @Slf4j public class MergeRequestHandler extends SimpleChannelInboundHandler<HttpObject> { private HttpRequest httpRequest; @Override protected void channelRead0(ChannelHandlerContext ctx, HttpObject msg) { if (msg instanceof io.netty.handler.codec.http.HttpRequest) { httpRequest = new HttpRequest(); httpRequest.setNettyRequest((io.netty.handler.codec.http.HttpRequest) msg); return; } if (msg instanceof HttpContent) { httpRequest.appendContent((HttpContent) msg); } if (msg instanceof LastHttpContent) { if (null != httpRequest) { ctx.fireChannelRead(httpRequest); } else { ctx.fireChannelRead(msg); } } } @Override public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) { if (!ExceptionHandler.isResetByPeer(cause)) { log.error(cause.getMessage(), cause); FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, HttpResponseStatus.valueOf(500)); ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE); } } }
:bug: fixed reading http body incomplete
src/main/java/com/blade/server/netty/MergeRequestHandler.java
:bug: fixed reading http body incomplete
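The MergeRequestHandler fix above adds a null guard so that HttpContent chunks are only appended once a request head has actually been seen. The snippet below is a Netty-free sketch of that aggregation guard; the class and method names are invented for the example.

class RequestAggregatorSketch {

    private StringBuilder current;        // stands in for the in-flight HttpRequest

    void onRequestHead(String headLine) {
        current = new StringBuilder(headLine).append('\n');
    }

    void onBodyChunk(String chunk) {
        if (current != null) {            // the guard the fix introduces
            current.append(chunk);
        }
    }

    String onLastChunk() {
        String merged = (current != null) ? current.toString() : null;
        current = null;                   // ready for the next request
        return merged;
    }
}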
Java
bsd-2-clause
e8622afe5e1cda79d70d3b712b565e4d479b691d
0
scijava/scijava-common
/* * #%L * SciJava Common shared library for SciJava software. * %% * Copyright (C) 2009 - 2014 Board of Regents of the University of * Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck * Institute of Molecular Cell Biology and Genetics. * %% * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * #L% */ package org.scijava.test; import java.io.File; import java.io.IOException; import java.net.URL; import java.util.AbstractMap; import java.util.Map; import org.scijava.util.ClassUtils; import org.scijava.util.FileUtils; /** * A bunch of helpful functions for unit tests. * * @author Johannes Schindelin */ public class TestUtils { /** * Makes a temporary directory for use with unit tests. * <p> * When the unit test runs in a Maven context, the temporary directory will be * created in the <i>target/</i> directory corresponding to the calling class * instead of <i>/tmp/</i>. * </p> * * @param prefix the prefix for the directory's name * @return the reference to the newly-created temporary directory * @throws IOException */ public static File createTemporaryDirectory(final String prefix) throws IOException { final Map.Entry<Class<?>, String> calling = getCallingCodeLocation(null); return createTemporaryDirectory(prefix, calling.getKey(), calling.getValue()); } /** * Makes a temporary directory for use with unit tests. * <p> * When the unit test runs in a Maven context, the temporary directory will be * created in the corresponding <i>target/</i> directory instead of * <i>/tmp/</i>. * </p> * * @param prefix the prefix for the directory's name * @param forClass the class for context (to determine whether there's a * <i>target/<i> directory) * @return the reference to the newly-created temporary directory * @throws IOException */ public static File createTemporaryDirectory(final String prefix, final Class<?> forClass) throws IOException { return createTemporaryDirectory(prefix, forClass, ""); } /** * Makes a temporary directory for use with unit tests. * <p> * When the unit test runs in a Maven context, the temporary directory will be * created in the corresponding <i>target/</i> directory instead of * <i>/tmp/</i>. 
* </p> * * @param prefix the prefix for the directory's name * @param forClass the class for context (to determine whether there's a * <i>target/<i> directory) * @param suffix the suffix for the directory's name * @return the reference to the newly-created temporary directory * @throws IOException */ public static File createTemporaryDirectory(final String prefix, final Class<?> forClass, final String suffix) throws IOException { final URL directory = ClassUtils.getLocation(forClass); if (directory != null && "file".equals(directory.getProtocol())) { final String path = directory.getPath(); if (path != null && path.endsWith("/target/test-classes/")) { final File baseDirectory = new File(path.substring(0, path.length() - 13)); final File file = new File(baseDirectory, prefix + suffix); if (file.exists()) FileUtils.deleteRecursively(file); if (!file.mkdir()) throw new IOException("Could not make directory " + file); return file; } } return FileUtils.createTemporaryDirectory(prefix, suffix); } /** * Returns the class of the caller (excluding the specified class). * <p> * Sometimes it is convenient to determine the caller's context, e.g. to * determine whether running in a maven-surefire-plugin context (in which case * the location of the caller's class would end in * <i>target/test-classes/</i>). * </p> * * @param excluding the class to exclude (or null) * @return the class of the caller */ public static Class<?> getCallingClass(final Class<?> excluding) { return getCallingCodeLocation(excluding).getKey(); } /** * Returns the class and the method/line number of the caller (excluding the specified class). * <p> * Sometimes it is convenient to determine the caller's context, e.g. to * determine whether running in a maven-surefire-plugin context (in which case * the location of the caller's class would end in * <i>target/test-classes/</i>). * </p> * * @param excluding the class to exclude (or null) * @return the class of the caller and the method and line number */ public static Map.Entry<Class<?>, String> getCallingCodeLocation(final Class<?> excluding) { final String thisClassName = TestUtils.class.getName(); final String thisClassName2 = excluding == null ? null : excluding.getName(); final Thread currentThread = Thread.currentThread(); for (final StackTraceElement element : currentThread.getStackTrace()) { final String thatClassName = element.getClassName(); if (thatClassName == null || thatClassName.equals(thisClassName) || thatClassName.equals(thisClassName2) || thatClassName.startsWith("java.lang.")) { continue; } final ClassLoader loader = currentThread.getContextClassLoader(); final Class<?> clazz; try { clazz = loader.loadClass(element.getClassName()); } catch (ClassNotFoundException e) { throw new UnsupportedOperationException("Could not load " + element.getClassName() + " with the current context class loader (" + loader + ")!"); } final String suffix = element.getMethodName() + "-L" + element.getLineNumber(); return new AbstractMap.SimpleEntry<Class<?>, String>(clazz, suffix); } throw new UnsupportedOperationException("No calling class outside " + thisClassName + " found!"); } }
src/main/java/org/scijava/test/TestUtils.java
/*
 * #%L
 * SciJava Common shared library for SciJava software.
 * %%
 * Copyright (C) 2009 - 2014 Board of Regents of the University of
 * Wisconsin-Madison, Broad Institute of MIT and Harvard, and Max Planck
 * Institute of Molecular Cell Biology and Genetics.
 * %%
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 * #L%
 */

package org.scijava.test;

import java.io.File;
import java.io.IOException;
import java.net.URL;

import org.scijava.util.ClassUtils;
import org.scijava.util.FileUtils;

/**
 * A bunch of helpful functions for unit tests.
 *
 * @author Johannes Schindelin
 */
public class TestUtils {

    /**
     * Makes a temporary directory for use with unit tests.
     * <p>
     * When the unit test runs in a Maven context, the temporary directory will be
     * created in the <i>target/</i> directory corresponding to the calling class
     * instead of <i>/tmp/</i>.
     * </p>
     *
     * @param prefix the prefix for the directory's name
     * @return the reference to the newly-created temporary directory
     * @throws IOException
     */
    public static File createTemporaryDirectory(final String prefix)
        throws IOException
    {
        return createTemporaryDirectory(prefix, getCallingClass(null));
    }

    /**
     * Makes a temporary directory for use with unit tests.
     * <p>
     * When the unit test runs in a Maven context, the temporary directory will be
     * created in the corresponding <i>target/</i> directory instead of
     * <i>/tmp/</i>.
     * </p>
     *
     * @param prefix the prefix for the directory's name
     * @param forClass the class for context (to determine whether there's a
     *          <i>target/</i> directory)
     * @return the reference to the newly-created temporary directory
     * @throws IOException
     */
    public static File createTemporaryDirectory(final String prefix,
        final Class<?> forClass) throws IOException
    {
        final URL directory = ClassUtils.getLocation(forClass);
        if (directory != null && "file".equals(directory.getProtocol())) {
            final String path = directory.getPath();
            if (path != null && path.endsWith("/target/test-classes/")) {
                final File baseDirectory =
                    new File(path.substring(0, path.length() - 13));
                final File file = File.createTempFile(prefix, "", baseDirectory);
                if (file.delete() && file.mkdir()) return file;
            }
        }
        return FileUtils.createTemporaryDirectory(prefix, "");
    }

    /**
     * Returns the class of the caller (excluding the specified class).
     * <p>
     * Sometimes it is convenient to determine the caller's context, e.g. to
     * determine whether running in a maven-surefire-plugin context (in which case
     * the location of the caller's class would end in
     * <i>target/test-classes/</i>).
     * </p>
     *
     * @param excluding the class to exclude (or null)
     * @return the class of the caller
     */
    public static Class<?> getCallingClass(final Class<?> excluding) {
        final String thisClassName = TestUtils.class.getName();
        final String thisClassName2 = excluding == null ? null : excluding.getName();
        final Thread currentThread = Thread.currentThread();
        for (final StackTraceElement element : currentThread.getStackTrace()) {
            final String thatClassName = element.getClassName();
            if (thatClassName == null || thatClassName.equals(thisClassName) ||
                thatClassName.equals(thisClassName2) ||
                thatClassName.startsWith("java.lang."))
            {
                continue;
            }
            final ClassLoader loader = currentThread.getContextClassLoader();
            try {
                return loader.loadClass(element.getClassName());
            }
            catch (ClassNotFoundException e) {
                throw new UnsupportedOperationException("Could not load " +
                    element.getClassName() + " with the current context class loader (" +
                    loader + ")!");
            }
        }
        throw new UnsupportedOperationException("No calling class outside " +
            thisClassName + " found!");
    }

}
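The TestUtils class above is easiest to understand from the calling side. The following is a minimal usage sketch, not part of the original commit: it assumes JUnit 4 is on the test classpath, and the test class and file names are hypothetical.

import static org.junit.Assert.assertTrue;

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

import org.junit.Test;
import org.scijava.test.TestUtils;

/** Hypothetical example of how a unit test might use TestUtils. */
public class TemporaryDirectoryExampleTest {

    @Test
    public void createsScratchSpace() throws IOException {
        // In a Maven build this lands under the caller's target/ directory;
        // otherwise it falls back to FileUtils.createTemporaryDirectory.
        final File tmp = TestUtils.createTemporaryDirectory("example-");
        assertTrue(tmp.isDirectory());

        // The directory is ordinary scratch space for the test.
        final File data = new File(tmp, "data.txt");
        final FileWriter writer = new FileWriter(data);
        try {
            writer.write("hello");
        }
        finally {
            writer.close();
        }
        assertTrue(data.exists());
    }
}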
Port TestUtils enhancements from MiniMaven MiniMaven already used a fork of SciJava common's TestUtils class, and even enhanced the naming of the temporary directories. Let's benefit from those improvements everywhere. Signed-off-by: Johannes Schindelin <53fb8db7833fca1e1746dd8592f61048d350f64c@gmx.de>
src/main/java/org/scijava/test/TestUtils.java
Port TestUtils enhancements from MiniMaven
Java
bsd-3-clause
1886a7cb2ed9a4a7c9136f23b98b0d0d0922c7a5
0
lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,lockss/lockss-daemon,lockss/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon,edina/lockss-daemon
/* * $Id$ */ /* Copyright (c) 2000-2014 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.util; import java.util.*; import java.io.*; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.text.*; import java.text.Normalizer.Form; import java.lang.reflect.*; import java.math.BigInteger; import org.apache.oro.text.regex.*; import org.apache.commons.lang3.StringUtils; /** * This is a class to contain generic string utilities * */ public class StringUtil { private static final String DEFAULT_COLLECTION_SEPARATOR = ", "; private static final String NULL_OBJECT_PRINTABLE_TEXT = "(null)"; private static final Logger log = Logger.getLogger(StringUtil.class); /** * Find the longest common prefix of a pair of strings. Case sensitive. * @param s1 a string * @param s2 another string * @return the longest common prefix, which may be the emopty string */ public static String commonPrefix(String s1, String s2) { char[] c1 = s1.toCharArray(); char[] c2 = s2.toCharArray(); StringBuilder sb = new StringBuilder(); for (int i=0; i<Math.min(c1.length, c2.length); i++) { if (c1[i]==c2[i]) sb.append(c1[i]); else break; } return sb.toString(); } /** * Find the longest common suffix of a pair of strings. Case sensitive. 
* @param s1 a string * @param s2 another string * @return the longest common suffix, which may be the emopty string */ public static String commonSuffix(String s1, String s2) { char[] c1 = s1.toCharArray(); char[] c2 = s2.toCharArray(); StringBuilder sb = new StringBuilder(); for (int i=1; i<=Math.min(c1.length, c2.length); i++) { if (c1[c1.length-i]==c2[c2.length-i]) sb.append(c1[c1.length-i]); else break; } return sb.reverse().toString(); } /** * Replace all occurrences of oldstr in source with newstr * @param source string to be modified * @param oldstr string to be replace * @param newstr string to replace oldstr * @return new string with oldstr replaced by newstr */ public static String replaceString(String source, String oldstr, String newstr) { int oldLen = oldstr.length(); if (oldLen == 0 || oldstr.equals(newstr)) { return source; } int thisIdx = source.indexOf(oldstr); if (thisIdx < 0) { return source; } int sourceLen = source.length(); StringBuilder sb = new StringBuilder(sourceLen); int oldIdx = 0; do { for (int ix = oldIdx; ix < thisIdx; ix++) { sb.append(source.charAt(ix)); } sb.append(newstr); oldIdx = thisIdx + oldLen; } while ((thisIdx = source.indexOf(oldstr, oldIdx)) >= 0); for (int ix = oldIdx; ix < sourceLen; ix++) { sb.append(source.charAt(ix)); } return sb.toString(); } public static String replaceFirst(String source, String oldstr, String newstr) { int oldLen = oldstr.length(); if (oldLen == 0 || oldstr.equals(newstr)) { return source; } int index = source.indexOf(oldstr); if (index < 0) { return source; } else { int sourceLen = source.length(); StringBuilder sb = new StringBuilder(sourceLen); sb.append(source.substring(0, index)); sb.append(newstr); if (index + oldLen < sourceLen) { sb.append(source.substring(index + oldLen)); } return sb.toString(); } } public static String replaceLast(String source, String oldstr, String newstr) { int oldLen = oldstr.length(); if (oldLen == 0 || oldstr.equals(newstr)) { return source; } int index = source.lastIndexOf(oldstr); if (index < 0) { return source; } else { int sourceLen = source.length(); StringBuilder sb = new StringBuilder(sourceLen); sb.append(source.substring(0, index)); sb.append(newstr); if (index + oldLen < sourceLen) { sb.append(source.substring(index + oldLen)); } return sb.toString(); } } /** * Concatenate elements of collection into string, separated by commas * @param c - Collection of object (on which toString() will be called) * @return Concatenated string */ public static String separatedString(Collection c) { return separatedString(c, DEFAULT_COLLECTION_SEPARATOR); } /** * Concatenate elements of collection into string, with separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @return Concatenated string */ public static String separatedString(Collection c, String separator) { return separatedString(c, "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of object array into string, with separators * @param arr - Array of object (on which toString() will be called) * @param separator - String to put between elements * @return Concatenated string */ public static String separatedString(Object[] arr, String separator) { return separatedString(ListUtil.fromArray(arr), "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of int array into string, with separators * @param arr - Array of int elements * @param separator - String to put between elements * @return Concatenated 
string */ public static String separatedString(int[] arr, String separator) { ArrayList col = new ArrayList(arr.length); for (int ii = 0 ; ii < arr.length ; ++ii) { col.add(Integer.toString(arr[ii])); } return separatedString(col, "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of long array into string, with separators * @param arr - Array of int elements * @param separator - String to put between elements * @return Concatenated string */ public static String separatedString(long[] arr, String separator) { ArrayList col = new ArrayList(arr.length); for (int ii = 0 ; ii < arr.length ; ++ii) { col.add(Long.toString(arr[ii])); } return separatedString(col, "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, with separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param sb - StringBuilder to write result into * @return sb */ public static StringBuilder separatedString(Collection c, String separator, StringBuilder sb) { return separatedString(c, "", separator, "", sb); } /** * Concatenate elements of collection into string, delimiting each element, * adding separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param delimiter - String with which to surround each element * @return Concatenated string */ public static String separatedDelimitedString(Collection c, String separator, String delimiter) { return separatedString(c, delimiter, delimiter + separator + delimiter, delimiter, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, delimiting each element, * adding separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param delimiter1 - String with which to prefix each element * @param delimiter2 - String with which to suffix each element * @return Concatenated string */ public static String separatedDelimitedString(Collection c, String separator, String delimiter1, String delimiter2) { return separatedString(c, delimiter1, delimiter2 + separator + delimiter1, delimiter2, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, adding separators, * terminating with terminator * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param terminator - String with which to terminate result * @return Concatenated string */ public static String terminatedSeparatedString(Collection c, String separator, String terminator) { return separatedString(c, "", separator, terminator, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, adding separators, * delimitig each element * @param c - Collection of object (on which toString() will be called) * @param separatorFirst - String to place before first element * @param separatorInner - String with which to separate elements * @param separatorLast - String to place after last element * @return Concatenated string */ public static String separatedString(Collection c, String separatorFirst, String separatorInner, String separatorLast) { return separatedString(c, separatorFirst, separatorInner, separatorLast, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, adding separators, * delimitig each element * @param c - 
Collection of object (on which toString() will be called) * @param separatorFirst - String to place before first element * @param separatorInner - String with which to separate elements * @param separatorLast - String to place after last element * @param sb - StringBuilder to write result into * @return sb */ public static StringBuilder separatedString(Collection c, String separatorFirst, String separatorInner, String separatorLast, StringBuilder sb) { if (c == null) { return sb; } Iterator iter = c.iterator(); boolean first = true; while (iter.hasNext()) { if (first) { first = false; sb.append(separatorFirst); } else { sb.append(separatorInner); } Object obj = iter.next(); sb.append(obj == null ? NULL_OBJECT_PRINTABLE_TEXT : obj.toString()); } if (!first) { sb.append(separatorLast); } return sb; } /** Break a string at a separator char, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator char * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. * @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. * @param trimEachString is true, each string in the result will be trim()ed */ public static Vector<String> breakAt(String s, char sep, int maxItems, boolean discardEmptyStrings, boolean trimEachString) { Vector<String> res = new Vector<String>(); int len; if (s == null || (len = s.length()) == 0) { return res; } if (maxItems <= 0) { maxItems = Integer.MAX_VALUE; } for (int pos = 0; maxItems > 0; maxItems-- ) { int end = s.indexOf(sep, pos); if (end == -1) { if (pos > len) { break; } end = len; } if (!discardEmptyStrings || pos != end) { String str = s.substring(pos, end); if (trimEachString) { str = str.trim(); } if (!discardEmptyStrings || str.length() != 0) { res.addElement(str); } } pos = end + 1; } return res; } /** Break a string at a separator string, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator string * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. * @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. * @param trimEachString is true, each string in the result will be trim()ed */ public static Vector<String> breakAt(String s, String sep, int maxItems, boolean discardEmptyStrings, boolean trimEachString) { Vector res = new Vector(); int len; if (s == null || (len = s.length()) == 0) { return res; } if (maxItems <= 0) { maxItems = Integer.MAX_VALUE; } for (int pos = 0; maxItems > 0; maxItems-- ) { int end = s.indexOf(sep, pos); if (end == -1) { if (pos > len) { break; } end = len; } if (!discardEmptyStrings || pos != end) { String str = s.substring(pos, end); if (trimEachString) { str = str.trim(); } if (!discardEmptyStrings || str.length() != 0) { res.addElement(str); } } pos = end + sep.length(); } return res; } /** Break a string at a separator char, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator char * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. 
* @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. */ public static Vector<String> breakAt(String s, char sep, int maxItems, boolean discardEmptyStrings) { return breakAt(s, sep, maxItems, discardEmptyStrings, false); } /** Break a string at a separator char, returning a vector of strings. * Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator char */ public static Vector<String> breakAt(String s, char sep) { return breakAt(s, sep, 0); } /** Break a string at a separator char, returning a vector of at most * maxItems strings. Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator char * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. */ public static Vector<String> breakAt(String s, char sep, int maxItems) { return breakAt(s, sep, maxItems, false); } /** Break a string at a separator String, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator String * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. * @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. */ public static Vector<String> breakAt(String s, String sep, int maxItems, boolean discardEmptyStrings) { return breakAt(s, sep, maxItems, discardEmptyStrings, false); } /** Break a string at a separator String, returning a vector of strings. * Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator String */ public static Vector<String> breakAt(String s, String sep) { return breakAt(s, sep, 0); } /** Break a string at a separator String, returning a vector of at most * maxItems strings. Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator String * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. */ public static Vector<String> breakAt(String s, String sep, int maxItems) { return breakAt(s, sep, maxItems, false); } /** Break a string at a separator String, returning a vector of strings. * @param s string containing zero or more occurrences of separator * @param sep the separator String * @param trimAndDiscardEmpty if true, each string is trim()ed, and empty * strings (caused by delimiters at the start or end of the string, or * adjacent delimiters) will not be included in the result. */ public static Vector<String> breakAt(String s, String sep, boolean trimAndDiscardEmpty) { return breakAt(s, sep, 0, true, true); } /** * Trim the end off of a string starting at the first occurrence of any * of the characters specified. 
* * @param str String to trim * @param chars String containing the chars to trim at * @return str turncated at the first occurrence of any of the chars, or * the original string if no occurrence */ public static String truncateAtAny(String str, String chars) { if (str == null) { return null; } if (chars != null) { for (int jx=0, len = chars.length(); jx < len; jx++) { int pos = str.indexOf(chars.charAt(jx)); if (pos >= 0) { str = str.substring(0, pos); } } } return str; } /** * Trim the end off of a string starting at the specified character. * * @param str String to trim * @param chr char to trim at * @return str turncated at the first occurrence of char, or * the original string if no occurrence */ public static String truncateAt(String str, char chr) { if (str == null) { return null; } int pos = str.indexOf(chr); if (pos < 0) { return str; } return str.substring(0, pos); } /** If string is longer than len, replace characters in the middle with * an elipsis so that the string is no longer than len * @param s the string * @param len maximum length of returned string */ public static String elideMiddleToMaxLen(String s, int len) { if (s == null || s.length() <= len) { return s; } int split = len / 2; return s.substring(0, split) + "..." + s.substring(s.length() - split); } /** Like indexOf except is case-independent */ public static int indexOfIgnoreCase(String str, String substr) { return indexOfIgnoreCase(str, substr, 0); } /** Like indexOf except is case-independent */ public static int indexOfIgnoreCase(String str, String substr, int fromIndex) { if (str == null || substr == null) { return -1; } int sublen = substr.length(); int last = str.length() - sublen; for (int ix = fromIndex; ix <= last; ix++) { if (str.regionMatches(true, ix, substr, 0, sublen)) { return ix; } } return -1; } /** Like endsWith except is case-independent */ public static boolean endsWithIgnoreCase(String str, String end) { int lend = end.length(); return str.regionMatches(true, str.length() - lend, end, 0, lend); } /** Like startsWith except is case-independent */ public static boolean startsWithIgnoreCase(String str, String start) { return str.regionMatches(true, 0, start, 0, start.length()); } /** Return true if the string has any consecutive repeated characters */ public static boolean hasRepeatedChar(String str) { if (str.length() < 2) { return false; } for (int ix = str.length() - 2; ix >= 0; ix--) { if (str.charAt(ix) == str.charAt(ix+1)) { return true; } } return false; } /** Remove the substring beginning with the final occurrence of the * separator, if any. */ public static String upToFinal(String str, String sep) { int pos = str.lastIndexOf(sep); if (pos < 0) { return str; } return str.substring(0, pos); } /** Iff the string ends with <code>end</code>, remove it. */ public static String removeTrailing(String str, String end) { if (str.endsWith(end)) { return str.substring(0, str.length() - end.length()); } return str; } /* Return the substring following the final dot */ public static String shortName(Object object) { if (object == null) { return null; } String name = object.toString(); return name.substring(name.lastIndexOf('.')+1); } /* Return the non-qualified name of the class */ public static String shortName(Class clazz) { String className = clazz.getName(); return className.substring(className.lastIndexOf('.')+1); } /* Return the non-qualified name of the method (Class.method) */ public static String shortName(Method method) { return shortName(method.getDeclaringClass()) + "." 
+ method.getName(); } public static String sanitizeToIdentifier(String name) { StringBuilder sb = new StringBuilder(); for (int ix = 0; ix < name.length(); ix++) { char ch = name.charAt(ix); if (Character.isJavaIdentifierPart(ch)) { sb.append(ch); } } return sb.toString(); } static Pattern alphanum = RegexpUtil.uncheckedCompile("([^a-zA-Z0-9])", Perl5Compiler.READ_ONLY_MASK); /** Return a copy of the string with all non-alphanumeric chars * escaped by backslash. Useful when embedding an unknown string in * a regexp */ public static String escapeNonAlphaNum(String str) { Substitution subst = new Perl5Substitution("\\\\$1"); return Util.substitute(RegexpUtil.getMatcher(), alphanum, subst, str, Util.SUBSTITUTE_ALL); } private static java.util.regex.Pattern COMBINING_DIACRIT_PAT = java.util.regex.Pattern.compile("\\p{InCombiningDiacriticalMarks}+"); /** * Normalize string by removing diacritical marks. * @param s the string * @return the string with diacritical marks removed */ static public String toUnaccented(String s) { return COMBINING_DIACRIT_PAT.matcher(Normalizer.normalize(s, Form.NFD)).replaceAll(""); } // /** Accented character table for use by {@link #toUnaccentedFast(String). // * See comments there. */ // private static final String[][] ACCENTTABLE = { // {"\u00c0","A"}, // À, A with grave // {"\u00c1","A"}, // Á, A with acute // {"\u00c2","A"}, // Â, A with circumflex // {"\u00c3","A"}, // Â, A with tilde // {"\u00c4","A"}, // Ä, A with diaeresis // {"\u00c5","A"}, // Å, A with ring above // {"\u00c6","AE"}, // Æ, AE // {"\u00c7","C"}, // Ç, C with cedilla // {"\u00c8","E"}, // È, E with grave // {"\u00c9","E"}, // É, E with acute // {"\u00ca","E"}, // Ê, E with circumflex // {"\u00cb","E"}, // Ë, E with diaeresis // {"\u00cc","I"}, // Ì, I with grave // {"\u00cd","I"}, // Í, I with acute // {"\u00ce","I"}, // Î, I with circumflex // {"\u00cf","I"}, // Ï, I with diaeresis // {"\u00d1","N"}, // Ñ, N with tilde // {"\u00d2","O"}, // Ò, O with grave // {"\u00d3","O"}, // Ó, O with acute // {"\u00d4","O"}, // Ô, O with circumflex // {"\u00d5","O"}, // Õ, O with tilde // {"\u00d6","O"}, // Ö, O with diaeresis // {"\u00d8","O"}, // Ø, O with a stroke // {"\u00d9","U"}, // Ù, U with grave // {"\u00da","U"}, // Ú, U with acute // {"\u00db","U"}, // Û, U with circumflex // {"\u00dc","U"}, // Ü, U with diaeresis // {"\u00dd","Y"}, // Ý, Y with acute // {"\u00e0","a"}, // à, a with grave // {"\u00e1","a"}, // á, a with acute // {"\u00e2","a"}, // â, a with circumflex // {"\u00e3","a"}, // ã, a with tilde // {"\u00e4","a"}, // ä, a with diaeresis // {"\u00e5","a"}, // å, a with ring above // {"\u00e6","ae"}, // æ, ae // {"\u00e7","c"}, // ç, c with cedilla // {"\u00e8","e"}, // è, e with grave // {"\u00e9","e"}, // é, e with acute // {"\u00ea","e"}, // ê, e with circumflex // {"\u00eb","e"}, // ë, e with diaeresis // {"\u00ec","i"}, // ì, i with grave // {"\u00ed","i"}, // í, i with acute // {"\u00ee","i"}, // î, i with circumflex // {"\u00ef","i"}, // ï, i with diaeresis // {"\u00f1","n"}, // ñ, n with tilde // {"\u00f2","o"}, // ò, o with grave // {"\u00f3","o"}, // ó, o with acute // {"\u00f4","o"}, // ô, o with circumflex // {"\u00f5","o"}, // õ, o with tilde // {"\u00f6","o"}, // ö, o with diaeresis // {"\u00f8","o"}, // ø, o with stroke // {"\u00f9","u"}, // ù, u with grave // {"\u00fa","u"}, // ú, u with acute // {"\u00fb","u"}, // û, u with circumflex // {"\u00fc","u"}, // ü, u with diaeresis // {"\u00fd","y"}, // ý, y with acute // {"\u00ff","y"}, // ÿ, y with diaeresis // }; // 
private static char[] AC_CHAR = new char[ACCENTTABLE.length]; // private static String[] AC_REP = new String[ACCENTTABLE.length]; // static { // for (int ix = 0; ix < ACCENTTABLE.length; ix++) { // AC_CHAR[ix] = ACCENTTABLE[ix][0].charAt(0); // AC_REP[ix] = ACCENTTABLE[ix][1]; // } // } // /** Alternate implementation of {@link @toUnaccented(String)}. Can be // * several times faster (depending on string length) but only handles // * accented characters that are in its table. */ // public static String toUnaccentedFast(String s) { // boolean modified = false; // int slen = s.length(); // StringBuilder sb = null; // outer: // for (int ix = 0; ix < slen; ix++) { // char ch = s.charAt(ix); // for (int jx = 0; jx < AC_CHAR.length; jx++) { // if (AC_CHAR[jx] == ch) { // if (!modified) { // sb = new StringBuilder(slen); // sb.append(s, 0, ix); // modified = true; // } // sb.append(AC_REP[jx]); // continue outer; // } // } // if (modified) { // sb.append(ch); // } // } // if (modified) { // return sb.toString(); // } else { // return s; // } // } // static String toUnaccentedFast0(String s) { // for (int iy = 0; iy < ACCENTTABLE.length; iy++) { // s = StringUtil.replaceString(s, ACCENTTABLE[iy][0], ACCENTTABLE[iy][1]); // } // return s; // } /** Escape values (and keys) to be included in a comma-separated string * of key=value. Comma, equals and backslash are escaped with * backslash */ public static String ckvEscape(String s) { if (s.indexOf('\\') < 0 && s.indexOf(',') < 0 && s.indexOf('=') < 0) { return s; } int len = s.length(); StringBuilder sb = new StringBuilder(len + 8); for (int ix = 0; ix < len; ix++) { char c = s.charAt(ix); switch(c) { // Special characters case '\\': sb.append("\\\\"); break; case ',': sb.append("\\,"); break; case '=': sb.append("\\="); break; default: sb.append(c); break; } } return sb.toString(); } /** Encode a string to be included in a CSV. Values containing comma, * space or quote are quoted, quotes are doubled */ public static String csvEncode(String s) { if (s.indexOf('"') >= 0) { int len = s.length(); StringBuilder sb = new StringBuilder(len + 5); sb.append("\""); for (int ix = 0; ix < len; ix++) { char c = s.charAt(ix); switch (c) { case '\"': sb.append("\"\""); break; default: sb.append(c); break; } } sb.append("\""); return sb.toString(); } if (s.indexOf(' ') >= 0 || s.indexOf(',') >= 0) { StringBuilder sb = new StringBuilder(s.length() + 2); sb.append("\""); sb.append(s); sb.append("\""); return sb.toString(); } return s; } /** Encode an array of strings to form a row in a CSV. Values containing comma, * space or quote are quoted, quotes are doubled. All encoded values are * combined with comma separators. */ public static String csvEncodeValues(String[] values) { if (values==null || values.length==0) return ""; //return csvEncodeValues(Arrays.asList(values)); StringBuilder sb = new StringBuilder(); // Build the string for those values which need the separator appended for (int i=0; i<values.length-1; i++) { sb.append(csvEncode(values[i]) + ","); } // Add the last item sb.append(csvEncode(values[values.length-1])); return sb.toString(); } /** Encode a list of strings to form a row in a CSV. Values containing comma, * space or quote are quoted, quotes are doubled. All encoded values are * combined with comma separators. 
*/ public static String csvEncodeValues(List<String> values) { if (values==null || values.size()==0) return ""; return csvEncodeValues(values.toArray(new String[]{})); } /** * Returns the number of instances of a particular substring in a string. * This ignores overlap, starting from the left, so 'xxxxxy' would have * 2 instances of 'xx', not 4. Empty string as a substring returns 0. */ public static int countOccurences(String str, String subStr) { int len = subStr.length(); if (len == 0) { return 0; } int pos = 0; int count = 0; while ((pos = str.indexOf(subStr, pos)) >= 0) { count++; pos += len; } return count; } /** Retpresents an integer width,height pair */ public static class CharWidthHeight { int width; int height; private CharWidthHeight(int w, int h) { width = w; height = h; } public int getWidth() { return width; } public int getHeight() { return height; } } /** Calculates the number of lines and the width (in characters) of the * longest line in a String. Not particularly efficient, shouldn't be * used on huge Strings */ public static CharWidthHeight countWidthAndHeight(String str) { if (str == null || str.length() == 0) { return new CharWidthHeight(0, 0); } int height = 1; int width = 0; int maxWidth = 0; int len = str.length(); for (int pos = 0; pos < len; pos++) { char c = str.charAt(pos); if (c == '\r' || c == '\n') { if (c == '\r' && pos+1 < len && str.charAt(pos+1) == '\n') { pos++; } // Don't increment height if string ends with a newline if (pos+1 < len) { height++; } if (width > maxWidth) { maxWidth = width; } width = 0; } else { width++; } } if (width > maxWidth) { maxWidth = width; } return new CharWidthHeight(maxWidth, height); } /** * Get the text between two other chunks of text. * @param line a String containing some text sandwiched between two known pieces of text * @param beginFlag the String coming before the required text * @param endFlag the String coming after the required text * @return the extracted text, or null if the string did not fit the specified format * @author Neil Mayo */ public static String getTextBetween(String line, String beginFlag, String endFlag){ int tBegin = StringUtil.indexOfIgnoreCase(line, beginFlag); // Get the first position of the endFlag appearing after the beginFlag tBegin += beginFlag.length(); int tEnd = StringUtil.indexOfIgnoreCase(line, endFlag, tBegin); // Check that the flags were found if(tBegin < 0 || tEnd<0 || tBegin>tEnd) { return null; } return line.substring(tBegin, tEnd); } /** Return a reader that transforms platform newline sequences to standard * newline characters. * @param r a Reader * @return a filtered reader that transforms platform newline sequences to standard * newline characters. */ public static Reader getLineReader(final Reader r) { return new Reader() { boolean saw_CR = false; final char[] cb = new char[1]; public int read(char cbuf[], int off, int len) throws IOException { int i; int n = 0; for (i = 0; i < len; i++) { if ((n = r.read(cb, 0, 1)) <= 0) { break; } if (saw_CR) { saw_CR = false; if (cb[0] == '\n') { if (r.read(cb, 0, 1) <= 0) { break; } } } if (cb[0] == '\r') { saw_CR = true; cb[0] = '\n'; } cbuf[off+i] = cb[0]; } return (i == 0) ? n : i; } public void close() throws IOException { r.close(); } }; } /** Return a reader that transforms platform newline sequences to standard * newline characters. * @param in an input stream * @return a filtered reader that transforms platform newline sequences to standard * newline characters. 
*/ public static Reader getLineReader(InputStream in) { return getLineReader(in, null); } /** Return a reader that transforms platform newline sequences to standard * newline characters. * @param in an input stream * @param encoding the character encoding * @return a filtered reader that transforms platform newline sequences to standard * newline characters. */ public static Reader getLineReader(InputStream in, String encoding) { return getLineReader(StreamUtil.getReader(in, encoding)); } /** Return a reader that removes backslash-newline sequences * (line-continuation) * @param r a Reader * @return a filtered reader that removes line-continuation sequences */ public static Reader getLineContinuationReader(final Reader r) { return new Reader() { boolean saw_bslash = false; final char[] cb = new char[1]; int lastch = -1; public int read(char cbuf[], int off, int len) throws IOException { int i; int n = 0; int endoff = off + len; while (off < endoff) { // if have a character waiting, emit it if (lastch >= 0) { cbuf[off++] = (char)lastch; lastch = -1; } else { if ((n = r.read(cb, 0, 1)) <= 0) { // end of input. do we have a hanging backslash? if (saw_bslash) { cbuf[off++] = '\\'; saw_bslash = false; } break; } switch (cb[0]) { case '\\': if (saw_bslash) { // if already seen a backslash, output that one cbuf[off++] = '\\'; } else { saw_bslash = true; } break; case '\n': if (saw_bslash) { saw_bslash = false; } else { cbuf[off++] = cb[0]; } break; default: if (saw_bslash) { cbuf[off++] = '\\'; saw_bslash = false; lastch = cb[0]; } else { cbuf[off++] = cb[0]; } break; } } } int nread = len - (endoff - off); return nread == 0 ? -1 : nread; } public void close() throws IOException { r.close(); } }; } /** Reads a line from a BufferedReader, interpreting backslash-newline as * line-continuation. */ public static String readLineWithContinuation(BufferedReader rdr) throws IOException { StringBuilder sb = null; while (true) { String s = rdr.readLine(); if (s == null) { if (sb == null || sb.length() == 0) { return null; } else { return sb.toString(); } } if (s.endsWith("\\")) { if (sb == null) { sb = new StringBuilder(120); } sb.append(s, 0, s.length() - 1); } else if (sb == null || sb.length() == 0) { return s; } else { sb.append(s); return sb.toString(); } } } /** Reads line from a BuffereReader into a StringBuilder, interpreting * backslash-newline as line-continuation, until either end-of-stream or * maxSize chars read. May read one more line beyond maxSize. */ public static boolean readLinesWithContinuation(BufferedReader rdr, StringBuilder sb, int maxSize) throws IOException { while (true) { String s = rdr.readLine(); if (s == null) { return sb.length() != 0; } if (s.endsWith("\\")) { sb.append(s, 0, s.length() - 1); continue; } sb.append(s, 0, s.length()); sb.append("\n"); if (sb.length() >= maxSize) { return true; } } } /** Return a string with lines from a reader, separated by a newline * character. The reader is not closed. Throw if more than maxSize * chars. Reader is wrapped with a reader returned by {@link * #getLineReader(Reader) before processing. */ public static String fromReader(Reader r, int maxSize) throws IOException { r = getLineReader(r); char[] buf = new char[1000]; StringBuilder sb = new StringBuilder(1000); int len; while ((len = r.read(buf)) >= 0) { sb.append(buf, 0, len); if (maxSize > 0 && sb.length() > maxSize) { throw new FileTooLargeException(); } } return sb.toString(); } /** Read chars from a Reader into a StringBuilder up to maxSize. 
* @param r the Reader to read from * @param sb the StringBuilder to fill * @param maxChars maximum number of chars to read * @return true if anything was read (false if reader was already at * eof) */ public static boolean fillFromReader(Reader r, StringBuilder sb, int maxChars) throws IOException { char[] buf = new char[1000]; int tot = 0; int len; while (tot < maxChars && (len = r.read(buf, 0, Math.min(buf.length, maxChars - tot))) >= 0) { sb.append(buf, 0, len); tot += len; } return tot != 0; } /** Return a string with lines from a reader, separated by a newline character. * Reader is wrapped with a reader returned by {@link #getLineReader(Reader) * before processing. */ public static String fromReader(Reader r) throws IOException { return fromReader(r, -1); } /** Return a string with lines from an InputStream separated by a newline * character using the default encoding*/ public static String fromInputStream(InputStream in) throws IOException { // use our default encoding rather than system default return fromReader(new InputStreamReader(in, Constants.DEFAULT_ENCODING)); } /** Return a string with lines from an InputStream separated by a newline * character using the default encoding. The InputStream is not closed. * Throw if more than maxSize chars */ public static String fromInputStream(InputStream in, int maxSize) throws IOException { // use our default encoding rather than system default return fromReader(new InputStreamReader(in, Constants.DEFAULT_ENCODING), maxSize); } /** Return a string with lines from the file path separated by a newline character */ public static String fromFile(String path) throws IOException { return fromFile(new File(path)); } /** Return a string with lines from the file separated by a newline character */ public static String fromFile(File file) throws IOException { Reader rdr = new FileReader(file); try { return fromReader(rdr); } finally { IOUtil.safeClose(rdr); } } /** Write a string to a File */ public static void toFile(File file, String s) throws IOException { Writer w = new BufferedWriter(new FileWriter(file)); try { StringUtil.toWriter(w, s); } finally { IOUtil.safeClose(w); } } /* Write the string to the OutputStream */ public static void toOutputStream(OutputStream out, String s) throws IOException { toWriter(new BufferedWriter(new OutputStreamWriter(out)),s); } /* Write the string to the Writer */ public static void toWriter(Writer w, String s) throws IOException { w.write(s); w.flush(); } /** * Test whether a string is null or the empty string * @param s the string * @return true if s is null or the empty string */ public static boolean isNullString(String s) { return s == null || s.length() == 0; } /** * Test whether a string contains only 7-bit ascii characters * @param s the string * @return true if all chars in s are ascii */ public static boolean isAscii(String s) { for (int ix = 0; ix < s.length(); ix++) { if (s.charAt(ix) > 0x7F) { return false; } } return true; } /** * Same as str.compareTo(str), except null is the lowest value. */ public static int compareToNullLow(String str1, String str2) { if (str1 == null) { return (str2 == null) ? 0 : -1; } if (str2 == null) { return 1; } return str1.compareTo(str2); } /** * Same as str.compareTo(str), except null is the highest value. */ public static int compareToNullHigh(String str1, String str2) { if (str1 == null) { return (str2 == null) ? 0 : 1; } if (str2 == null) { return -1; } return str1.compareTo(str2); } /** * Comparison that matches the traversal order of CachedUrlSet iterators. 
* Differs from natural sort order in that '/' sorts before any other * char, because the tree traversal is pre-order. */ public static int preOrderCompareTo(String str1, String str2) { int len1 = str1.length(); int len2 = str2.length(); int n = Math.min(len1, len2); for (int ix = 0; ix < n; ix++) { char c1 = str1.charAt(ix); char c2 = str2.charAt(ix); if (c1 != c2) { if (c1 == '/') { return -1; } if (c2 == '/') { return 1; } return c1 - c2; } } return len1 - len2; } /** * Comparison that matches the traversal order of CachedUrlSet iterators. * Differs from natural sort order in that '/' sorts before any other * char, because the tree traversal is pre-order. Null sorts after all * nun-null strings. */ public static int preOrderCompareToNullHigh(String str1, String str2) { if (str1 == null) { return (str2 == null) ? 0 : 1; } if (str2 == null) { return -1; } return StringUtil.preOrderCompareTo(str1, str2); } public static Comparator PRE_ORDER_COMPARATOR = new PreOrderComparator(); /** Comparator that aorts in pre-order traversal order. null is not * permitted. */ public static class PreOrderComparator implements Comparator<String> { public int compare(String s1, String s2) { return StringUtil.preOrderCompareTo(s1, s2); } } /** * Compare two strings for equality or both null. * @param s1 string 1 * @param s2 string 2 * @return true if strings are equal or both null */ public static boolean equalStrings(String s1, String s2) { if (s1 == null) { return s2 == null; } else { return s1.equals(s2); } } /** * Compare two strings for case-independent equality or both null. * @param s1 string 1 * @param s2 string 2 * @return true if strings are equal or both null */ public static boolean equalStringsIgnoreCase(String s1, String s2) { if (s1 == null) { return s2 == null; } else { return s1.equalsIgnoreCase(s2); } } /** Sort a set of strings case-independently */ public static Set<String> caseIndependentSortedSet(Collection<String> coll) { Set<String> res = new TreeSet(new CaseIndependentComparator()); res.addAll(coll); return res; } public static class CaseIndependentComparator implements Comparator<String> { public int compare(String s1, String s2) { // Don't allow null to cause NPE if (s1 == null) { return (s2 == null ? 0 : -1); } else if (s2 == null) { return 1; } return s1.compareToIgnoreCase(s2); } } /** Like System.arrayCopy, for characters within one StringBuilder. * @param sb the buffer * @param srcPos chars copied starting from here * @param destPos chars copied starting to here * @param len number of chars copied */ public static void copyChars(StringBuilder sb, int srcPos, int destPos, int len) { if (srcPos > destPos) { while (--len >= 0) { sb.setCharAt(destPos++, sb.charAt(srcPos++)); } } else if (srcPos < destPos) { while (--len >= 0) { sb.setCharAt(destPos + len, sb.charAt(srcPos + len)); } } } private static long gensymCtr = 0; /** * Generate a unique string. * @param base the initial substring * @return a string consisting of the supplied initial substring and a * unique counter value. */ public static String gensym(String base) { return base + (gensymCtr++); } /** Return a string of n spaces */ public static String tab(int n) { return StringUtils.repeat(" ", n); } /** * Trim a hostname, removing "www." from the front, if present, and the * TLD from the end. If this would result in an empty string, the entire * name is returned. 
* @param hostname a hostname string * @return the trimmed hostname */ public static String trimHostName(String hostname) { if (hostname == null) return null; int start = 0; if (hostname.regionMatches(true, 0, "www.", 0, 4)) { start = 4; } int end = hostname.lastIndexOf('.'); if (end <= start) { // if trimming www left nothing but TLD, return whole name return hostname; } return hostname.substring(start, end); } /** Parse a string as a time interval. An interval is specified as an * integer with an optional suffix. No suffix means milliseconds, s, m, * h, d, w, y indicates seconds, minutes, hours, days, weeks and years * respectively. As a special case, "ms" means milliseconds. * @param str the interval string * @return interval in milliseconds */ // tk - extend to accept combinations: xxHyyMzzS, etc. public static long parseTimeInterval(String str) { try { int len = str.length(); char suffix = str.charAt(len - 1); String numstr; long mult = 1; if (Character.isDigit(suffix)) { numstr = str; } else { if (StringUtil.endsWithIgnoreCase(str, "ms")) { numstr = str.substring(0, len - 2); } else { numstr = str.substring(0, len - 1); switch (Character.toUpperCase(suffix)) { case 'S': mult = Constants.SECOND; break; case 'M': mult = Constants.MINUTE; break; case 'H': mult = Constants.HOUR; break; case 'D': mult = Constants.DAY; break; case 'W': mult = Constants.WEEK; break; case 'Y': mult = Constants.YEAR; break; default: throw new NumberFormatException("Illegal time interval suffix: " + str); } } } return Long.parseLong(numstr) * mult; } catch (IndexOutOfBoundsException e) { throw new NumberFormatException("empty string"); } } private static Pattern sizePat = RegexpUtil.uncheckedCompile("^([0-9.]+)\\s*([a-zA-Z]*)", Perl5Compiler.READ_ONLY_MASK); private static String suffixes[] = {"b", "KB", "MB", "GB", "TB", "PB"}; /** Parse a string as a size in bytes, with a optional suffix. No suffix * means bytes, kb, mb, gb, tb indicate kilo-, mega-, giga, tera-bytes * respectively. 
* @param str the size string * @return size in bytes */ public static long parseSize(String str) { str = str.trim(); Perl5Matcher matcher = RegexpUtil.getMatcher(); if (!matcher.contains(str, sizePat)) { throw new NumberFormatException("Illegal size syntax: " + str); } MatchResult matchResult = matcher.getMatch(); String num = matchResult.group(1); String suffix = matchResult.group(2); try { float f = Float.parseFloat(num); long mult = 1; if (StringUtil.isNullString(suffix)) { return (long)f; } for (int ix = 0; ix < suffixes.length; ix++) { if (suffix.equalsIgnoreCase(suffixes[ix])) { return (long)(f * mult); } mult *= 1024; } throw new NumberFormatException("Illegal size suffix: " + str); } catch (NumberFormatException ex) { throw new NumberFormatException("Illegal size syntax: " + str); } } /** Trim leading and trailing blank lines from a block of text */ public static String trimBlankLines(String txt) { StringBuilder buf = new StringBuilder(txt); while (buf.length()>0 && buf.charAt(0) == '\n') { buf.deleteCharAt(0); } while (buf.length()>0 && buf.charAt(buf.length() - 1) == '\n') { buf.deleteCharAt(buf.length() - 1); } return buf.toString(); } private static Pattern nonUnixEol = RegexpUtil.uncheckedCompile("\\r\\n?", Perl5Compiler.MULTILINE_MASK); /** Normalize newlines to Unix eol */ public static String normalizeEols(String str) { Substitution subst = new Perl5Substitution("\n"); return Util.substitute(RegexpUtil.getMatcher(), nonUnixEol, subst, str, Util.SUBSTITUTE_ALL); } private static Pattern nlEol = RegexpUtil.uncheckedCompile("([\n\r][\n\t ]*)", Perl5Compiler.MULTILINE_MASK); /** Trim EOLs and leading whitespace from a block of text */ public static String trimNewlinesAndLeadingWhitespace(String str) { if (str.indexOf("\n") == -1 && str.indexOf("\r") == -1) { return str; } Substitution subst = new Perl5Substitution(""); return Util.substitute(RegexpUtil.getMatcher(), nlEol, subst, str, Util.SUBSTITUTE_ALL); } // Unit Descriptor private static class UD { String str; // suffix string long millis; // milliseconds in unit int threshold; // min units to output String stop; // last unit to output if this matched UD(String str, long millis) { this(str, millis, 1); } UD(String str, long millis, int threshold) { this(str, millis, threshold, null); } UD(String str, long millis, int threshold, String stop) { this.str = str; this.millis = millis; this.threshold = threshold; this.stop = stop; } } static UD units[] = { new UD("w", Constants.WEEK, 3, "h"), new UD("d", Constants.DAY, 1, "m"), new UD("h", Constants.HOUR), new UD("m", Constants.MINUTE), new UD("s", Constants.SECOND, 0), }; public static String protectedDivide(long numerator, long denominator) { return protectedDivide(numerator, denominator, "inf"); } public static String protectedDivide(long numerator, long denominator, String infStr) { if (denominator == 0) { return infStr; } long val = numerator / denominator; return String.valueOf(val); } /** Generate a string representing the time interval. 
* @param millis the time interval in milliseconds * @return a string in the form dDhHmMsS */ public static String timeIntervalToString(long millis) { StringBuilder sb = new StringBuilder(); if (millis < 0) { sb.append("-"); millis = -millis; } return posTimeIntervalToString(millis, sb); } private static String posTimeIntervalToString(long millis, StringBuilder sb) { if (millis < 10 * Constants.SECOND) { sb.append(millis); sb.append("ms"); } else { boolean force = false; String stop = null; for (int ix = 0; ix < units.length; ix++) { UD iu = units[ix]; long n = millis / iu.millis; if (force || n >= iu.threshold) { millis %= iu.millis; sb.append(n); sb.append(iu.str); force = true; if (stop == null) { if (iu.stop != null) { stop = iu.stop; } } else { if (stop.equals(iu.str)) { break; } } } } } return sb.toString(); } /** Generate a more verbose string representing the time interval. * @param millis the time interval in milliseconds * @return a string in the form "<d> days, <h> hours, <m> minutes, <s> * seconds" */ public static String timeIntervalToLongString(long millis) { StringBuilder sb = new StringBuilder(); long temp = 0; if (millis < 0) { sb.append("-"); millis = -millis; } if (millis >= Constants.SECOND) { temp = millis / Constants.DAY; if (temp > 0) { sb.append(numberOfUnits(temp, "day")); millis -= temp * Constants.DAY; if (millis >= Constants.MINUTE) { sb.append(DEFAULT_COLLECTION_SEPARATOR); } } temp = millis / Constants.HOUR; if (temp > 0) { sb.append(numberOfUnits(temp, "hour")); millis -= temp * Constants.HOUR; if (millis >= Constants.MINUTE) { sb.append(DEFAULT_COLLECTION_SEPARATOR); } } temp = millis / Constants.MINUTE; if (temp > 0) { sb.append(numberOfUnits(temp, "minute")); millis -= temp * Constants.MINUTE; if(millis >= Constants.SECOND) { sb.append(DEFAULT_COLLECTION_SEPARATOR); } } temp = millis / Constants.SECOND; if (temp > 0) { sb.append(numberOfUnits(temp, "second")); } return sb.toString(); } else { return "0 seconds"; } } private static final NumberFormat fmt_1dec = new DecimalFormat("0.0"); private static final NumberFormat fmt_0dec = new DecimalFormat("0"); static final String[] byteSuffixes = {"KB", "MB", "GB", "TB", "PB"}; public static String sizeToString(long size) { if (size < 1024) { return size + "B"; } return sizeKBToString(size / 1024); } public static String sizeKBToString(long size) { double base = 1024.0; double x = (double)size; int len = byteSuffixes.length; for (int ix = 0; ix < len; ix++) { if (x < base || ix == len-1) { StringBuilder sb = new StringBuilder(); if (x < 10.0) { sb.append(fmt_1dec.format(x)); } else { sb.append(fmt_0dec.format(x)); } sb.append(byteSuffixes[ix]); return sb.toString(); } x = x / base; } return ""+size; } /** Remove the first line of the stack trace, iff it duplicates the end * of the exception message */ public static String trimStackTrace(String msg, String trace) { int pos = trace.indexOf("\n"); if (pos > 0) { String l1 = trace.substring(0, pos); if (msg.endsWith(l1)) { return trace.substring(pos + 1); } } return trace; } /** Translate an exception's stack trace to a string. */ public static String stackTraceString(Throwable th) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); th.printStackTrace(pw); return sw.toString(); } /** Convert the first character and every character that follows a space * to uppercase. */ public static String titleCase(String txt) { return titleCase(txt, ' '); } /** Convert the first character and every character that follows the * separator char to uppercase. 
*/ public static String titleCase(String txt, char separator) { int len = txt.length(); if (len == 0) { return ""; } StringBuilder buf = new StringBuilder(txt); buf.setCharAt(0,Character.toUpperCase(buf.charAt(0))); for (int i=1; i<len; i++) { if (buf.charAt(i-1)==separator) { buf.setCharAt(i,Character.toUpperCase(buf.charAt(i))); } } return buf.toString(); } public static class FileTooLargeException extends IOException { public FileTooLargeException() { super(); } public FileTooLargeException(String message) { super(message); } } /** * Returns the index of the nth occurrence of searchStr in sourceStr or -1 * if there aren't n instances of searchStr in sourceStr */ public static int nthIndexOf(int n, String sourceStr, String searchStr) { if (n <= 0) { return -1; } int idx = -1; do { idx = sourceStr.indexOf(searchStr, idx+1); } while (--n > 0 && idx >= 0); return idx; } /** * Scans through the reader looking for the String str; case sensitive * @param reader Reader to search; it will be at least partially consumed * @return true if the string is found, false if the end of reader is * reached without finding the string */ public static boolean containsString(Reader reader, String str) throws IOException { return StringUtil.containsString(reader, str, false); } public static boolean containsString(Reader reader, String str, int buffSize) throws IOException { return containsString(reader, str, false, buffSize); } public static boolean containsString(Reader reader, String str, boolean ignoreCase) throws IOException { return containsString(reader, str, ignoreCase, 4096); } /** * Scans through the reader looking for the String str * @param reader Reader to search; it will be at least partially consumed * @param ignoreCase whether to ignore case or not * @return true if the string is found, false if the end of reader is * reached without finding the string */ public static boolean containsString(Reader reader, String str, boolean ignoreCase, int buffSize) throws IOException { if (reader == null) { throw new NullPointerException("Called with a null reader"); } else if (str == null) { throw new NullPointerException("Called with a null string"); } else if (str.length() == 0) { throw new IllegalArgumentException("Called with a blank String"); } else if (buffSize <= 0) { throw new IllegalArgumentException("Called with a buffSize < 0"); } int strlen = str.length(); // simpiify boundary conditions by ensuring buffer always larger than // search string buffSize = Math.max(buffSize, strlen * 2); int shiftSize = buffSize - (strlen - 1); String regex = java.util.regex.Pattern.quote(str); int flags = ignoreCase ? java.util.regex.Pattern.CASE_INSENSITIVE : 0; java.util.regex.Pattern pat = java.util.regex.Pattern.compile(regex, flags); StringBuilder sb = new StringBuilder(buffSize); while (StringUtil.fillFromReader(reader, sb, buffSize - sb.length())) { java.util.regex.Matcher m1 = pat.matcher(sb); if (m1.find()) { return true; } if (sb.length() < buffSize) { // avoid unnecessary shift on final iteration return false; } sb.delete(0, shiftSize); } return false; } // BoyerMoore.java throws on some inputs with ignoreCase = true. // This implementation retired pending a fix. 
/*public*/ static boolean containsString_bug(Reader reader, String str, boolean ignoreCase, int buffSize) throws IOException { if (reader == null) { throw new NullPointerException("Called with a null reader"); } else if (str == null) { throw new NullPointerException("Called with a null string"); } else if (str.length() == 0) { throw new IllegalArgumentException("Called with a blank String"); } else if (buffSize <= 0) { throw new IllegalArgumentException("Called with a buffSize < 0"); } if (ignoreCase) { str = str.toLowerCase(); } char[] buff = new char[buffSize]; BoyerMoore bm = new BoyerMoore(ignoreCase); bm.compile(str); int bcount; int numPartialMatch = 0; while ((bcount = StreamUtil.readChars(reader, buff, buff.length)) > 0) { if (numPartialMatch > 0 && bcount >= (str.length() - numPartialMatch)) { //we previously matched this many chars at the end of the last buff if (log.isDebug3()) { log.debug3("Found a partial match before in last buffer: "+ str.substring(numPartialMatch)+"; looking for the rest"); } if (startsWith(buff, str.substring(numPartialMatch), ignoreCase)) { if (log.isDebug3()) {log.debug3("Found the second half of a partial match");} return true; } } if (bm.search(buff, 0, bcount) >= 0) { if (log.isDebug3()) {log.debug3("Found a full match in one buffer");} return true; } else { numPartialMatch = bm.partialMatch(); if (log.isDebug3() && numPartialMatch != 0) { log.debug3("Found a partial match of "+numPartialMatch); } } } return false; } /** * * @return true if the first str.length() chars in buffer match str */ private static boolean startsWith(char[]buffer, String str, boolean ignoreCase) { for (int ix=0; ix<(str.length()); ix++) { if (Character.toLowerCase(str.charAt(ix)) != Character.toLowerCase(buffer[ix])) { if (log.isDebug3()) { log.debug3(str.charAt(ix)+" didn't match "+ buffer[ix]); } return false; } } return true; } /** Return a string like "0 units", "1 unit", "n units" * @param number the number of whatever units * @param unit Single form of unit, plural formed by adding "s" */ public static String numberOfUnits(long number, String unit) { return numberOfUnits(number, unit, unit + "s"); } /** Return a string like "0 units", "1 unit", "n units" * @param number the number of whatever units * @param unit Single form of unit * @param pluralUnit plural form of unit */ public static String numberOfUnits(long number, String unit, String pluralUnit) { if (number == 1) { return number + " " + unit; } else { return number + " " + pluralUnit; } } public static boolean equalsIgnoreCase(char kar1, char kar2) { return (Character.toLowerCase(kar1) == Character.toLowerCase(kar2)); } /** * Provides a 64-bit hash of a text string. * * @param payload * A String for which the hash is to be provided. * @return a long with the 64-bit hash of the text string. * @throws Exception * if there are problems computing the LOCKSS identifier. */ public static long hash64(String payload) { try { byte[] digest = MessageDigest.getInstance("MD5").digest(payload.getBytes("UTF-8")); return new BigInteger(1, digest).longValue(); } catch (NoSuchAlgorithmException nsae) { throw new RuntimeException(nsae); } catch (UnsupportedEncodingException uea) { throw new RuntimeException(uea); } } private static java.util.regex.Pattern BLANK_NLS_AND_TABS_PATTERN = java.util.regex.Pattern.compile("[\n\t]+"); /** * Blanks out newlines and tabs in a text string. * * @param text * A String with the text string to be processed. * @return a String with newlines and tabs replaced by spaces. 
*/ public static String blankOutNlsAndTabs(String text) { if (text != null) { return BLANK_NLS_AND_TABS_PATTERN.matcher(text).replaceAll(" "); } return ""; } /** * <p> * A regular expression that matches various Unicode quotation marks: * </p> * <ul> * <li>U+0022 QUOTATION MARK</li> * <li>U+201C LEFT DOUBLE QUOTATION MARK</li> * <li>U+201D RIGHT DOUBLE QUOTATION MARK</li> * <li>U+201F DOUBLE HIGH-REVERSED-9 QUOTATION MARK</li> * </ul> * * @since 1.65 * @see http://en.wikipedia.org/wiki/Quotation_mark_glyphs */ public static final java.util.regex.Pattern QUOTATION_MARKS = java.util.regex.Pattern.compile("\"|\u201c|\u201d|\u201f"); /** * <p> * The canonical quotation mark glyph ({@value #CANONICAL_QUOTATION_MARK}). * </p> * * @since 1.65 */ public static final String CANONICAL_QUOTATION_MARK = "\""; /** * <p> * Replaces all quotation marks that match {@link #QUOTATION_MARKS} with * {@link #CANONICAL_QUOTATION_MARK}. * </p> * * @since 1.65 */ public static String normalizeQuotationMarks(String str) { return QUOTATION_MARKS.matcher(str).replaceAll(CANONICAL_QUOTATION_MARK); } /** * <p> * A regular expression that matches various Unicode apostrophes: * </p> * <ul> * <li>U+0027 APOSTROPHE</li> * <li>U+2018 LEFT SINGLE QUOTATION MARK</li> * <li>U+2019 RIGHT SINGLE QUOTATION MARK</li> * <li>U+201B SINGLE HIGH-REVERSED-9 QUOTATION MARK</li> * </ul> * * @since 1.65 * @see http://en.wikipedia.org/wiki/Quotation_mark_glyphs */ public static final java.util.regex.Pattern APOSTROPHES = java.util.regex.Pattern.compile("'|\u2018|\u2019|\u201b"); /** * <p> * The canonical apostrophe glyph ({@value #CANONICAL_APOSTROPHE}). * </p> * * @since 1.65 */ public static final String CANONICAL_APOSTROPHE = "'"; /** * <p> * Replaces all apostrophes that match {@link #APOSTROPHES} with * {@link #CANONICAL_APOSTROPHE}. * </p> * * @since 1.65 */ public static String normalizeApostrophes(String str) { return APOSTROPHES.matcher(str).replaceAll(CANONICAL_APOSTROPHE); } /** * <p> * A regular expression that matches various Unicode dashes: * </p> * <ul> * <li>U+002D HYPHEN-MINUS</li> * <li>U+2012 FIGURE DASH</li> * <li>U+2013 EN DASH</li> * <li>U+2014 EM DASH</li> * <li>U+2015 HORIZONTAL BAR</li> * </ul> * * @since 1.65 * @see http://en.wikipedia.org/wiki/Dash */ public static final java.util.regex.Pattern DASHES = java.util.regex.Pattern.compile("-|\u2012|\u2013|\u2014|\u2015"); /** * <p> * The canonical dash glyph ({@value #CANONICAL_DASH}). * </p> * * @since 1.65 */ public static final String CANONICAL_DASH = "-"; /** * <p> * Replaces all dashes that match {@link #DASHES} with * {@link #CANONICAL_DASH}. * </p> * * @since 1.65 */ public static String normalizeDashes(String str) { return DASHES.matcher(str).replaceAll(CANONICAL_DASH); } /** * Provides a printable version of the contents of an array ob objects, * suitable for logging. * @param theArray An Object[] whe the array to be displayed. * @return aString with the printable version of the contents of the array. */ public static String toString(Object[] theArray) { // Handle a null array; if (theArray == null) { return NULL_OBJECT_PRINTABLE_TEXT; } return "[" + separatedString(theArray, DEFAULT_COLLECTION_SEPARATOR) + "]"; } /** * Provides a printable version of the contents of an array of ints, suitable * for logging. * @param theArray An int[] whe the array to be displayed. * @return aString with the printable version of the contents of the array. 
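 * For example, {@code toString(new int[] {1, 2, 3})} yields {@code [1, 2, 3]},
 * and a null array yields {@code (null)}.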
*/ public static String toString(int[] theArray) { // Handle a null array; if (theArray == null) { return NULL_OBJECT_PRINTABLE_TEXT; } return "[" + separatedString(theArray, DEFAULT_COLLECTION_SEPARATOR) + "]"; } /** * Provides a printable version of the contents of an array of longs, suitable * for logging. * @param theArray A long[] whe the array to be displayed. * @return aString with the printable version of the contents of the array. */ public static String toString(long[] theArray) { // Handle a null array; if (theArray == null) { return NULL_OBJECT_PRINTABLE_TEXT; } return "[" + separatedString(theArray, DEFAULT_COLLECTION_SEPARATOR) + "]"; } /** * <p> * Simply returns the given string, or the empty string <code>""</code> if * null, i.e. guarantees a non-null string as a result. * </p> * * @param str * A string. * @return If <code>str</code> is null, then <code>""</code>, otherwise * <code>str</code>. * @since 1.67 */ public static String nonNull(String str) { return (str == null) ? "" : str; } }
src/org/lockss/util/StringUtil.java
/* * $Id$ */ /* Copyright (c) 2000-2014 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. */ package org.lockss.util; import java.util.*; import java.io.*; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.text.*; import java.text.Normalizer.Form; import java.lang.reflect.*; import java.math.BigInteger; import org.apache.oro.text.regex.*; import org.apache.commons.lang3.StringUtils; /** * This is a class to contain generic string utilities * */ public class StringUtil { private static final String DEFAULT_COLLECTION_SEPARATOR = ", "; private static final String NULL_OBJECT_PRINTABLE_TEXT = "(null)"; private static final Logger log = Logger.getLogger(StringUtil.class); /** * Find the longest common prefix of a pair of strings. Case sensitive. * @param s1 a string * @param s2 another string * @return the longest common prefix, which may be the emopty string */ public static String commonPrefix(String s1, String s2) { char[] c1 = s1.toCharArray(); char[] c2 = s2.toCharArray(); StringBuilder sb = new StringBuilder(); for (int i=0; i<Math.min(c1.length, c2.length); i++) { if (c1[i]==c2[i]) sb.append(c1[i]); else break; } return sb.toString(); } /** * Find the longest common suffix of a pair of strings. Case sensitive. 
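 * For example, {@code commonSuffix("walking", "talking")} is {@code "alking"};
 * two strings with no common ending give the empty string.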
* @param s1 a string * @param s2 another string * @return the longest common suffix, which may be the emopty string */ public static String commonSuffix(String s1, String s2) { char[] c1 = s1.toCharArray(); char[] c2 = s2.toCharArray(); StringBuilder sb = new StringBuilder(); for (int i=1; i<=Math.min(c1.length, c2.length); i++) { if (c1[c1.length-i]==c2[c2.length-i]) sb.append(c1[c1.length-i]); else break; } return sb.reverse().toString(); } /** * Replace all occurrences of oldstr in source with newstr * @param source string to be modified * @param oldstr string to be replace * @param newstr string to replace oldstr * @return new string with oldstr replaced by newstr */ public static String replaceString(String source, String oldstr, String newstr) { int oldLen = oldstr.length(); if (oldLen == 0 || oldstr.equals(newstr)) { return source; } int thisIdx = source.indexOf(oldstr); if (thisIdx < 0) { return source; } int sourceLen = source.length(); StringBuilder sb = new StringBuilder(sourceLen); int oldIdx = 0; do { for (int ix = oldIdx; ix < thisIdx; ix++) { sb.append(source.charAt(ix)); } sb.append(newstr); oldIdx = thisIdx + oldLen; } while ((thisIdx = source.indexOf(oldstr, oldIdx)) >= 0); for (int ix = oldIdx; ix < sourceLen; ix++) { sb.append(source.charAt(ix)); } return sb.toString(); } public static String replaceFirst(String source, String oldstr, String newstr) { int oldLen = oldstr.length(); if (oldLen == 0 || oldstr.equals(newstr)) { return source; } int index = source.indexOf(oldstr); if (index < 0) { return source; } else { int sourceLen = source.length(); StringBuilder sb = new StringBuilder(sourceLen); sb.append(source.substring(0, index)); sb.append(newstr); if (index + oldLen < sourceLen) { sb.append(source.substring(index + oldLen)); } return sb.toString(); } } public static String replaceLast(String source, String oldstr, String newstr) { int oldLen = oldstr.length(); if (oldLen == 0 || oldstr.equals(newstr)) { return source; } int index = source.lastIndexOf(oldstr); if (index < 0) { return source; } else { int sourceLen = source.length(); StringBuilder sb = new StringBuilder(sourceLen); sb.append(source.substring(0, index)); sb.append(newstr); if (index + oldLen < sourceLen) { sb.append(source.substring(index + oldLen)); } return sb.toString(); } } /** * Concatenate elements of collection into string, separated by commas * @param c - Collection of object (on which toString() will be called) * @return Concatenated string */ public static String separatedString(Collection c) { return separatedString(c, DEFAULT_COLLECTION_SEPARATOR); } /** * Concatenate elements of collection into string, with separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @return Concatenated string */ public static String separatedString(Collection c, String separator) { return separatedString(c, "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of object array into string, with separators * @param arr - Array of object (on which toString() will be called) * @param separator - String to put between elements * @return Concatenated string */ public static String separatedString(Object[] arr, String separator) { return separatedString(ListUtil.fromArray(arr), "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of int array into string, with separators * @param arr - Array of int elements * @param separator - String to put between elements * @return Concatenated 
string */ public static String separatedString(int[] arr, String separator) { ArrayList col = new ArrayList(arr.length); for (int ii = 0 ; ii < arr.length ; ++ii) { col.add(Integer.toString(arr[ii])); } return separatedString(col, "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of long array into string, with separators * @param arr - Array of int elements * @param separator - String to put between elements * @return Concatenated string */ public static String separatedString(long[] arr, String separator) { ArrayList col = new ArrayList(arr.length); for (int ii = 0 ; ii < arr.length ; ++ii) { col.add(Long.toString(arr[ii])); } return separatedString(col, "", separator, "", new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, with separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param sb - StringBuilder to write result into * @return sb */ public static StringBuilder separatedString(Collection c, String separator, StringBuilder sb) { return separatedString(c, "", separator, "", sb); } /** * Concatenate elements of collection into string, delimiting each element, * adding separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param delimiter - String with which to surround each element * @return Concatenated string */ public static String separatedDelimitedString(Collection c, String separator, String delimiter) { return separatedString(c, delimiter, delimiter + separator + delimiter, delimiter, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, delimiting each element, * adding separators * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param delimiter1 - String with which to prefix each element * @param delimiter2 - String with which to suffix each element * @return Concatenated string */ public static String separatedDelimitedString(Collection c, String separator, String delimiter1, String delimiter2) { return separatedString(c, delimiter1, delimiter2 + separator + delimiter1, delimiter2, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, adding separators, * terminating with terminator * @param c - Collection of object (on which toString() will be called) * @param separator - String to put between elements * @param terminator - String with which to terminate result * @return Concatenated string */ public static String terminatedSeparatedString(Collection c, String separator, String terminator) { return separatedString(c, "", separator, terminator, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, adding separators, * delimitig each element * @param c - Collection of object (on which toString() will be called) * @param separatorFirst - String to place before first element * @param separatorInner - String with which to separate elements * @param separatorLast - String to place after last element * @return Concatenated string */ public static String separatedString(Collection c, String separatorFirst, String separatorInner, String separatorLast) { return separatedString(c, separatorFirst, separatorInner, separatorLast, new StringBuilder()).toString(); } /** * Concatenate elements of collection into string, adding separators, * delimitig each element * @param c - 
Collection of object (on which toString() will be called) * @param separatorFirst - String to place before first element * @param separatorInner - String with which to separate elements * @param separatorLast - String to place after last element * @param sb - StringBuilder to write result into * @return sb */ public static StringBuilder separatedString(Collection c, String separatorFirst, String separatorInner, String separatorLast, StringBuilder sb) { if (c == null) { return sb; } Iterator iter = c.iterator(); boolean first = true; while (iter.hasNext()) { if (first) { first = false; sb.append(separatorFirst); } else { sb.append(separatorInner); } Object obj = iter.next(); sb.append(obj == null ? NULL_OBJECT_PRINTABLE_TEXT : obj.toString()); } if (!first) { sb.append(separatorLast); } return sb; } /** Break a string at a separator char, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator char * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. * @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. * @param trimEachString is true, each string in the result will be trim()ed */ public static Vector<String> breakAt(String s, char sep, int maxItems, boolean discardEmptyStrings, boolean trimEachString) { Vector<String> res = new Vector<String>(); int len; if (s == null || (len = s.length()) == 0) { return res; } if (maxItems <= 0) { maxItems = Integer.MAX_VALUE; } for (int pos = 0; maxItems > 0; maxItems-- ) { int end = s.indexOf(sep, pos); if (end == -1) { if (pos > len) { break; } end = len; } if (!discardEmptyStrings || pos != end) { String str = s.substring(pos, end); if (trimEachString) { str = str.trim(); } if (!discardEmptyStrings || str.length() != 0) { res.addElement(str); } } pos = end + 1; } return res; } /** Break a string at a separator string, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator string * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. * @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. * @param trimEachString is true, each string in the result will be trim()ed */ public static Vector<String> breakAt(String s, String sep, int maxItems, boolean discardEmptyStrings, boolean trimEachString) { Vector res = new Vector(); int len; if (s == null || (len = s.length()) == 0) { return res; } if (maxItems <= 0) { maxItems = Integer.MAX_VALUE; } for (int pos = 0; maxItems > 0; maxItems-- ) { int end = s.indexOf(sep, pos); if (end == -1) { if (pos > len) { break; } end = len; } if (!discardEmptyStrings || pos != end) { String str = s.substring(pos, end); if (trimEachString) { str = str.trim(); } if (!discardEmptyStrings || str.length() != 0) { res.addElement(str); } } pos = end + sep.length(); } return res; } /** Break a string at a separator char, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator char * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. 
* @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. */ public static Vector<String> breakAt(String s, char sep, int maxItems, boolean discardEmptyStrings) { return breakAt(s, sep, maxItems, discardEmptyStrings, false); } /** Break a string at a separator char, returning a vector of strings. * Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator char */ public static Vector<String> breakAt(String s, char sep) { return breakAt(s, sep, 0); } /** Break a string at a separator char, returning a vector of at most * maxItems strings. Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator char * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. */ public static Vector<String> breakAt(String s, char sep, int maxItems) { return breakAt(s, sep, maxItems, false); } /** Break a string at a separator String, returning a vector of at most * maxItems strings. * @param s string containing zero or more occurrences of separator * @param sep the separator String * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. * @param discardEmptyStrings if true, empty strings (caused by delimiters * at the start or end of the string, or adjacent delimiters) will not be * included in the result. */ public static Vector<String> breakAt(String s, String sep, int maxItems, boolean discardEmptyStrings) { return breakAt(s, sep, maxItems, discardEmptyStrings, false); } /** Break a string at a separator String, returning a vector of strings. * Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator String */ public static Vector<String> breakAt(String s, String sep) { return breakAt(s, sep, 0); } /** Break a string at a separator String, returning a vector of at most * maxItems strings. Include any empty strings in the result. * @param s string containing zero or more occurrences of separator * @param sep the separator String * @param maxItems maximum size of returned vector, 0 = unlimited. If * nonzero, substrings past the nth are discarded. */ public static Vector<String> breakAt(String s, String sep, int maxItems) { return breakAt(s, sep, maxItems, false); } /** Break a string at a separator String, returning a vector of strings. * @param s string containing zero or more occurrences of separator * @param sep the separator String * @param trimAndDiscardEmpty if true, each string is trim()ed, and empty * strings (caused by delimiters at the start or end of the string, or * adjacent delimiters) will not be included in the result. */ public static Vector<String> breakAt(String s, String sep, boolean trimAndDiscardEmpty) { return breakAt(s, sep, 0, true, true); } /** * Trim the end off of a string starting at the first occurrence of any * of the characters specified. 
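 * For example, {@code truncateAtAny("path/page.html?q=1#frag", "?#")} is
 * {@code "path/page.html"}.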
* * @param str String to trim * @param chars String containing the chars to trim at * @return str turncated at the first occurrence of any of the chars, or * the original string if no occurrence */ public static String truncateAtAny(String str, String chars) { if (str == null) { return null; } if (chars != null) { for (int jx=0, len = chars.length(); jx < len; jx++) { int pos = str.indexOf(chars.charAt(jx)); if (pos >= 0) { str = str.substring(0, pos); } } } return str; } /** * Trim the end off of a string starting at the specified character. * * @param str String to trim * @param chr char to trim at * @return str turncated at the first occurrence of char, or * the original string if no occurrence */ public static String truncateAt(String str, char chr) { if (str == null) { return null; } int pos = str.indexOf(chr); if (pos < 0) { return str; } return str.substring(0, pos); } /** If string is longer than len, replace characters in the middle with * an elipsis so that the string is no longer than len * @param s the string * @param len maximum length of returned string */ public static String elideMiddleToMaxLen(String s, int len) { if (s == null || s.length() <= len) { return s; } int split = len / 2; return s.substring(0, split) + "..." + s.substring(s.length() - split); } /** Like indexOf except is case-independent */ public static int indexOfIgnoreCase(String str, String substr) { return indexOfIgnoreCase(str, substr, 0); } /** Like indexOf except is case-independent */ public static int indexOfIgnoreCase(String str, String substr, int fromIndex) { if (str == null || substr == null) { return -1; } int sublen = substr.length(); int last = str.length() - sublen; for (int ix = fromIndex; ix <= last; ix++) { if (str.regionMatches(true, ix, substr, 0, sublen)) { return ix; } } return -1; } /** Like endsWith except is case-independent */ public static boolean endsWithIgnoreCase(String str, String end) { int lend = end.length(); return str.regionMatches(true, str.length() - lend, end, 0, lend); } /** Like startsWith except is case-independent */ public static boolean startsWithIgnoreCase(String str, String start) { return str.regionMatches(true, 0, start, 0, start.length()); } /** Return true if the string has any consecutive repeated characters */ public static boolean hasRepeatedChar(String str) { if (str.length() < 2) { return false; } for (int ix = str.length() - 2; ix >= 0; ix--) { if (str.charAt(ix) == str.charAt(ix+1)) { return true; } } return false; } /** Remove the substring beginning with the final occurrence of the * separator, if any. */ public static String upToFinal(String str, String sep) { int pos = str.lastIndexOf(sep); if (pos < 0) { return str; } return str.substring(0, pos); } /** Iff the string ends with <code>end</code>, remove it. */ public static String removeTrailing(String str, String end) { if (str.endsWith(end)) { return str.substring(0, str.length() - end.length()); } return str; } /* Return the substring following the final dot */ public static String shortName(Object object) { if (object == null) { return null; } String name = object.toString(); return name.substring(name.lastIndexOf('.')+1); } /* Return the non-qualified name of the class */ public static String shortName(Class clazz) { String className = clazz.getName(); return className.substring(className.lastIndexOf('.')+1); } /* Return the non-qualified name of the method (Class.method) */ public static String shortName(Method method) { return shortName(method.getDeclaringClass()) + "." 
+ method.getName(); } public static String sanitizeToIdentifier(String name) { StringBuilder sb = new StringBuilder(); for (int ix = 0; ix < name.length(); ix++) { char ch = name.charAt(ix); if (Character.isJavaIdentifierPart(ch)) { sb.append(ch); } } return sb.toString(); } static Pattern alphanum = RegexpUtil.uncheckedCompile("([^a-zA-Z0-9])", Perl5Compiler.READ_ONLY_MASK); /** Return a copy of the string with all non-alphanumeric chars * escaped by backslash. Useful when embedding an unknown string in * a regexp */ public static String escapeNonAlphaNum(String str) { Substitution subst = new Perl5Substitution("\\\\$1"); return Util.substitute(RegexpUtil.getMatcher(), alphanum, subst, str, Util.SUBSTITUTE_ALL); } private static java.util.regex.Pattern COMBINING_DIACRIT_PAT = java.util.regex.Pattern.compile("\\p{InCombiningDiacriticalMarks}+"); /** * Normalize string by removing diacritical marks. * @param s the string * @return the string with diacritical marks removed */ static public String toUnaccented(String s) { return COMBINING_DIACRIT_PAT.matcher(Normalizer.normalize(s, Form.NFD)).replaceAll(""); } // /** Accented character table for use by {@link #toUnaccentedFast(String). // * See comments there. */ // private static final String[][] ACCENTTABLE = { // {"\u00c0","A"}, // À, A with grave // {"\u00c1","A"}, // Á, A with acute // {"\u00c2","A"}, // Â, A with circumflex // {"\u00c3","A"}, // Â, A with tilde // {"\u00c4","A"}, // Ä, A with diaeresis // {"\u00c5","A"}, // Å, A with ring above // {"\u00c6","AE"}, // Æ, AE // {"\u00c7","C"}, // Ç, C with cedilla // {"\u00c8","E"}, // È, E with grave // {"\u00c9","E"}, // É, E with acute // {"\u00ca","E"}, // Ê, E with circumflex // {"\u00cb","E"}, // Ë, E with diaeresis // {"\u00cc","I"}, // Ì, I with grave // {"\u00cd","I"}, // Í, I with acute // {"\u00ce","I"}, // Î, I with circumflex // {"\u00cf","I"}, // Ï, I with diaeresis // {"\u00d1","N"}, // Ñ, N with tilde // {"\u00d2","O"}, // Ò, O with grave // {"\u00d3","O"}, // Ó, O with acute // {"\u00d4","O"}, // Ô, O with circumflex // {"\u00d5","O"}, // Õ, O with tilde // {"\u00d6","O"}, // Ö, O with diaeresis // {"\u00d8","O"}, // Ø, O with a stroke // {"\u00d9","U"}, // Ù, U with grave // {"\u00da","U"}, // Ú, U with acute // {"\u00db","U"}, // Û, U with circumflex // {"\u00dc","U"}, // Ü, U with diaeresis // {"\u00dd","Y"}, // Ý, Y with acute // {"\u00e0","a"}, // à, a with grave // {"\u00e1","a"}, // á, a with acute // {"\u00e2","a"}, // â, a with circumflex // {"\u00e3","a"}, // ã, a with tilde // {"\u00e4","a"}, // ä, a with diaeresis // {"\u00e5","a"}, // å, a with ring above // {"\u00e6","ae"}, // æ, ae // {"\u00e7","c"}, // ç, c with cedilla // {"\u00e8","e"}, // è, e with grave // {"\u00e9","e"}, // é, e with acute // {"\u00ea","e"}, // ê, e with circumflex // {"\u00eb","e"}, // ë, e with diaeresis // {"\u00ec","i"}, // ì, i with grave // {"\u00ed","i"}, // í, i with acute // {"\u00ee","i"}, // î, i with circumflex // {"\u00ef","i"}, // ï, i with diaeresis // {"\u00f1","n"}, // ñ, n with tilde // {"\u00f2","o"}, // ò, o with grave // {"\u00f3","o"}, // ó, o with acute // {"\u00f4","o"}, // ô, o with circumflex // {"\u00f5","o"}, // õ, o with tilde // {"\u00f6","o"}, // ö, o with diaeresis // {"\u00f8","o"}, // ø, o with stroke // {"\u00f9","u"}, // ù, u with grave // {"\u00fa","u"}, // ú, u with acute // {"\u00fb","u"}, // û, u with circumflex // {"\u00fc","u"}, // ü, u with diaeresis // {"\u00fd","y"}, // ý, y with acute // {"\u00ff","y"}, // ÿ, y with diaeresis // }; // 
private static char[] AC_CHAR = new char[ACCENTTABLE.length]; // private static String[] AC_REP = new String[ACCENTTABLE.length]; // static { // for (int ix = 0; ix < ACCENTTABLE.length; ix++) { // AC_CHAR[ix] = ACCENTTABLE[ix][0].charAt(0); // AC_REP[ix] = ACCENTTABLE[ix][1]; // } // } // /** Alternate implementation of {@link @toUnaccented(String)}. Can be // * several times faster (depending on string length) but only handles // * accented characters that are in its table. */ // public static String toUnaccentedFast(String s) { // boolean modified = false; // int slen = s.length(); // StringBuilder sb = null; // outer: // for (int ix = 0; ix < slen; ix++) { // char ch = s.charAt(ix); // for (int jx = 0; jx < AC_CHAR.length; jx++) { // if (AC_CHAR[jx] == ch) { // if (!modified) { // sb = new StringBuilder(slen); // sb.append(s, 0, ix); // modified = true; // } // sb.append(AC_REP[jx]); // continue outer; // } // } // if (modified) { // sb.append(ch); // } // } // if (modified) { // return sb.toString(); // } else { // return s; // } // } // static String toUnaccentedFast0(String s) { // for (int iy = 0; iy < ACCENTTABLE.length; iy++) { // s = StringUtil.replaceString(s, ACCENTTABLE[iy][0], ACCENTTABLE[iy][1]); // } // return s; // } /** Escape values (and keys) to be included in a comma-separated string * of key=value. Comma, equals and backslash are escaped with * backslash */ public static String ckvEscape(String s) { if (s.indexOf('\\') < 0 && s.indexOf(',') < 0 && s.indexOf('=') < 0) { return s; } int len = s.length(); StringBuilder sb = new StringBuilder(len + 8); for (int ix = 0; ix < len; ix++) { char c = s.charAt(ix); switch(c) { // Special characters case '\\': sb.append("\\\\"); break; case ',': sb.append("\\,"); break; case '=': sb.append("\\="); break; default: sb.append(c); break; } } return sb.toString(); } /** Encode a string to be included in a CSV. Values containing comma, * space or quote are quoted, quotes are doubled */ public static String csvEncode(String s) { if (s.indexOf('"') >= 0) { int len = s.length(); StringBuilder sb = new StringBuilder(len + 5); sb.append("\""); for (int ix = 0; ix < len; ix++) { char c = s.charAt(ix); switch (c) { case '\"': sb.append("\"\""); break; default: sb.append(c); break; } } sb.append("\""); return sb.toString(); } if (s.indexOf(' ') >= 0 || s.indexOf(',') >= 0) { StringBuilder sb = new StringBuilder(s.length() + 2); sb.append("\""); sb.append(s); sb.append("\""); return sb.toString(); } return s; } /** Encode an array of strings to form a row in a CSV. Values containing comma, * space or quote are quoted, quotes are doubled. All encoded values are * combined with comma separators. */ public static String csvEncodeValues(String[] values) { if (values==null || values.length==0) return ""; //return csvEncodeValues(Arrays.asList(values)); StringBuilder sb = new StringBuilder(); // Build the string for those values which need the separator appended for (int i=0; i<values.length-1; i++) { sb.append(csvEncode(values[i]) + ","); } // Add the last item sb.append(csvEncode(values[values.length-1])); return sb.toString(); } /** Encode a list of strings to form a row in a CSV. Values containing comma, * space or quote are quoted, quotes are doubled. All encoded values are * combined with comma separators. 
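 * For example, the three values {@code a}, {@code b,c} and {@code d"e} are
 * encoded as the row {@code a,"b,c","d""e"}; a null or empty list yields the
 * empty string.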
*/ public static String csvEncodeValues(List<String> values) { if (values==null || values.size()==0) return ""; return csvEncodeValues(values.toArray(new String[]{})); } /** * Returns the number of instances of a particular substring in a string. * This ignores overlap, starting from the left, so 'xxxxxy' would have * 2 instances of 'xx', not 4. Empty string as a substring returns 0. */ public static int countOccurences(String str, String subStr) { int len = subStr.length(); if (len == 0) { return 0; } int pos = 0; int count = 0; while ((pos = str.indexOf(subStr, pos)) >= 0) { count++; pos += len; } return count; } /** Retpresents an integer width,height pair */ public static class CharWidthHeight { int width; int height; private CharWidthHeight(int w, int h) { width = w; height = h; } public int getWidth() { return width; } public int getHeight() { return height; } } /** Calculates the number of lines and the width (in characters) of the * longest line in a String. Not particularly efficient, shouldn't be * used on huge Strings */ public static CharWidthHeight countWidthAndHeight(String str) { if (str == null || str.length() == 0) { return new CharWidthHeight(0, 0); } int height = 1; int width = 0; int maxWidth = 0; int len = str.length(); for (int pos = 0; pos < len; pos++) { char c = str.charAt(pos); if (c == '\r' || c == '\n') { if (c == '\r' && pos+1 < len && str.charAt(pos+1) == '\n') { pos++; } // Don't increment height if string ends with a newline if (pos+1 < len) { height++; } if (width > maxWidth) { maxWidth = width; } width = 0; } else { width++; } } if (width > maxWidth) { maxWidth = width; } return new CharWidthHeight(maxWidth, height); } /** * Get the text between two other chunks of text. * @param line a String containing some text sandwiched between two known pieces of text * @param beginFlag the String coming before the required text * @param endFlag the String coming after the required text * @return the extracted text, or null if the string did not fit the specified format * @author Neil Mayo */ public static String getTextBetween(String line, String beginFlag, String endFlag){ int tBegin = StringUtil.indexOfIgnoreCase(line, beginFlag); // Get the first position of the endFlag appearing after the beginFlag tBegin += beginFlag.length(); int tEnd = StringUtil.indexOfIgnoreCase(line, endFlag, tBegin); // Check that the flags were found if(tBegin < 0 || tEnd<0 || tBegin>tEnd) { return null; } return line.substring(tBegin, tEnd); } /** Return a reader that transforms platform newline sequences to standard * newline characters. * @param r a Reader * @return a filtered reader that transforms platform newline sequences to standard * newline characters. */ public static Reader getLineReader(final Reader r) { return new Reader() { boolean saw_CR = false; final char[] cb = new char[1]; public int read(char cbuf[], int off, int len) throws IOException { int i; int n = 0; for (i = 0; i < len; i++) { if ((n = r.read(cb, 0, 1)) <= 0) { break; } if (saw_CR) { saw_CR = false; if (cb[0] == '\n') { if (r.read(cb, 0, 1) <= 0) { break; } } } if (cb[0] == '\r') { saw_CR = true; cb[0] = '\n'; } cbuf[off+i] = cb[0]; } return (i == 0) ? n : i; } public void close() throws IOException { r.close(); } }; } /** Return a reader that transforms platform newline sequences to standard * newline characters. * @param in an input stream * @return a filtered reader that transforms platform newline sequences to standard * newline characters. 
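 * Reading the characters {@code "a\r\nb\rc"} through the returned reader, for
 * example, produces {@code "a\nb\nc"}.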
*/ public static Reader getLineReader(InputStream in) { return getLineReader(in, null); } /** Return a reader that transforms platform newline sequences to standard * newline characters. * @param in an input stream * @param encoding the character encoding * @return a filtered reader that transforms platform newline sequences to standard * newline characters. */ public static Reader getLineReader(InputStream in, String encoding) { return getLineReader(StreamUtil.getReader(in, encoding)); } /** Return a reader that removes backslash-newline sequences * (line-continuation) * @param r a Reader * @return a filtered reader that removes line-continuation sequences */ public static Reader getLineContinuationReader(final Reader r) { return new Reader() { boolean saw_bslash = false; final char[] cb = new char[1]; int lastch = -1; public int read(char cbuf[], int off, int len) throws IOException { int i; int n = 0; int endoff = off + len; while (off < endoff) { // if have a character waiting, emit it if (lastch >= 0) { cbuf[off++] = (char)lastch; lastch = -1; } else { if ((n = r.read(cb, 0, 1)) <= 0) { // end of input. do we have a hanging backslash? if (saw_bslash) { cbuf[off++] = '\\'; saw_bslash = false; } break; } switch (cb[0]) { case '\\': if (saw_bslash) { // if already seen a backslash, output that one cbuf[off++] = '\\'; } else { saw_bslash = true; } break; case '\n': if (saw_bslash) { saw_bslash = false; } else { cbuf[off++] = cb[0]; } break; default: if (saw_bslash) { cbuf[off++] = '\\'; saw_bslash = false; lastch = cb[0]; } else { cbuf[off++] = cb[0]; } break; } } } int nread = len - (endoff - off); return nread == 0 ? -1 : nread; } public void close() throws IOException { r.close(); } }; } /** Reads a line from a BufferedReader, interpreting backslash-newline as * line-continuation. */ public static String readLineWithContinuation(BufferedReader rdr) throws IOException { StringBuilder sb = null; while (true) { String s = rdr.readLine(); if (s == null) { if (sb == null || sb.length() == 0) { return null; } else { return sb.toString(); } } if (s.endsWith("\\")) { if (sb == null) { sb = new StringBuilder(120); } sb.append(s, 0, s.length() - 1); } else if (sb == null || sb.length() == 0) { return s; } else { sb.append(s); return sb.toString(); } } } /** Reads line from a BuffereReader into a StringBuilder, interpreting * backslash-newline as line-continuation, until either end-of-stream or * maxSize chars read. May read one more line beyond maxSize. */ public static boolean readLinesWithContinuation(BufferedReader rdr, StringBuilder sb, int maxSize) throws IOException { while (true) { String s = rdr.readLine(); if (s == null) { return sb.length() != 0; } if (s.endsWith("\\")) { sb.append(s, 0, s.length() - 1); continue; } sb.append(s, 0, s.length()); sb.append("\n"); if (sb.length() >= maxSize) { return true; } } } /** Return a string with lines from a reader, separated by a newline * character. The reader is not closed. Throw if more than maxSize * chars. Reader is wrapped with a reader returned by {@link * #getLineReader(Reader) before processing. */ public static String fromReader(Reader r, int maxSize) throws IOException { r = getLineReader(r); char[] buf = new char[1000]; StringBuilder sb = new StringBuilder(1000); int len; while ((len = r.read(buf)) >= 0) { sb.append(buf, 0, len); if (maxSize > 0 && sb.length() > maxSize) { throw new FileTooLargeException(); } } return sb.toString(); } /** Read chars from a Reader into a StringBuilder up to maxSize. 
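 * For example, {@code fillFromReader(r, sb, 4096)} appends up to 4096 chars to
 * {@code sb} and returns false only when the reader was already at end of
 * stream.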
* @param r the Reader to read from * @param sb the StringBuilder to fill * @param maxChars maximum number of chars to read * @return true if anything was read (false if reader was already at * eof) */ public static boolean fillFromReader(Reader r, StringBuilder sb, int maxChars) throws IOException { char[] buf = new char[1000]; int tot = 0; int len; while (tot < maxChars && (len = r.read(buf, 0, Math.min(buf.length, maxChars - tot))) >= 0) { sb.append(buf, 0, len); tot += len; } return tot != 0; } /** Return a string with lines from a reader, separated by a newline character. * Reader is wrapped with a reader returned by {@link #getLineReader(Reader) * before processing. */ public static String fromReader(Reader r) throws IOException { return fromReader(r, -1); } /** Return a string with lines from an InputStream separated by a newline * character using the default encoding*/ public static String fromInputStream(InputStream in) throws IOException { // use our default encoding rather than system default return fromReader(new InputStreamReader(in, Constants.DEFAULT_ENCODING)); } /** Return a string with lines from an InputStream separated by a newline * character using the default encoding. The InputStream is not closed. * Throw if more than maxSize chars */ public static String fromInputStream(InputStream in, int maxSize) throws IOException { // use our default encoding rather than system default return fromReader(new InputStreamReader(in, Constants.DEFAULT_ENCODING), maxSize); } /** Return a string with lines from the file path separated by a newline character */ public static String fromFile(String path) throws IOException { return fromFile(new File(path)); } /** Return a string with lines from the file separated by a newline character */ public static String fromFile(File file) throws IOException { Reader rdr = new FileReader(file); try { return fromReader(rdr); } finally { IOUtil.safeClose(rdr); } } /** Write a string to a File */ public static void toFile(File file, String s) throws IOException { Writer w = new BufferedWriter(new FileWriter(file)); try { StringUtil.toWriter(w, s); } finally { IOUtil.safeClose(w); } } /* Write the string to the OutputStream */ public static void toOutputStream(OutputStream out, String s) throws IOException { toWriter(new BufferedWriter(new OutputStreamWriter(out)),s); } /* Write the string to the Writer */ public static void toWriter(Writer w, String s) throws IOException { w.write(s); w.flush(); } /** * Test whether a string is null or the empty string * @param s the string * @return true if s is null or the empty string */ public static boolean isNullString(String s) { return s == null || s.length() == 0; } /** * Test whether a string contains only 7-bit ascii characters * @param s the string * @return true if all chars in s are ascii */ public static boolean isAscii(String s) { for (int ix = 0; ix < s.length(); ix++) { if (s.charAt(ix) > 0x7F) { return false; } } return true; } /** * Same as str.compareTo(str), except null is the lowest value. */ public static int compareToNullLow(String str1, String str2) { if (str1 == null) { return (str2 == null) ? 0 : -1; } if (str2 == null) { return 1; } return str1.compareTo(str2); } /** * Same as str.compareTo(str), except null is the highest value. */ public static int compareToNullHigh(String str1, String str2) { if (str1 == null) { return (str2 == null) ? 0 : 1; } if (str2 == null) { return -1; } return str1.compareTo(str2); } /** * Comparison that matches the traversal order of CachedUrlSet iterators. 
* Differs from natural sort order in that '/' sorts before any other * char, because the tree traversal is pre-order. */ public static int preOrderCompareTo(String str1, String str2) { int len1 = str1.length(); int len2 = str2.length(); int n = Math.min(len1, len2); for (int ix = 0; ix < n; ix++) { char c1 = str1.charAt(ix); char c2 = str2.charAt(ix); if (c1 != c2) { if (c1 == '/') { return -1; } if (c2 == '/') { return 1; } return c1 - c2; } } return len1 - len2; } /** * Comparison that matches the traversal order of CachedUrlSet iterators. * Differs from natural sort order in that '/' sorts before any other * char, because the tree traversal is pre-order. Null sorts after all * nun-null strings. */ public static int preOrderCompareToNullHigh(String str1, String str2) { if (str1 == null) { return (str2 == null) ? 0 : 1; } if (str2 == null) { return -1; } return StringUtil.preOrderCompareTo(str1, str2); } public static Comparator PRE_ORDER_COMPARATOR = new PreOrderComparator(); /** Comparator that aorts in pre-order traversal order. null is not * permitted. */ public static class PreOrderComparator implements Comparator<String> { public int compare(String s1, String s2) { return StringUtil.preOrderCompareTo(s1, s2); } } /** * Compare two strings for equality or both null. * @param s1 string 1 * @param s2 string 2 * @return true if strings are equal or both null */ public static boolean equalStrings(String s1, String s2) { if (s1 == null) { return s2 == null; } else { return s1.equals(s2); } } /** * Compare two strings for case-independent equality or both null. * @param s1 string 1 * @param s2 string 2 * @return true if strings are equal or both null */ public static boolean equalStringsIgnoreCase(String s1, String s2) { if (s1 == null) { return s2 == null; } else { return s1.equalsIgnoreCase(s2); } } /** Sort a set of strings case-independently */ public static Set<String> caseIndependentSortedSet(Collection<String> coll) { Set<String> res = new TreeSet(new CaseIndependentComparator()); res.addAll(coll); return res; } public static class CaseIndependentComparator implements Comparator<String> { public int compare(String s1, String s2) { // Don't allow null to cause NPE if (s1 == null) { return (s2 == null ? 0 : -1); } else if (s2 == null) { return 1; } return s1.compareToIgnoreCase(s2); } } /** Like System.arrayCopy, for characters within one StringBuilder. * @param sb the buffer * @param srcPos chars copied starting from here * @param destPos chars copied starting to here * @param len number of chars copied */ public static void copyChars(StringBuilder sb, int srcPos, int destPos, int len) { if (srcPos > destPos) { while (--len >= 0) { sb.setCharAt(destPos++, sb.charAt(srcPos++)); } } else if (srcPos < destPos) { while (--len >= 0) { sb.setCharAt(destPos + len, sb.charAt(srcPos + len)); } } } private static long gensymCtr = 0; /** * Generate a unique string. * @param base the initial substring * @return a string consisting of the supplied initial substring and a * unique counter value. */ public static String gensym(String base) { return base + (gensymCtr++); } /** Return a string of n spaces */ public static String tab(int n) { return StringUtils.repeat(" ", n); } /** * Trim a hostname, removing "www." from the front, if present, and the * TLD from the end. If this would result in an empty string, the entire * name is returned. 
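 * For example, {@code trimHostName("www.example.com")} is {@code "example"},
 * while {@code trimHostName("www.com")} is returned unchanged.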
* @param hostname a hostname string * @return the trimmed hostname */ public static String trimHostName(String hostname) { if (hostname == null) return null; int start = 0; if (hostname.regionMatches(true, 0, "www.", 0, 4)) { start = 4; } int end = hostname.lastIndexOf('.'); if (end <= start) { // if trimming www left nothing but TLD, return whole name return hostname; } return hostname.substring(start, end); } /** Parse a string as a time interval. An interval is specified as an * integer with an optional suffix. No suffix means milliseconds, s, m, * h, d, w indicates seconds, minutes, hours, days and weeks * respectively. As a special case, "ms" means milliseconds. * @param str the interval string * @return interval in milliseconds */ // tk - extend to accept combinations: xxHyyMzzS, etc. public static long parseTimeInterval(String str) { try { int len = str.length(); char suffix = str.charAt(len - 1); String numstr; long mult = 1; if (Character.isDigit(suffix)) { numstr = str; } else { if (StringUtil.endsWithIgnoreCase(str, "ms")) { numstr = str.substring(0, len - 2); } else { numstr = str.substring(0, len - 1); switch (Character.toUpperCase(suffix)) { case 'S': mult = Constants.SECOND; break; case 'M': mult = Constants.MINUTE; break; case 'H': mult = Constants.HOUR; break; case 'D': mult = Constants.DAY; break; case 'W': mult = Constants.WEEK; break; case 'Y': mult = Constants.YEAR; break; default: throw new NumberFormatException("Illegal time interval suffix"); } } } return Long.parseLong(numstr) * mult; } catch (IndexOutOfBoundsException e) { throw new NumberFormatException("empty string"); } } private static Pattern sizePat = RegexpUtil.uncheckedCompile("^([0-9.]+)\\s*([a-zA-Z]*)", Perl5Compiler.READ_ONLY_MASK); private static String suffixes[] = {"b", "KB", "MB", "GB", "TB", "PB"}; /** Parse a string as a size in bytes, with a optional suffix. No suffix * means bytes, kb, mb, gb, tb indicate kilo-, mega-, giga, tera-bytes * respectively. 
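 * For example, {@code parseSize("100")} is 100, {@code parseSize("4KB")} is
 * 4096 and {@code parseSize("1.5 MB")} is 1572864.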
* @param str the size string * @return size in bytes */ public static long parseSize(String str) { str = str.trim(); Perl5Matcher matcher = RegexpUtil.getMatcher(); if (!matcher.contains(str, sizePat)) { throw new NumberFormatException("Illegal size syntax: " + str); } MatchResult matchResult = matcher.getMatch(); String num = matchResult.group(1); String suffix = matchResult.group(2); try { float f = Float.parseFloat(num); long mult = 1; if (StringUtil.isNullString(suffix)) { return (long)f; } for (int ix = 0; ix < suffixes.length; ix++) { if (suffix.equalsIgnoreCase(suffixes[ix])) { return (long)(f * mult); } mult *= 1024; } throw new NumberFormatException("Illegal size suffix: " + str); } catch (NumberFormatException ex) { throw new NumberFormatException("Illegal size syntax: " + str); } } /** Trim leading and trailing blank lines from a block of text */ public static String trimBlankLines(String txt) { StringBuilder buf = new StringBuilder(txt); while (buf.length()>0 && buf.charAt(0) == '\n') { buf.deleteCharAt(0); } while (buf.length()>0 && buf.charAt(buf.length() - 1) == '\n') { buf.deleteCharAt(buf.length() - 1); } return buf.toString(); } private static Pattern nonUnixEol = RegexpUtil.uncheckedCompile("\\r\\n?", Perl5Compiler.MULTILINE_MASK); /** Normalize newlines to Unix eol */ public static String normalizeEols(String str) { Substitution subst = new Perl5Substitution("\n"); return Util.substitute(RegexpUtil.getMatcher(), nonUnixEol, subst, str, Util.SUBSTITUTE_ALL); } private static Pattern nlEol = RegexpUtil.uncheckedCompile("([\n\r][\n\t ]*)", Perl5Compiler.MULTILINE_MASK); /** Trim EOLs and leading whitespace from a block of text */ public static String trimNewlinesAndLeadingWhitespace(String str) { if (str.indexOf("\n") == -1 && str.indexOf("\r") == -1) { return str; } Substitution subst = new Perl5Substitution(""); return Util.substitute(RegexpUtil.getMatcher(), nlEol, subst, str, Util.SUBSTITUTE_ALL); } // Unit Descriptor private static class UD { String str; // suffix string long millis; // milliseconds in unit int threshold; // min units to output String stop; // last unit to output if this matched UD(String str, long millis) { this(str, millis, 1); } UD(String str, long millis, int threshold) { this(str, millis, threshold, null); } UD(String str, long millis, int threshold, String stop) { this.str = str; this.millis = millis; this.threshold = threshold; this.stop = stop; } } static UD units[] = { new UD("w", Constants.WEEK, 3, "h"), new UD("d", Constants.DAY, 1, "m"), new UD("h", Constants.HOUR), new UD("m", Constants.MINUTE), new UD("s", Constants.SECOND, 0), }; public static String protectedDivide(long numerator, long denominator) { return protectedDivide(numerator, denominator, "inf"); } public static String protectedDivide(long numerator, long denominator, String infStr) { if (denominator == 0) { return infStr; } long val = numerator / denominator; return String.valueOf(val); } /** Generate a string representing the time interval. 
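 * For example, 90000 milliseconds is rendered as {@code "1m30s"}, and any
 * interval under ten seconds is rendered in milliseconds, e.g. {@code "9000ms"}.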
* @param millis the time interval in milliseconds * @return a string in the form dDhHmMsS */ public static String timeIntervalToString(long millis) { StringBuilder sb = new StringBuilder(); if (millis < 0) { sb.append("-"); millis = -millis; } return posTimeIntervalToString(millis, sb); } private static String posTimeIntervalToString(long millis, StringBuilder sb) { if (millis < 10 * Constants.SECOND) { sb.append(millis); sb.append("ms"); } else { boolean force = false; String stop = null; for (int ix = 0; ix < units.length; ix++) { UD iu = units[ix]; long n = millis / iu.millis; if (force || n >= iu.threshold) { millis %= iu.millis; sb.append(n); sb.append(iu.str); force = true; if (stop == null) { if (iu.stop != null) { stop = iu.stop; } } else { if (stop.equals(iu.str)) { break; } } } } } return sb.toString(); } /** Generate a more verbose string representing the time interval. * @param millis the time interval in milliseconds * @return a string in the form "<d> days, <h> hours, <m> minutes, <s> * seconds" */ public static String timeIntervalToLongString(long millis) { StringBuilder sb = new StringBuilder(); long temp = 0; if (millis < 0) { sb.append("-"); millis = -millis; } if (millis >= Constants.SECOND) { temp = millis / Constants.DAY; if (temp > 0) { sb.append(numberOfUnits(temp, "day")); millis -= temp * Constants.DAY; if (millis >= Constants.MINUTE) { sb.append(DEFAULT_COLLECTION_SEPARATOR); } } temp = millis / Constants.HOUR; if (temp > 0) { sb.append(numberOfUnits(temp, "hour")); millis -= temp * Constants.HOUR; if (millis >= Constants.MINUTE) { sb.append(DEFAULT_COLLECTION_SEPARATOR); } } temp = millis / Constants.MINUTE; if (temp > 0) { sb.append(numberOfUnits(temp, "minute")); millis -= temp * Constants.MINUTE; if(millis >= Constants.SECOND) { sb.append(DEFAULT_COLLECTION_SEPARATOR); } } temp = millis / Constants.SECOND; if (temp > 0) { sb.append(numberOfUnits(temp, "second")); } return sb.toString(); } else { return "0 seconds"; } } private static final NumberFormat fmt_1dec = new DecimalFormat("0.0"); private static final NumberFormat fmt_0dec = new DecimalFormat("0"); static final String[] byteSuffixes = {"KB", "MB", "GB", "TB", "PB"}; public static String sizeToString(long size) { if (size < 1024) { return size + "B"; } return sizeKBToString(size / 1024); } public static String sizeKBToString(long size) { double base = 1024.0; double x = (double)size; int len = byteSuffixes.length; for (int ix = 0; ix < len; ix++) { if (x < base || ix == len-1) { StringBuilder sb = new StringBuilder(); if (x < 10.0) { sb.append(fmt_1dec.format(x)); } else { sb.append(fmt_0dec.format(x)); } sb.append(byteSuffixes[ix]); return sb.toString(); } x = x / base; } return ""+size; } /** Remove the first line of the stack trace, iff it duplicates the end * of the exception message */ public static String trimStackTrace(String msg, String trace) { int pos = trace.indexOf("\n"); if (pos > 0) { String l1 = trace.substring(0, pos); if (msg.endsWith(l1)) { return trace.substring(pos + 1); } } return trace; } /** Translate an exception's stack trace to a string. */ public static String stackTraceString(Throwable th) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); th.printStackTrace(pw); return sw.toString(); } /** Convert the first character and every character that follows a space * to uppercase. */ public static String titleCase(String txt) { return titleCase(txt, ' '); } /** Convert the first character and every character that follows the * separator char to uppercase. 
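 * For example, {@code titleCase("hello-world", '-')} is {@code "Hello-World"}.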
*/ public static String titleCase(String txt, char separator) { int len = txt.length(); if (len == 0) { return ""; } StringBuilder buf = new StringBuilder(txt); buf.setCharAt(0,Character.toUpperCase(buf.charAt(0))); for (int i=1; i<len; i++) { if (buf.charAt(i-1)==separator) { buf.setCharAt(i,Character.toUpperCase(buf.charAt(i))); } } return buf.toString(); } public static class FileTooLargeException extends IOException { public FileTooLargeException() { super(); } public FileTooLargeException(String message) { super(message); } } /** * Returns the index of the nth occurrence of searchStr in sourceStr or -1 * if there aren't n instances of searchStr in sourceStr */ public static int nthIndexOf(int n, String sourceStr, String searchStr) { if (n <= 0) { return -1; } int idx = -1; do { idx = sourceStr.indexOf(searchStr, idx+1); } while (--n > 0 && idx >= 0); return idx; } /** * Scans through the reader looking for the String str; case sensitive * @param reader Reader to search; it will be at least partially consumed * @return true if the string is found, false if the end of reader is * reached without finding the string */ public static boolean containsString(Reader reader, String str) throws IOException { return StringUtil.containsString(reader, str, false); } public static boolean containsString(Reader reader, String str, int buffSize) throws IOException { return containsString(reader, str, false, buffSize); } public static boolean containsString(Reader reader, String str, boolean ignoreCase) throws IOException { return containsString(reader, str, ignoreCase, 4096); } /** * Scans through the reader looking for the String str * @param reader Reader to search; it will be at least partially consumed * @param ignoreCase whether to ignore case or not * @return true if the string is found, false if the end of reader is * reached without finding the string */ public static boolean containsString(Reader reader, String str, boolean ignoreCase, int buffSize) throws IOException { if (reader == null) { throw new NullPointerException("Called with a null reader"); } else if (str == null) { throw new NullPointerException("Called with a null string"); } else if (str.length() == 0) { throw new IllegalArgumentException("Called with a blank String"); } else if (buffSize <= 0) { throw new IllegalArgumentException("Called with a buffSize < 0"); } int strlen = str.length(); // simpiify boundary conditions by ensuring buffer always larger than // search string buffSize = Math.max(buffSize, strlen * 2); int shiftSize = buffSize - (strlen - 1); String regex = java.util.regex.Pattern.quote(str); int flags = ignoreCase ? java.util.regex.Pattern.CASE_INSENSITIVE : 0; java.util.regex.Pattern pat = java.util.regex.Pattern.compile(regex, flags); StringBuilder sb = new StringBuilder(buffSize); while (StringUtil.fillFromReader(reader, sb, buffSize - sb.length())) { java.util.regex.Matcher m1 = pat.matcher(sb); if (m1.find()) { return true; } if (sb.length() < buffSize) { // avoid unnecessary shift on final iteration return false; } sb.delete(0, shiftSize); } return false; } // BoyerMoore.java throws on some inputs with ignoreCase = true. // This implementation retired pending a fix. 
/*public*/ static boolean containsString_bug(Reader reader, String str, boolean ignoreCase, int buffSize) throws IOException { if (reader == null) { throw new NullPointerException("Called with a null reader"); } else if (str == null) { throw new NullPointerException("Called with a null string"); } else if (str.length() == 0) { throw new IllegalArgumentException("Called with a blank String"); } else if (buffSize <= 0) { throw new IllegalArgumentException("Called with a buffSize < 0"); } if (ignoreCase) { str = str.toLowerCase(); } char[] buff = new char[buffSize]; BoyerMoore bm = new BoyerMoore(ignoreCase); bm.compile(str); int bcount; int numPartialMatch = 0; while ((bcount = StreamUtil.readChars(reader, buff, buff.length)) > 0) { if (numPartialMatch > 0 && bcount >= (str.length() - numPartialMatch)) { //we previously matched this many chars at the end of the last buff if (log.isDebug3()) { log.debug3("Found a partial match before in last buffer: "+ str.substring(numPartialMatch)+"; looking for the rest"); } if (startsWith(buff, str.substring(numPartialMatch), ignoreCase)) { if (log.isDebug3()) {log.debug3("Found the second half of a partial match");} return true; } } if (bm.search(buff, 0, bcount) >= 0) { if (log.isDebug3()) {log.debug3("Found a full match in one buffer");} return true; } else { numPartialMatch = bm.partialMatch(); if (log.isDebug3() && numPartialMatch != 0) { log.debug3("Found a partial match of "+numPartialMatch); } } } return false; } /** * * @return true if the first str.length() chars in buffer match str */ private static boolean startsWith(char[]buffer, String str, boolean ignoreCase) { for (int ix=0; ix<(str.length()); ix++) { if (Character.toLowerCase(str.charAt(ix)) != Character.toLowerCase(buffer[ix])) { if (log.isDebug3()) { log.debug3(str.charAt(ix)+" didn't match "+ buffer[ix]); } return false; } } return true; } /** Return a string like "0 units", "1 unit", "n units" * @param number the number of whatever units * @param unit Single form of unit, plural formed by adding "s" */ public static String numberOfUnits(long number, String unit) { return numberOfUnits(number, unit, unit + "s"); } /** Return a string like "0 units", "1 unit", "n units" * @param number the number of whatever units * @param unit Single form of unit * @param pluralUnit plural form of unit */ public static String numberOfUnits(long number, String unit, String pluralUnit) { if (number == 1) { return number + " " + unit; } else { return number + " " + pluralUnit; } } public static boolean equalsIgnoreCase(char kar1, char kar2) { return (Character.toLowerCase(kar1) == Character.toLowerCase(kar2)); } /** * Provides a 64-bit hash of a text string. * * @param payload * A String for which the hash is to be provided. * @return a long with the 64-bit hash of the text string. * @throws Exception * if there are problems computing the LOCKSS identifier. */ public static long hash64(String payload) { try { byte[] digest = MessageDigest.getInstance("MD5").digest(payload.getBytes("UTF-8")); return new BigInteger(1, digest).longValue(); } catch (NoSuchAlgorithmException nsae) { throw new RuntimeException(nsae); } catch (UnsupportedEncodingException uea) { throw new RuntimeException(uea); } } private static java.util.regex.Pattern BLANK_NLS_AND_TABS_PATTERN = java.util.regex.Pattern.compile("[\n\t]+"); /** * Blanks out newlines and tabs in a text string. * * @param text * A String with the text string to be processed. * @return a String with newlines and tabs replaced by spaces. 
*/ public static String blankOutNlsAndTabs(String text) { if (text != null) { return BLANK_NLS_AND_TABS_PATTERN.matcher(text).replaceAll(" "); } return ""; } /** * <p> * A regular expression that matches various Unicode quotation marks: * </p> * <ul> * <li>U+0022 QUOTATION MARK</li> * <li>U+201C LEFT DOUBLE QUOTATION MARK</li> * <li>U+201D RIGHT DOUBLE QUOTATION MARK</li> * <li>U+201F DOUBLE HIGH-REVERSED-9 QUOTATION MARK</li> * </ul> * * @since 1.65 * @see http://en.wikipedia.org/wiki/Quotation_mark_glyphs */ public static final java.util.regex.Pattern QUOTATION_MARKS = java.util.regex.Pattern.compile("\"|\u201c|\u201d|\u201f"); /** * <p> * The canonical quotation mark glyph ({@value #CANONICAL_QUOTATION_MARK}). * </p> * * @since 1.65 */ public static final String CANONICAL_QUOTATION_MARK = "\""; /** * <p> * Replaces all quotation marks that match {@link #QUOTATION_MARKS} with * {@link #CANONICAL_QUOTATION_MARK}. * </p> * * @since 1.65 */ public static String normalizeQuotationMarks(String str) { return QUOTATION_MARKS.matcher(str).replaceAll(CANONICAL_QUOTATION_MARK); } /** * <p> * A regular expression that matches various Unicode apostrophes: * </p> * <ul> * <li>U+0027 APOSTROPHE</li> * <li>U+2018 LEFT SINGLE QUOTATION MARK</li> * <li>U+2019 RIGHT SINGLE QUOTATION MARK</li> * <li>U+201B SINGLE HIGH-REVERSED-9 QUOTATION MARK</li> * </ul> * * @since 1.65 * @see http://en.wikipedia.org/wiki/Quotation_mark_glyphs */ public static final java.util.regex.Pattern APOSTROPHES = java.util.regex.Pattern.compile("'|\u2018|\u2019|\u201b"); /** * <p> * The canonical apostrophe glyph ({@value #CANONICAL_APOSTROPHE}). * </p> * * @since 1.65 */ public static final String CANONICAL_APOSTROPHE = "'"; /** * <p> * Replaces all apostrophes that match {@link #APOSTROPHES} with * {@link #CANONICAL_APOSTROPHE}. * </p> * * @since 1.65 */ public static String normalizeApostrophes(String str) { return APOSTROPHES.matcher(str).replaceAll(CANONICAL_APOSTROPHE); } /** * <p> * A regular expression that matches various Unicode dashes: * </p> * <ul> * <li>U+002D HYPHEN-MINUS</li> * <li>U+2012 FIGURE DASH</li> * <li>U+2013 EN DASH</li> * <li>U+2014 EM DASH</li> * <li>U+2015 HORIZONTAL BAR</li> * </ul> * * @since 1.65 * @see http://en.wikipedia.org/wiki/Dash */ public static final java.util.regex.Pattern DASHES = java.util.regex.Pattern.compile("-|\u2012|\u2013|\u2014|\u2015"); /** * <p> * The canonical dash glyph ({@value #CANONICAL_DASH}). * </p> * * @since 1.65 */ public static final String CANONICAL_DASH = "-"; /** * <p> * Replaces all dashes that match {@link #DASHES} with * {@link #CANONICAL_DASH}. * </p> * * @since 1.65 */ public static String normalizeDashes(String str) { return DASHES.matcher(str).replaceAll(CANONICAL_DASH); } /** * Provides a printable version of the contents of an array ob objects, * suitable for logging. * @param theArray An Object[] whe the array to be displayed. * @return aString with the printable version of the contents of the array. */ public static String toString(Object[] theArray) { // Handle a null array; if (theArray == null) { return NULL_OBJECT_PRINTABLE_TEXT; } return "[" + separatedString(theArray, DEFAULT_COLLECTION_SEPARATOR) + "]"; } /** * Provides a printable version of the contents of an array of ints, suitable * for logging. * @param theArray An int[] whe the array to be displayed. * @return aString with the printable version of the contents of the array. 
*/ public static String toString(int[] theArray) { // Handle a null array; if (theArray == null) { return NULL_OBJECT_PRINTABLE_TEXT; } return "[" + separatedString(theArray, DEFAULT_COLLECTION_SEPARATOR) + "]"; } /** * Provides a printable version of the contents of an array of longs, suitable * for logging. * @param theArray A long[] whe the array to be displayed. * @return aString with the printable version of the contents of the array. */ public static String toString(long[] theArray) { // Handle a null array; if (theArray == null) { return NULL_OBJECT_PRINTABLE_TEXT; } return "[" + separatedString(theArray, DEFAULT_COLLECTION_SEPARATOR) + "]"; } /** * <p> * Simply returns the given string, or the empty string <code>""</code> if * null, i.e. guarantees a non-null string as a result. * </p> * * @param str * A string. * @return If <code>str</code> is null, then <code>""</code>, otherwise * <code>str</code>. * @since 1.67 */ public static String nonNull(String str) { return (str == null) ? "" : str; } }
Javadoc, logging.
src/org/lockss/util/StringUtil.java
Javadoc, logging.
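A minimal usage sketch for the StringUtil fragment shown in this record (the package is taken from the file path above; the sample inputs and bracketed outputs are illustrative, and the exact interval formatting depends on the units table and DEFAULT_COLLECTION_SEPARATOR defined elsewhere in the class):

import java.io.StringReader;

import org.lockss.util.StringUtil;

public class StringUtilDemo {
    public static void main(String[] args) throws Exception {
        // 90061000 ms = 1 day + 1 hour + 1 minute + 1 second
        System.out.println(StringUtil.timeIntervalToLongString(90061000L)); // "1 day, 1 hour, 1 minute, 1 second" (separator depends on DEFAULT_COLLECTION_SEPARATOR)
        System.out.println(StringUtil.sizeToString(512L));    // "512B"
        System.out.println(StringUtil.sizeKBToString(1536L)); // 1536 KB -> "1.5MB"
        // Buffered, optionally case-insensitive scan of a Reader for a substring
        boolean found = StringUtil.containsString(new StringReader("Hello LOCKSS world"), "lockss", true);
        System.out.println(found); // true
    }
}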
Java
bsd-3-clause
5b1c10d4a53811f46681b317941f80bbc5d9f832
0
Team2168/FRC2014_Main_Robot
package org.team2168.commands.drivetrain; import org.team2168.RobotMap; import org.team2168.commands.CommandBase; import org.team2168.subsystems.Vision; import org.team2168.utils.Util; public class RotateDrivetrainRelative extends CommandBase { private double endAngle = 0.0; private double startAngle = 0.0; private double commandedAngle = 0.0; private boolean finished = false; private boolean getAngleFromCam = false; private static double maxSpeed; public static final double kP = 0.01; /** * * @param angle, angle to rotate relative to current position, if known at instantiation time., set next param to false, to rotate to this angle. * @param getAngleFromVision, if you would like this command to rotate to the relative angle provided by the camera at the moment the command is ran, set this boolean to true, the first parameter angle is ignored, and the current output of the Vision subsystem is used instead. */ public RotateDrivetrainRelative(double angle, boolean getAngleFromVision) { requires(drivetrain); this.getAngleFromCam = getAngleFromVision; this.commandedAngle = angle; } /** * this method gets ran once everytime the command is started */ protected void initialize() { finished = false; if (this.getAngleFromCam) commandedAngle = Vision.getInstance().getLeftOrRightHot() * RobotMap.rotationAngleToHot.getDouble(); drivetrain.drive(0, 0); //find current angle and calculate offset startAngle = drivetrain.getGyroAngle(); endAngle = startAngle + commandedAngle; } /** * this method is called periodically until the command finishes */ protected void execute() { double currentAngle = drivetrain.getGyroAngle(); //Use proportional controller to achieve better response of rotation command // calculate current error double error = endAngle - currentAngle; // if the Kp returns more than 1, we limit the speed double speed = Util.limit(RobotMap.rotateDriveKP.getDouble() * error, RobotMap.rotateDriveMaxSpeed.getDouble()); speed = Math.abs(speed); // if command is lower than minspeed needed to move drivetrain, we up it // to the min speed if (speed <= RobotMap.minDriveSpeed.getDouble()) speed = RobotMap.minDriveSpeed.getDouble(); // if you are within your goal angle. Stop if (endAngle < startAngle && currentAngle < endAngle || endAngle > startAngle && currentAngle > endAngle) { // We are done drivetrain.drive(0, 0); finished = true; } else if (currentAngle < endAngle) { // Turn to the right drivetrain.driveRight(-speed); drivetrain.driveLeft(speed); } else { // Turn to the left drivetrain.driveRight(speed); drivetrain.driveLeft(-speed); } } protected void interrupted() { drivetrain.drive(0, 0); } protected boolean isFinished() { return finished; } protected void end() { drivetrain.drive(0, 0); } }
src/org/team2168/commands/drivetrain/RotateDrivetrainRelative.java
package org.team2168.commands.drivetrain; import org.team2168.RobotMap; import org.team2168.commands.CommandBase; import org.team2168.subsystems.Vision; import org.team2168.utils.Util; public class RotateDrivetrainRelative extends CommandBase { private double endAngle = 0.0; private double startAngle = 0.0; private double commandedAngle = 0.0; private boolean finished = false; private boolean getAngleFromCam = false; private static double maxSpeed; public static final double kP = 0.01; /** * * @param angle, angle to rotate relative to current position, if known at instantiation time., set next param to false, to rotate to this angle. * @param getAngleFromVision, if you would like this command to rotate to the relative angle provided by the camera at the moment the command is ran, set this boolean to true, the first parameter angle is ignored, and the current output of the Vision subsystem is used instead. */ public RotateDrivetrainRelative(double angle, boolean getAngleFromVision) { requires(drivetrain); this.getAngleFromCam = getAngleFromVision; this.commandedAngle = angle; } /** * this method gets ran once everytime the command is started */ protected void initialize() { finished = false; if (this.getAngleFromCam) commandedAngle = Vision.getInstance().getCamLeftOrRightHot() * RobotMap.rotationAngleToHot.getDouble(); drivetrain.drive(0, 0); //find current angle and calculate offset startAngle = drivetrain.getGyroAngle(); endAngle = startAngle + commandedAngle; } /** * this method is called periodically until the command finishes */ protected void execute() { double currentAngle = drivetrain.getGyroAngle(); //Use proportional controller to achieve better response of rotation command // calculate current error double error = endAngle - currentAngle; // if the Kp returns more than 1, we limit the speed double speed = Util.limit(RobotMap.rotateDriveKP.getDouble() * error, RobotMap.rotateDriveMaxSpeed.getDouble()); speed = Math.abs(speed); // if command is lower than minspeed needed to move drivetrain, we up it // to the min speed if (speed <= RobotMap.minDriveSpeed.getDouble()) speed = RobotMap.minDriveSpeed.getDouble(); // if you are within your goal angle. Stop if (endAngle < startAngle && currentAngle < endAngle || endAngle > startAngle && currentAngle > endAngle) { // We are done drivetrain.drive(0, 0); finished = true; } else if (currentAngle < endAngle) { // Turn to the right drivetrain.driveRight(-speed); drivetrain.driveLeft(speed); } else { // Turn to the left drivetrain.driveRight(speed); drivetrain.driveLeft(-speed); } } protected void interrupted() { drivetrain.drive(0, 0); } protected boolean isFinished() { return finished; } protected void end() { drivetrain.drive(0, 0); } }
corrected which method is called in vision
src/org/team2168/commands/drivetrain/RotateDrivetrainRelative.java
corrected which method is called in vision
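The execute() method in the rotation command above derives its drive speed from a proportional controller whose output is clamped to a maximum and then floored at a minimum speed. A standalone sketch of that calculation, with hypothetical constants standing in for the RobotMap properties and with Util.limit() assumed to clamp its argument to the range [-max, max]:

public class RotateSpeedSketch {
    // Hypothetical stand-ins for RobotMap.rotateDriveKP, rotateDriveMaxSpeed and minDriveSpeed
    static final double KP = 0.01;
    static final double MAX_SPEED = 0.8;
    static final double MIN_SPEED = 0.2;

    // Assumed behaviour of Util.limit(): clamp a value to [-max, max]
    static double limit(double value, double max) {
        return Math.max(-max, Math.min(max, value));
    }

    // Mirrors the shape of execute(): P term, clamp, then enforce a minimum magnitude
    static double rotationSpeed(double endAngle, double currentAngle) {
        double error = endAngle - currentAngle;
        double speed = Math.abs(limit(KP * error, MAX_SPEED));
        return (speed <= MIN_SPEED) ? MIN_SPEED : speed;
    }

    public static void main(String[] args) {
        System.out.println(rotationSpeed(90.0, 0.0)); // large error, clamped to 0.8
        System.out.println(rotationSpeed(5.0, 0.0));  // small error, floored at 0.2
    }
}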
Java
bsd-3-clause
002b1ddd4b363b92269b62c412e7da28f46aed90
0
NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive,NCIP/national-biomedical-image-archive
/** * */ package gov.nih.nci.nbia.domain.operation; import gov.nih.nci.nbia.internaldomain.GeneralImage; import gov.nih.nci.nbia.internaldomain.MRImage; import gov.nih.nci.nbia.util.DicomConstants; import gov.nih.nci.nbia.util.SpringApplicationContext; import java.util.List; import java.util.Map; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; /** * */ public class MRImageOperation extends DomainOperation implements MRImageOperationInterface { private GeneralImage gi; public MRImageOperation() { } @Transactional(propagation = Propagation.REQUIRED) public Object validate(Map numbers) throws Exception { MRImage mri = (MRImage) SpringApplicationContext.getBean("mrimage"); try { String hql = "from MRImage as image where "; hql += (" image.generalImage.id = " + this.gi.getId()); // general_image // pk_id mri.setGeneralImage(gi); mri.setGeneralSeries(gi.getGeneralSeries()); // the update method should be changed to search // mri = (MRImage) this.update(hql, mri); List ret = getHibernateTemplate().find(hql); if (ret != null && ret.size() > 0) { if (ret.size() == 1) { mri = (MRImage) ret.get(0); } else if (ret.size() > 1) { throw new Exception( "mr_image table has duplicate records, please contact Data Team to fix data, then upload data again"); } } populateMRImageFromNumbers(numbers, mri); } catch (Exception e) { // log.error("Exception in MRImageOperation " + e); throw new Exception("Exception in MRImageOperation " + e); } return mri; } public void setGeneralImage(GeneralImage gi) { this.gi = gi; } /** * Given the "numbers" map with all the parsed out dicom tag values we care * about..... populate the general image object with these values. */ private static void populateMRImageFromNumbers(Map numbers, MRImage mri) throws Exception { String temp; if ((temp = (String) numbers.get(DicomConstants.IMAGE_TYPE)) != null) { String[] token = temp.split("\\\\"); if (token.length >= 3) { mri.setImageTypeValue3(token[2]); } } if ((temp = (String) numbers.get(DicomConstants.SCANNING_SEQUENCE)) != null) { mri.setScanningSequence(temp.trim()); } if ((temp = (String) numbers.get(DicomConstants.SEQUENCE_VARIANT)) != null) { mri.setSequenceVariant(temp.trim()); } if ((temp = (String) numbers.get(DicomConstants.REPETITION_TIME)) != null) { mri.setRepetitionTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.ECHO_TIME)) != null) { mri.setEchoTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.INVERSION_TIME)) != null) { mri.setInversionTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.SEQUENCE_NAME)) != null) { mri.setSequenceName(temp.trim()); } if ((temp = (String) numbers.get(DicomConstants.IMAGED_NUCLEUS)) != null) { mri.setImagedNucleus(temp.trim()); } if ((temp = (String) numbers .get(DicomConstants.MAGNETIC_FIELD_STRENGTH)) != null) { mri.setMagneticFieldStrength(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.SAR)) != null) { mri.setSar(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.DB_DT)) != null) { mri.setDbDt(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.TRIGGER_TIME)) != null) { mri.setTriggerTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.ANGIO_FLAG)) != null) { mri.setAngioFlag(temp.trim()); } if (mri.getImageTypeValue3() == null) { throw new Exception("Image Type 3 cannot be null"); } if 
(mri.getScanningSequence() == null) { throw new Exception("Scanning Sequence cannot be null"); } if (mri.getSequenceVariant() == null) { throw new Exception("Scanning Variant cannot be null"); } } }
software/nbia-ctp/src/gov/nih/nci/nbia/domain/operation/MRImageOperation.java
/** * */ package gov.nih.nci.nbia.domain.operation; import gov.nih.nci.nbia.internaldomain.MRImage; import gov.nih.nci.nbia.internaldomain.GeneralImage; import gov.nih.nci.nbia.util.DicomConstants; import gov.nih.nci.nbia.util.SpringApplicationContext; import java.util.List; import java.util.Map; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; /** * */ public class MRImageOperation extends DomainOperation implements MRImageOperationInterface{ private GeneralImage gi; public MRImageOperation() { } @Transactional(propagation=Propagation.REQUIRED) public Object validate(Map numbers) throws Exception { MRImage mri = (MRImage)SpringApplicationContext.getBean("mrimage"); try { String hql = "from MRImage as image where "; hql += (" image.generalImage.id = " + this.gi.getId()); //general_image pk_id mri.setGeneralImage(gi); mri.setGeneralSeries(gi.getGeneralSeries()); //the update method should be changed to search // mri = (MRImage) this.update(hql, mri); List ret = getHibernateTemplate().find(hql); if(ret != null && ret.size() > 0) { if(ret.size() == 1) { mri = (MRImage)ret.get(0); }else if (ret.size() > 1){ throw new Exception("mr_image table has duplicate records, please contact Data Team to fix data, then upload data again"); } } populateMRImageFromNumbers(numbers, mri); }catch(Exception e) { //log.error("Exception in MRImageOperation " + e); throw new Exception("Exception in MRImageOperation " + e); } return mri; } public void setGeneralImage(GeneralImage gi) { this.gi = gi; } /** * Given the "numbers" map with all the parsed out dicom tag values we * care about..... populate the general image object with these values. */ private static void populateMRImageFromNumbers(Map numbers, MRImage mri) throws Exception { String temp; if ((temp = (String) numbers.get(DicomConstants.IMAGE_TYPE)) != null) { String[] token = temp.split("\\\\"); if (token.length >= 3){ mri.setImageTypeValue3(token[2]); } } if ((temp = (String) numbers.get(DicomConstants.SCANNING_SEQUENCE)) != null) { mri.setScanningSequence(temp.trim()); } if ((temp = (String) numbers.get(DicomConstants.SEQUENCE_VARIANT)) != null) { mri.setSequenceVariant(temp.trim()); } if ((temp = (String) numbers.get(DicomConstants.REPETITION_TIME)) != null) { mri.setRepetitionTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.ECHO_TIME)) != null) { mri.setEchoTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.INVERSION_TIME)) != null) { mri.setInversionTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.SEQUENCE_NAME)) != null) { mri.setSequenceName(temp.trim()); } if ((temp = (String) numbers.get(DicomConstants.IMAGED_NUCLEUS)) != null) { mri.setImagedNucleus(temp.trim()); } if ((temp = (String) numbers.get( DicomConstants.MAGNETIC_FIELD_STRENGTH)) != null) { mri.setMagneticFieldStrength(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.SAR)) != null) { mri.setSar(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.DB_DT)) != null) { mri.setDbDt(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.TRIGGER_TIME)) != null) { mri.setTriggerTime(Double.valueOf(temp.trim())); } if ((temp = (String) numbers.get(DicomConstants.ANGIO_FLAG)) != null) { mri.setAngioFlag(temp.trim()); } } }
NBIA-527 Added validation for image type 3, Scanning Sequence and Scanning Variant
software/nbia-ctp/src/gov/nih/nci/nbia/domain/operation/MRImageOperation.java
NBIA-527 Added validation for image type 3, Scanning Sequence and Scanning Variant
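The change in this record adds null validation for three DICOM-derived MR fields; the third Image Type value is taken from a backslash-delimited tag string. A small sketch of that parse-and-validate step in isolation (the tag value in main() is an illustrative example, not taken from the source):

public class ImageTypeSketch {
    // Mirrors the split used in populateMRImageFromNumbers(): DICOM Image Type is a
    // backslash-delimited multi-value string, and the third value is required here
    static String thirdImageTypeValue(String imageType) throws Exception {
        String value3 = null;
        if (imageType != null) {
            String[] token = imageType.split("\\\\");
            if (token.length >= 3) {
                value3 = token[2];
            }
        }
        if (value3 == null) {
            throw new Exception("Image Type 3 cannot be null");
        }
        return value3;
    }

    public static void main(String[] args) throws Exception {
        System.out.println(thirdImageTypeValue("ORIGINAL\\PRIMARY\\OTHER")); // prints OTHER
    }
}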
Java
bsd-3-clause
69d30fbac255c7c2af300d5a6452246308e7ef77
0
joansmith/rultor,pecko/rultor,maurezen/rultor,maurezen/rultor,dalifreire/rultor,pecko/rultor,joansmith/rultor,maurezen/rultor,linlihai/rultor,krzyk/rultor,maurezen/rultor,linlihai/rultor,krzyk/rultor,linlihai/rultor,dalifreire/rultor,pecko/rultor,joansmith/rultor,dalifreire/rultor,linlihai/rultor,krzyk/rultor,joansmith/rultor,dalifreire/rultor,joansmith/rultor,dalifreire/rultor,pecko/rultor,pecko/rultor,krzyk/rultor,linlihai/rultor,maurezen/rultor,krzyk/rultor
/** * Copyright (c) 2009-2015, rultor.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the rultor.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.rultor.agents.req; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.jcabi.aspects.Immutable; import com.jcabi.log.Logger; import com.jcabi.xml.XML; import com.rultor.agents.AbstractAgent; import com.rultor.spi.Profile; import java.io.IOException; import java.util.Collections; import java.util.Map; import lombok.EqualsAndHashCode; import lombok.ToString; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.CharEncoding; import org.xembly.Directive; import org.xembly.Directives; /** * Merges. * * @author Yegor Bugayenko (yegor@tpc2.com) * @version $Id$ * @since 1.0 */ @Immutable @ToString @EqualsAndHashCode(callSuper = false, of = "profile") public final class StartsRequest extends AbstractAgent { /** * Default port value to be used with Decrypt. */ private static final String DEFAULT_PORT = "80"; /** * HTTP proxy port system property key. */ private static final String PORT_KEY = "http.proxyPort"; /** * HTTP proxy host system property key. */ private static final String HOST_KEY = "http.proxyHost"; /** * Profile. */ private final transient Profile profile; /** * Ctor. 
* @param prof Profile */ public StartsRequest(final Profile prof) { super( "/talk/request[@id and type and not(success)]", "/talk[not(daemon)]" ); this.profile = prof; } @Override public Iterable<Directive> process(final XML xml) throws IOException { final XML req = xml.nodes("//request").get(0); final String type = req.xpath("type/text()").get(0); final String hash = req.xpath("@id").get(0); String script; try { script = this.script( req, type, xml.xpath("/talk/@name").get(0) ); Logger.info( this, "request %s/%s started for %s", type, hash, xml.xpath("/talk/@name").get(0) ); } catch (final Profile.ConfigException ex) { script = Logger.format( "cat <<EOT\n%[exception]s\nEOT\nexit -1", ex ); } return new Directives().xpath("/talk") .add("daemon") .attr("id", hash) .add("title").set(type).up() .add("script").set(script); } /** * Make a script. * @param req Request * @param type Its type * @param name Name of talk * @return Script * @throws IOException If fails */ @SuppressWarnings("unchecked") private String script(final XML req, final String type, final String name) throws IOException { return Joiner.on('\n').join( Iterables.concat( Iterables.transform( Sets.union( this.vars(req, type).entrySet(), Sets.newHashSet( Maps.immutableEntry( "container", name.replaceAll("[^a-zA-Z0-9_.-]", "_") ) ) ), new Function<Map.Entry<String, String>, String>() { @Override public String apply( final Map.Entry<String, String> input) { return String.format( "%s=%s", input.getKey(), input.getValue() ); } } ), Collections.singleton(this.asRoot()), Collections.singleton( IOUtils.toString( this.getClass().getResourceAsStream("_head.sh"), CharEncoding.UTF_8 ) ), this.decryptor().commands(), Collections.singleton( IOUtils.toString( this.getClass().getResourceAsStream( String.format("%s.sh", type) ), CharEncoding.UTF_8 ) ) ) ); } /** * Obtain proxy settings and create a Decrypt instance. * @return Decrypt instance. */ private Decrypt decryptor() { return new Decrypt( this.profile, System.getProperty(HOST_KEY, ""), Integer.parseInt(System.getProperty(PORT_KEY, DEFAULT_PORT)) ); } /** * Get start script for as_root config. * @return Script * @throws IOException If fails * @since 1.37 */ private String asRoot() throws IOException { return String.format( "as_root=%b", !this.profile.read().nodes( "/p/entry[@key='docker']/entry[@key='as_root' and .='true']" ).isEmpty() ); } /** * Get variables from script. * @param req Request * @param type Its type * @return Vars * @throws IOException If fails */ private Map<String, String> vars(final XML req, final String type) throws IOException { final ImmutableMap.Builder<String, String> vars = new ImmutableMap.Builder<>(); for (final XML arg : req.nodes("args/arg")) { vars.put(arg.xpath("@name").get(0), arg.xpath("text()").get(0)); } final DockerRun docker = new DockerRun( this.profile, String.format("/p/entry[@key='%s']", type) ); vars.put("vars", docker.envs(vars.build())); final Profile.Defaults def = new Profile.Defaults(this.profile); vars.put( "image", def.text( "/p/entry[@key='docker']/entry[@key='image']", "yegor256/rultor" ) ); vars.put( "squash", def.text( "/p/entry[@key='merge']/entry[@key='squash']", "false" ) ); vars.put( "directory", def.text("/p/entry[@key='docker']/entry[@key='directory']") ); vars.put("scripts", docker.script()); return vars.build(); } }
src/main/java/com/rultor/agents/req/StartsRequest.java
/** * Copyright (c) 2009-2015, rultor.com * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: 1) Redistributions of source code must retain the above * copyright notice, this list of conditions and the following * disclaimer. 2) Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. 3) Neither the name of the rultor.com nor * the names of its contributors may be used to endorse or promote * products derived from this software without specific prior written * permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT * NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL * THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.rultor.agents.req; import com.google.common.base.Function; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.jcabi.aspects.Immutable; import com.jcabi.log.Logger; import com.jcabi.xml.XML; import com.rultor.agents.AbstractAgent; import com.rultor.spi.Profile; import java.io.IOException; import java.util.Collections; import java.util.Map; import lombok.EqualsAndHashCode; import lombok.ToString; import org.apache.commons.io.IOUtils; import org.apache.commons.lang3.CharEncoding; import org.xembly.Directive; import org.xembly.Directives; /** * Merges. * * @author Yegor Bugayenko (yegor@tpc2.com) * @version $Id$ * @since 1.0 */ @Immutable @ToString @EqualsAndHashCode(callSuper = false, of = "profile") public final class StartsRequest extends AbstractAgent { /** * Default port value to be used with Decrypt. */ private static final String DEFAULT_PORT = "80"; /** * HTTP proxy port system property key. */ private static final String PORT_KEY = "http.proxyPort"; /** * HTTP proxy host system property key. */ private static final String HOST_KEY = "http.proxyHost"; /** * Profile. */ private final transient Profile profile; /** * Ctor. 
* @param prof Profile */ public StartsRequest(final Profile prof) { super( "/talk/request[@id and type and not(success)]", "/talk[not(daemon)]" ); this.profile = prof; } @Override public Iterable<Directive> process(final XML xml) throws IOException { final XML req = xml.nodes("//request").get(0); final String type = req.xpath("type/text()").get(0); final String hash = req.xpath("@id").get(0); String script; try { script = this.script( req, type, xml.xpath("/talk/@name").get(0) ); Logger.info(this, "request %s/%s started", type, hash); } catch (final Profile.ConfigException ex) { script = Logger.format( "cat <<EOT\n%[exception]s\nEOT\nexit -1", ex ); } return new Directives().xpath("/talk") .add("daemon") .attr("id", hash) .add("title").set(type).up() .add("script").set(script); } /** * Make a script. * @param req Request * @param type Its type * @param name Name of talk * @return Script * @throws IOException If fails */ @SuppressWarnings("unchecked") private String script(final XML req, final String type, final String name) throws IOException { return Joiner.on('\n').join( Iterables.concat( Iterables.transform( Sets.union( this.vars(req, type).entrySet(), Sets.newHashSet( Maps.immutableEntry( "container", name.replaceAll("[^a-zA-Z0-9_.-]", "_") ) ) ), new Function<Map.Entry<String, String>, String>() { @Override public String apply( final Map.Entry<String, String> input) { return String.format( "%s=%s", input.getKey(), input.getValue() ); } } ), Collections.singleton(this.asRoot()), Collections.singleton( IOUtils.toString( this.getClass().getResourceAsStream("_head.sh"), CharEncoding.UTF_8 ) ), this.decryptor().commands(), Collections.singleton( IOUtils.toString( this.getClass().getResourceAsStream( String.format("%s.sh", type) ), CharEncoding.UTF_8 ) ) ) ); } /** * Obtain proxy settings and create a Decrypt instance. * @return Decrypt instance. */ private Decrypt decryptor() { return new Decrypt( this.profile, System.getProperty(HOST_KEY, ""), Integer.parseInt(System.getProperty(PORT_KEY, DEFAULT_PORT)) ); } /** * Get start script for as_root config. * @return Script * @throws IOException If fails * @since 1.37 */ private String asRoot() throws IOException { return String.format( "as_root=%b", !this.profile.read().nodes( "/p/entry[@key='docker']/entry[@key='as_root' and .='true']" ).isEmpty() ); } /** * Get variables from script. * @param req Request * @param type Its type * @return Vars * @throws IOException If fails */ private Map<String, String> vars(final XML req, final String type) throws IOException { final ImmutableMap.Builder<String, String> vars = new ImmutableMap.Builder<>(); for (final XML arg : req.nodes("args/arg")) { vars.put(arg.xpath("@name").get(0), arg.xpath("text()").get(0)); } final DockerRun docker = new DockerRun( this.profile, String.format("/p/entry[@key='%s']", type) ); vars.put("vars", docker.envs(vars.build())); final Profile.Defaults def = new Profile.Defaults(this.profile); vars.put( "image", def.text( "/p/entry[@key='docker']/entry[@key='image']", "yegor256/rultor" ) ); vars.put( "squash", def.text( "/p/entry[@key='merge']/entry[@key='squash']", "false" ) ); vars.put( "directory", def.text("/p/entry[@key='docker']/entry[@key='directory']") ); vars.put("scripts", docker.script()); return vars.build(); } }
more logs
src/main/java/com/rultor/agents/req/StartsRequest.java
more logs
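StartsRequest.script() above prefixes the generated shell script with one name=value assignment per variable, joined by newlines. A rough JDK-only equivalent of that step (the Guava Function/Joiner pipeline is replaced by streams, and the variable values in main() are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.stream.Collectors;

public class ShellVarsSketch {
    // Same idea as the transform in script(): one "name=value" line per map entry
    static String toAssignments(Map<String, String> vars) {
        return vars.entrySet().stream()
                   .map(e -> String.format("%s=%s", e.getKey(), e.getValue()))
                   .collect(Collectors.joining("\n"));
    }

    public static void main(String[] args) {
        Map<String, String> vars = new LinkedHashMap<>();
        vars.put("image", "yegor256/rultor");   // default image used by StartsRequest
        vars.put("squash", "false");
        vars.put("container", "example_talk");  // talk name with unsafe characters replaced by '_'
        System.out.println(toAssignments(vars));
    }
}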
Java
mit
435839ef668dacb69d1ca2286ce9534bba392122
0
DruidGreeneyes/rivet-core-extras.java
package rivet.extras.text; import static java.util.Arrays.stream; import rivet.core.labels.ArrayRIV; public final class UntrainedWords { private UntrainedWords(){} public static String[] tokenizeText (String text) { return text.split("\\s+"); } public static ArrayRIV[] rivWords (String[] words, int size, int k) { ArrayRIV[] res = new ArrayRIV[words.length]; for (int i = 0; i < words.length; i++) { ArrayRIV riv = ArrayRIV.generateLabel(size, k, words[i]); res[i] = riv; } return res; } public static ArrayRIV sumArrayRIVs (ArrayRIV[] rivs) { return stream(rivs) .reduce( new ArrayRIV(rivs[0].size()), (i, r) -> i.add(r)); } public static ArrayRIV rivettizeText (String text, int size, int k) { return sumArrayRIVs(rivWords(tokenizeText(text), size, k)); } }
src/rivet/extras/text/UntrainedWords.java
package rivet.extras.text; import static java.util.Arrays.stream; import rivet.core.arraylabels.*; import rivet.core.labels.ArrayRIV; import rivet.core.labels.RandomIndexVector; public final class UntrainedWords { private UntrainedWords(){} public static String[] tokenizeText (String text) { return text.split("\\s+"); } public static ArrayRIV[] rivWords (String[] words, int size, int k) { ArrayRIV[] res = new ArrayRIV[words.length]; for (int i = 0; i < words.length; i++) { ArrayRIV riv = ArrayRIV.generateLabel(size, k, words[i]); res[i] = riv; } return res; } public static ArrayRIV sumArrayRIVs (ArrayRIV[] rivs) { return stream(rivs) .reduce( new ArrayRIV(rivs[0].size()), (i, r) -> i.add(r)); } public static ArrayRIV rivettizeText (String text, int size, int k) { return sumArrayRIVs(rivWords(tokenizeText(text), size, k)); } }
remove bogus dependencies
src/rivet/extras/text/UntrainedWords.java
remove bogus dependencies
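A brief usage sketch for the UntrainedWords helper above; the vector dimension and non-zero count passed to rivettizeText() are arbitrary example values, and rivet.core.labels.ArrayRIV is assumed to be available on the classpath just as in the file itself:

import rivet.core.labels.ArrayRIV;
import rivet.extras.text.UntrainedWords;

public class UntrainedWordsDemo {
    public static void main(String[] args) {
        // Sums the random index vectors generated for each whitespace-separated token
        ArrayRIV textRiv = UntrainedWords.rivettizeText("the quick brown fox", 16000, 48);
        System.out.println(textRiv.size()); // expected to be the chosen dimensionality (16000)
    }
}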
Java
mit
b2956dbed4427dbf11edd851085af525458eb455
0
GitHubRGI/swagd,GitHubRGI/swagd,GitHubRGI/swagd,GitHubRGI/swagd
/* Copyright (C) 2014 Reinventing Geospatial, Inc * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>, * or write to the Free Software Foundation, Inc., 59 Temple Place - * Suite 330, Boston, MA 02111-1307, USA. */ package com.rgi.geopackage; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.nio.file.FileSystems; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Random; import java.util.Set; import org.junit.Assert; import org.junit.Test; import com.rgi.common.BoundingBox; import com.rgi.common.coordinate.Coordinate; import com.rgi.common.coordinate.CoordinateReferenceSystem; import com.rgi.common.coordinate.CrsCoordinate; import com.rgi.common.coordinate.referencesystem.profile.CrsProfile; import com.rgi.common.coordinate.referencesystem.profile.CrsProfileFactory; import com.rgi.common.coordinate.referencesystem.profile.EllipsoidalMercatorCrsProfile; import com.rgi.common.coordinate.referencesystem.profile.GlobalGeodeticCrsProfile; import com.rgi.common.coordinate.referencesystem.profile.SphericalMercatorCrsProfile; import com.rgi.common.util.ImageUtility; import com.rgi.geopackage.GeoPackage.OpenMode; import com.rgi.geopackage.core.SpatialReferenceSystem; import com.rgi.geopackage.tiles.Tile; import com.rgi.geopackage.tiles.TileMatrix; import com.rgi.geopackage.tiles.TileMatrixSet; import com.rgi.geopackage.tiles.TileSet; import com.rgi.geopackage.verification.ConformanceException; import com.rgi.geopackage.verification.VerificationLevel; /** * @author Jenifer Cochran */ @SuppressWarnings("static-method") public class GeoPackageTilesAPITest { private final Random randomGenerator = new Random(); /** * This tests if a GeoPackage can add a tile set successfully without throwing errors. 
* * @throws SQLException throws when an SQLException occurs * @throws Exception throws if an exception occurs */ @Test public void addTileSet() throws SQLException, Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("pyramid", "title", "tiles", new BoundingBox(0.0, 0.0, 50.0, 60.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 4; final int tileHeight = 512; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); } final String query = "SELECT table_name FROM gpkg_tile_matrix_set WHERE table_name = 'pyramid';"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileName = stmt.executeQuery(query);) { Assert.assertTrue("The GeoPackage did not set the table_name into the gpkg_tile_matrix_set when adding a new set of tiles.", tileName.next()); final String tableName = tileName.getString("table_name"); Assert.assertTrue("The GeoPackage did not insert the correct table name into the gpkg_tile_matrix_set when adding a new set of tiles.", tableName.equals("pyramid")); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when given a null value for tileSetEntry * * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithNullTileSetEntry() throws Exception { final File testFile = this.getRandomFile(3); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().addTile(null, gpkg.tiles().getTileMatrix(tileSet, 0), 0, 0, GeoPackageTilesAPITest.createImageBytes()); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a null value for tileSetEntry."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when given * a tilesetentry with a null value for the boundingbox * * @throws Exception * throws if an exception occurs * */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithNullBoundingBox() throws Exception { final File testFile = this.getRandomFile(3); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("tableName", "ident", "desc", null, gpkg.core().getSpatialReferenceSystem(4236)); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a null value for BoundingBox."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the Geopackage will throw an IllegalArgumentException * If it gives tries to create a TileSet with a null SRS value * @throws Exception throws when exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithNullSRS() throws Exception { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("name", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), null); Assert.fail("GeoPackage should have thrown an IllegalArgumentException when TileEntrySet is null."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Test if the GeoPackage will add a Tile set with a new Spatial Reference System (one created by user). * @throws Exception throws if an exception occurs */ @Test public void addTileSetWithNewSpatialReferenceSystem() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.core().addSpatialReferenceSystem("scaled world mercator", 9804, "org", 9804, "definition", "description"); } final String query = "SELECT srs_name FROM gpkg_spatial_ref_sys "+ "WHERE srs_name = 'scaled world mercator' AND "+ "srs_id = 9804 AND "+ "organization = 'org' AND "+ "definition = 'definition' AND "+ "description = 'description';"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet srsInfo = stmt.executeQuery(query);) { Assert.assertTrue("The Spatial Reference System added to the GeoPackage by the user did not contain the same information given.", srsInfo.next()); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if given a GeoPackage with tiles already inside it can add another Tile Set without throwing an error and verify that it entered the correct information. 
* * @throws Exception throws if an exception occurs */ @Test public void addTileSetToExistingGpkgWithTilesInside() throws Exception { final File testFile = this.getRandomFile(5); //create a geopackage with tiles inside try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetONE", "title", "tiles", new BoundingBox(0.0, 0.0, 60.0, 60.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); //open a file with tiles inside and add more tiles try(GeoPackage gpkgWithTiles = new GeoPackage(testFile, OpenMode.Open)) { final TileSet tileSetEntry2 = gpkgWithTiles.tiles() .addTileSet("newTileSetTWO", "title2", "tiles", new BoundingBox(0.0, 0.0, 70.0, 50.0), gpkgWithTiles.core().getSpatialReferenceSystem(4326)); final double pixelXSize = (tileSetEntry2.getBoundingBox().getWidth()/matrixWidth)/tileWidth; final double pixelYSize = (tileSetEntry2.getBoundingBox().getHeight()/matrixHeight)/tileHeight; gpkgWithTiles.tiles().addTileMatrix(tileSetEntry2, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); } } //make sure the information was added to contents table and tile matrix set table final String query = "SELECT cnts.table_name FROM gpkg_contents AS cnts WHERE cnts.table_name"+ " IN(SELECT tms.table_name FROM gpkg_tile_matrix_set AS tms WHERE cnts.table_name = tms.table_name);"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileTableNames = stmt.executeQuery(query);) { if(!tileTableNames.next()) { Assert.fail("The two tiles tables where not successfully added to both the gpkg_contents table and the gpkg_tile_matrix_set."); } while (tileTableNames.next()) { final String tilesTableName = tileTableNames.getString("table_name"); Assert.assertTrue("The tiles table names did not match what was being added to the GeoPackage", tilesTableName.equals("newTileSetTWO") || tilesTableName.equals("tileSetONE")); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an error when adding a tileset with the same name as another tileset in the GeoPackage. 
* * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithRepeatedTileSetName() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("repeated_name", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 2, 2); final TileSet tileSetEntry2 = gpkg.tiles() .addTileSet("repeated_name", "title2", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().addTileMatrix(tileSetEntry2, 0, 2, 2, 2, 2, 2, 2); Assert.fail("The GeoPackage should throw an IllegalArgumentException when a user gives a Tile Set Name that already exists."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage can add 2 Tile Matrix entries with * the two different tile pyramids can be entered into one gpkg * @throws Exception throws if an exception occurs */ @Test public void addTileSetToExistingTilesTable() throws Exception { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "tiles", "desc", new BoundingBox(0.0, 0.0, 70.0, 70.0), gpkg.core().getSpatialReferenceSystem(4326)); final ArrayList<TileSet> tileSetContnentEntries = new ArrayList<>(); tileSetContnentEntries.add(tileSet); tileSetContnentEntries.add(tileSet); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 4; final int tileHeight2 = 256; final int tileWidth2 = 256; gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); for(final TileSet gpkgEntry : gpkg.tiles().getTileSets()) { Assert.assertTrue("The tile entry's information in the GeoPackage does not match what was originally given to a GeoPackage", tileSetContnentEntries.stream() .anyMatch(tileEntry -> tileEntry.getBoundingBox().equals(gpkgEntry.getBoundingBox()) && tileEntry.getDataType() .equals(gpkgEntry.getDataType()) && tileEntry.getDescription().equals(gpkgEntry.getDescription()) && tileEntry.getIdentifier() .equals(gpkgEntry.getIdentifier()) && tileEntry.getTableName() .equals(gpkgEntry.getTableName()) && tileEntry.getSpatialReferenceSystemIdentifier().equals(gpkgEntry.getSpatialReferenceSystemIdentifier()))); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delte testFile. 
testFile: %s", testFile)); } } } } /** * This ensures that when a user tries to add the same tileSet two times * that the TileSet object that is returned is the one that already exists * in the GeoPackage and verifies its contents * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test public void addSameTileSetTwice() throws ClassNotFoundException, SQLException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final String tableName = "tableName"; final String identifier = "identifier"; final String description = "description"; final BoundingBox boundingBox = new BoundingBox(2.0,1.0,4.0,3.0); final SpatialReferenceSystem srs = gpkg.core().getSpatialReferenceSystem(0); final TileSet tileSet = gpkg.tiles().addTileSet(tableName, identifier, description, boundingBox, srs); final TileSet sameTileSet = gpkg.tiles().addTileSet(tableName, identifier, description, boundingBox, srs); Assert.assertTrue("The GeoPackage did not return the same tile set when trying to add the same tile set twice.", sameTileSet.equals(tileSet.getTableName(), tileSet.getDataType(), tileSet.getIdentifier(), tileSet.getDescription(), tileSet.getBoundingBox(), tileSet.getSpatialReferenceSystemIdentifier())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Expects GeoPackage to throw an IllegalArgumentException when giving * addTileSet a parameter with a null value for bounding box * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test(expected = IllegalArgumentException.class) public void addTileSetBadTableName() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("TableName", "identifier", "definition", null, gpkg.core().getSpatialReferenceSystem(-1)); Assert.fail("Expected an IllegalArgumentException when giving a null value for bounding box for addTileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Expects GeoPackage to throw an IllegalArgumentException when giving * addTileSet a parameter with a null value for bounding box * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test(expected = IllegalArgumentException.class) public void addTileSetBadSRS() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("TableName", "identifier", "definition", new BoundingBox(0.0,0.0,0.0,0.0), null); Assert.fail("Expected an IllegalArgumentException when giving a null value for bounding box for addTileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Expects GeoPackage to throw an IllegalArgumentException when giving * addTileSet a parameter with a null value for bounding box * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileSetBadBoundingBox() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("TableName", "identifier", "definition", new BoundingBox(0.0,0.0,0.0,0.0), null); Assert.fail("Expected an IllegalArgumentException when giving a null value for bounding box for addTileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when The table name is an empty string * for TileSet. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetContentEntryInvalidTableName() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when given a TileSet with an empty string for the table name."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if GeoPackageTiles throws an IllegalArgumentException when giving a * table name with symbols * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test(expected = IllegalArgumentException.class) public void addTileIllegalArgumentException() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { gpkg.tiles().addTileSet("badTableName^", "identifier", "description", new BoundingBox(0.0,0.0,2.0,2.0), gpkg.core().getSpatialReferenceSystem(0)); fail("Expected to get an IllegalArgumentException for giving an illegal tablename (with symbols not allowed by GeoPackage)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackageTiles throws an IllegalArgumentException when giving a * table name starting with gpkg * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileIllegalArgumentException2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { gpkg.tiles().addTileSet("gpkg_bad_tablename", "identifier", "description", new BoundingBox(0.0,0.0,2.0,2.0), gpkg.core().getSpatialReferenceSystem(0)); fail("Expected to get an IllegalArgumentException for giving an illegal tablename (starting with gpkg_ which is not allowed by GeoPackage)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackageTiles throws an IllegalArgumentException when giving a * table name with a null value * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileIllegalArgumentException3() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { gpkg.tiles().addTileSet(null, "identifier", "description", new BoundingBox(0.0,0.0,2.0,2.0), gpkg.core().getSpatialReferenceSystem(0)); fail("Expected to get an IllegalArgumentException for giving an illegal tablename (a null value)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will return the same tileSets that was given to the GeoPackage when adding tileSets. 
* @throws Exception if an exception occurs */ @Test public void getTileSetsFromGpkg() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "tiles", "desc", new BoundingBox(0.0, 0.0, 90.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileSet tileSet2 = gpkg.tiles() .addTileSet("SecondTileSet", "ident", "descrip", new BoundingBox(1.0,1.0,122.0,111.0), gpkg.core().getSpatialReferenceSystem(4326)); final ArrayList<TileSet> tileSetContentEntries = new ArrayList<>(); tileSetContentEntries.add(tileSet); tileSetContentEntries.add(tileSet2); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 4; gpkg.tiles().addTileMatrix(tileSet2, 1, matrixWidth2, matrixHeight2, tileWidth, tileHeight, (tileSet2.getBoundingBox().getWidth()/matrixWidth2)/tileWidth, (tileSet2.getBoundingBox().getHeight()/matrixHeight2)/tileHeight); final Collection<TileSet> tileSetsFromGpkg = gpkg.tiles().getTileSets(); Assert.assertTrue("The number of tile sets added to the GeoPackage does not match the number retrieved from the GeoPackage.", tileSetContentEntries.size() == tileSetsFromGpkg.size()); for(final TileSet gpkgEntry : tileSetsFromGpkg) { Assert.assertTrue("The tile entry's information in the GeoPackage does not match what was originally given to a GeoPackage", tileSetContentEntries.stream() .anyMatch(tileEntry -> tileEntry.getBoundingBox().equals(gpkgEntry.getBoundingBox()) && tileEntry.getDataType() .equals(gpkgEntry.getDataType()) && tileEntry.getDescription().equals(gpkgEntry.getDescription()) && tileEntry.getIdentifier() .equals(gpkgEntry.getIdentifier()) && tileEntry.getTableName() .equals(gpkgEntry.getTableName()) && tileEntry.getSpatialReferenceSystemIdentifier().equals(gpkgEntry.getSpatialReferenceSystemIdentifier()))); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will find no tile sets when searching with an SRS that is not in the GeoPackage. * @throws Exception throws if an exception occurs */ @Test public void getTileSetWithNewSRS() throws Exception { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile)) { final Collection<TileSet> gpkgTileSets = gpkg.tiles().getTileSets(gpkg.core().addSpatialReferenceSystem("name", 123,"org", 123,"def","desc")); Assert.assertTrue("Should not have found any tile sets because there weren't any in " + "GeoPackage that matched the SpatialReferenceSystem given.", gpkgTileSets.size() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the getTileSet returns null when the tile table does not exist * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileSetVerifyReturnNull()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().getTileSet("table_not_here"); Assert.assertTrue("GeoPackage expected to return null when the tile set does not exist in GeoPackage",tileSet == null); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the getTileSet returns the expected values. * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileSetVerifyReturnCorrectTileSet()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("ttable","identifier", "Desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileSet returnedTileSet = gpkg.tiles().getTileSet("ttable"); Assert.assertTrue("GeoPackage did not return the same values given to tile set", tileSet.getBoundingBox().equals(returnedTileSet.getBoundingBox()) && tileSet.getDescription().equals(returnedTileSet.getDescription()) && tileSet.getDataType() .equals(returnedTileSet.getDataType()) && tileSet.getIdentifier() .equals(returnedTileSet.getIdentifier()) && tileSet.getLastChange() .equals(returnedTileSet.getLastChange()) && tileSet.getTableName() .equals(returnedTileSet.getTableName()) && tileSet.getSpatialReferenceSystemIdentifier().equals(returnedTileSet.getSpatialReferenceSystemIdentifier())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect there are zoom levels for * a tile that is not represented in the Tile Matrix Table. * Should throw a IllegalArgumentException. 
* @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 18, 20, 20, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 0, new byte[] {1, 2, 3, 4}); Assert.fail("GeoPackage should throw an IllegalArgumentException when the Tile Matrix Table " + "does not contain a record for the zoom level of a tile in the Pyramid User Data Table."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_row * is larger than matrix_height - 1, which is a violation * of the GeoPackage specification, Requirement 55. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException2() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 10, new byte[] {1, 2, 3, 4}); Assert.fail("GeoPackage should throw an IllegalArgumentException when tile_row " + "is larger than matrix_height - 1 when zoom levels are equal."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_row * is less than 0, which is a violation * of the GeoPackage specification, Requirement 55. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException3() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, 0, -1, new byte[] {1, 2, 3, 4}); Assert.fail("GeoPackage should throw an IllegalArgumentException when tile_row " + "is less than 0."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_column * is larger than matrix_width - 1, which is a violation * of the GeoPackage specification, Requirement 54. 
* @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException4() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet,tileMatrix, 10, 0, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentException when tile_column " + "is larger than matrix_width -1."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_column * is less than 0. Which is a violation * of the GeoPackage Specifications. Requirement 54. * @throws Exception throws when exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException5() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, -1, 0, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentException when tile_column " + "is less than 0."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Geopackage throws an SQLException when opening a Geopackage since it does not contain the default tables * inside after bypassing the verifier. * * @throws Exception throws if an exception occurs */ @Test(expected = SQLException.class) public void addTilesToGpkgAndAddTilesAndSetVerifyToFalse() throws Exception { final File testFile = this.getRandomFile(37); testFile.createNewFile(); try(GeoPackage gpkg = new GeoPackage(testFile, VerificationLevel.None, OpenMode.Open)) { gpkg.tiles() .addTileSet("diff_tile_set", "tile", "desc", new BoundingBox(1.0, 1.0, 1.0, 1.0), gpkg.core().getSpatialReferenceSystem(4326)); Assert.fail("The GeoPackage was expected to throw an IOException due to the file being empty."); } catch(final IOException ex) { final String query = "SELECT table_name FROM gpkg_contents WHERE table_name = 'diff_tile_set';"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileTableName = stmt.executeQuery(query);) { Assert.assertTrue("The data should not be in the contents table since it throws an SQLException", tileTableName.getString("table_name") == null); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This adds a tile to a GeoPackage and verifies that the Tile object added * into the GeoPackage is the same Tile object returned. 
* * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * throws if the image from the tile is not able to be read */ @Test public void addTileMethodByCrsTileCoordinate() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, -80.0, 180.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 2; final int matrixWidth = 2; final int matrixHeight = 2; final int tileWidth = 256; final int tileHeight = 256; final double pixelXSize = (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth; final double pixelYSize = (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); final CoordinateReferenceSystem coordinateReferenceSystem = new CoordinateReferenceSystem("EPSG", 4326); final CrsProfile crsProfile = CrsProfileFactory.create(coordinateReferenceSystem); final CrsCoordinate crsCoordinate = new CrsCoordinate(0.0, -60.0, coordinateReferenceSystem); final Tile tileAdded = gpkg.tiles().addTile(tileSet, tileMatrix, crsCoordinate, crsProfile.getPrecision(), GeoPackageTilesAPITest.createImageBytes()); final Tile tileFound = gpkg.tiles().getTile(tileSet, crsCoordinate, crsProfile.getPrecision(), zoomLevel); Assert.assertTrue("The GeoPackage did not return the tile Expected.", tileAdded.getColumn() == tileFound.getColumn() && tileAdded.getIdentifier() == tileFound.getIdentifier() && tileAdded.getRow() == tileFound.getRow() && tileAdded.getZoomLevel() == tileFound.getZoomLevel() && Arrays.equals(tileAdded.getImageData(), tileFound.getImageData())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Test if the GeoPackage can successfully add non empty tiles to a GeoPackage without throwing an error. 
* * @throws Exception throws if an exception occurs */ @Test public void addNonEmptyTile() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 20.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 2, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 0, new byte[] {1, 2, 3, 4}); } //use a query to test if the tile was inserted into database and to correct if the image is the same final String query = "SELECT tile_data FROM tileSetName WHERE zoom_level = 2 AND tile_column = 0 AND tile_row =0;"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileData = stmt.executeQuery(query);) { // assert the image was inputed into the file Assert.assertTrue("The GeoPackage did not successfully write the tile_data into the GeoPackage", tileData.next()); final byte[] bytes = tileData.getBytes("tile_data"); // compare images Assert.assertTrue("The GeoPackage tile_data does not match the tile_data of the one given", Arrays.equals(bytes, new byte[] {1, 2, 3, 4})); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an SQLException when adding a * duplicate tile to the GeoPackage. * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IllegalArgumentException * throws if an illegal argument occurs to a method * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = SQLException.class) public void addDuplicateTiles()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "ident", "description", new BoundingBox(1.1,1.1,100.1,100.1), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix matrixSet = gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int column = 1; final int row = 0; final byte[] imageData = new byte[]{1, 2, 3, 4}; //add tile twice gpkg.tiles().addTile(tileSet, matrixSet, column, row, imageData); gpkg.tiles().addTile(tileSet, matrixSet, column, row, imageData);//see if it will add the same tile twice Assert.fail("Expected GeoPackage to throw an SQLException due to a unique constraint violation (zoom level, tile column, and tile row)." 
+ " Was able to add a duplicate tile."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage throws an IllegalArgumentException when trying to * add a tile with a parameter that is null (image data) * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addBadTile()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //add tile to gpkg final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 4, 10, 10, 1, 1, 1.0, 1.0); gpkg.tiles().addTile(tileSet, tileMatrix1, 4, 0, null); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when adding a null parameter to a Tile object (image data)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage throws an IllegalArgumentException when trying to * add a tile with a parameter that is empty (image data) * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addBadTile2()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //add tile to gpkg final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 4, 10, 10, 1, 1, 1.0, 1.0); gpkg.tiles().addTile(tileSet,tileMatrix1, 4, 0, new byte[]{}); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when adding an empty parameter to Tile (image data)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage throws an IllegalArgumentException when trying to * add a tile with a parameter that is null (tileMatrix) * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addBadTile4()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //add tile to gpkg gpkg.tiles().addTile(tileSet, null, 4, 0, new byte[]{1,2,3,4}); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when adding a null parameter to a addTile method (tileMatrix)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage get tile will retrieve the correct tile with get tile method. * @throws Exception throws if an exception occurs */ @Test public void getTile() throws Exception { //create tiles and file final File testFile = this.getRandomFile(6); final byte[] originalTile1 = new byte[] {1, 2, 3, 4}; final byte[] originalTile2 = new byte[] {1, 2, 3, 4}; try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 90.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoom1 = 4; final int zoom2 = 8; //add tile to gpkg final int matrixHeight = 2; final int matrixWidth = 4; final int tileHeight = 512; final int tileWidth = 256; final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, zoom1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 8; final int tileHeight2 = 512; final int tileWidth2 = 256; final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, zoom2, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); final Coordinate<Integer> tile1 = new Coordinate<>(3, 0); final Coordinate<Integer> tile2 = new Coordinate<>(7, 0); gpkg.tiles().addTile(tileSet, tileMatrix1, tile1.getX(), tile1.getY(), originalTile1); gpkg.tiles().addTile(tileSet, tileMatrix2, tile2.getX(), tile2.getY(), originalTile2); //Retrieve tile from gpkg final Tile gpkgTile1 = gpkg.tiles().getTile(tileSet, tile1.getX(), tile1.getY(), zoom1); final Tile gpkgTile2 = gpkg.tiles().getTile(tileSet, tile2.getX(), tile2.getY(), zoom2); Assert.assertTrue("GeoPackage did not return the image expected when using getTile method.", Arrays.equals(gpkgTile1.getImageData(), originalTile1)); Assert.assertTrue("GeoPackage did not return the image expected when using getTile method.", Arrays.equals(gpkgTile2.getImageData(), originalTile2)); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new 
RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage getTile method returns null when the requested tile does not exist. * @throws Exception throws if an exception occurs */ @Test public void getTile2() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //Retrieve tile from gpkg final Tile gpkgTile1 = gpkg.tiles().getTile(tileSet, 4, 0, 4); Assert.assertTrue("GeoPackage did not return null when the tile doesn't exist in the getTile method.", gpkgTile1 == null); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage getTile method will retrieve the correct tile. * @throws Exception throws if an exception occurs */ @Test public void getTile3() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 80.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 3; final int tileHeight = 512; final int tileWidth = 256; final int zoom = 0; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); //Tile coords final Coordinate<Integer> coord1 = new Coordinate<>(2, 1); final byte[] imageData = new byte[]{1,2,3,4}; //Add tile to gpkg and retrieve it final Tile gpkgTileAdded = gpkg.tiles().addTile(tileSet, tileMatrix, coord1.getX(), coord1.getY(), imageData); final Tile gpkgTileReceived = gpkg.tiles().getTile(tileSet, coord1.getX(), coord1.getY(), zoom); Assert.assertTrue("GeoPackage did not return the same tile added to the gpkg.", gpkgTileAdded.getColumn() == gpkgTileReceived.getColumn() && gpkgTileAdded.getRow() == gpkgTileReceived.getRow() && gpkgTileAdded.getIdentifier() == gpkgTileReceived.getIdentifier() && gpkgTileAdded.getZoomLevel() == gpkgTileReceived.getZoomLevel() && Arrays.equals(gpkgTileAdded.getImageData(), gpkgTileReceived.getImageData())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will return null when the tile being searched for does not exist. 
* * @throws Exception throws if an exception occurs */ @Test public void getTileThatIsNotInGpkg() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", null, null, new BoundingBox(0.0, 0.0, 80.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 3; final int matrixHeight = 6; final int tileWidth = 256; final int tileHeight = 256; // add a tile matrix, but no tiles, to the gpkg gpkg.tiles().addTileMatrix(tileSet, 2, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); Assert.assertTrue("GeoPackage should have returned null for a missing tile.", gpkg.tiles().getTile(tileSet, 0, 0, 0) == null); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackage will throw an IllegalArgumentException when using the getTile method with a null value for the tile set. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void getTileWithNullTileEntrySet() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTile(null, 2, 2, 0); Assert.fail("GeoPackage did not throw an IllegalArgumentException when giving a null value for the tile set (using the getTile method)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests that getTile throws an IllegalArgumentException when requesting a tile by CRS coordinate * at a zoom level for which no tile matrix exists in the tile set. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * throws if an image cannot be read from or written */ @Test(expected = IllegalArgumentException.class) public void getTileRelativeTileCoordinateNonExistent() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, -80.0, 180.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 2; final CoordinateReferenceSystem coordinateReferenceSystem = new CoordinateReferenceSystem("EPSG", 4326); final CrsCoordinate crsCoordinate = new CrsCoordinate(0.0, -60.0, coordinateReferenceSystem); gpkg.tiles().getTile(tileSet, crsCoordinate, CrsProfileFactory.create(coordinateReferenceSystem).getPrecision(), zoomLevel); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will return the all and the correct zoom levels in a GeoPackage * * @throws Exception throws if an exception occurs */ @Test public void getZoomLevels() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(5.0,5.0,50.0,50.0), gpkg.core().getSpatialReferenceSystem(4326)); // Add tile matrices that represent zoom levels 0 and 12 final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); gpkg.tiles().addTileMatrix(tileSet, 12, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final Set<Integer> zooms = gpkg.tiles().getTileZoomLevels(tileSet); final ArrayList<Integer> expectedZooms = new ArrayList<>(); expectedZooms.add(new Integer(12)); expectedZooms.add(new Integer(0)); for(final Integer zoom : zooms) { Assert.assertTrue("The GeoPackage's get zoom levels method did not return expected values.", expectedZooms.stream() .anyMatch(currentZoom -> currentZoom.equals(zoom))); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when given a * TileSet null for getZoomLevels() * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getZoomLevelsNullTileSetContentEntry()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileZoomLevels(null); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when givinga null parameter to getTileZoomLevels"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackage will throw an IllegalArgumentException * when giving a null parameter to getRowCount * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void getRowCountNullContentEntry() throws Exception { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.core().getRowCount(null); Assert.fail("GeoPackage should have thrown an IllegalArgumentException."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Verifies that the GeoPackage counts the correct number of rows * with the method getRowCount * @throws Exception throws if an exception occurs */ @Test public void getRowCountVerify() throws Exception { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,80.0,50.0), gpkg.core().getSpatialReferenceSystem(4326)); //create two TileMatrices to represent the tiles final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 4; final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth2, matrixHeight2, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight); //add two tiles gpkg.tiles().addTile(tileSet, tileMatrix2, 0, 0, new byte[] {1, 2, 3, 4}); gpkg.tiles().addTile(tileSet, tileMatrix1, 0, 0, new byte[] {1, 2, 3, 4}); final long count = gpkg.core().getRowCount(tileSet); Assert.assertTrue(String.format("Expected a different value from GeoPackage on getRowCount. expected: 2 actual: %d", count),count == 2); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a null parameter to the method getTileMatrixSetEntry(); * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getTileMatrixSetEntryNullTileSetContentEntry()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileMatrixSet(null); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a null parameter for TileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if GeoPackage returns the expected tileMatrices using the * getTIleMatrices(TileSet tileSet) method * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * throws if an image cannot be read from or written */ @Test public void getTileMatricesVerify() throws ClassNotFoundException, SQLException, ConformanceException, IOException { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tables", "identifier", "description", new BoundingBox(0.0,0.0,80.0,80.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixHeight = 2; final int matrixWidth = 4; final int tileHeight = 512; final int tileWidth = 256; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 8; final int tileHeight2 = 512; final int tileWidth2 = 256; final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, 3, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 0, GeoPackageTilesAPITest.createImageBytes()); gpkg.tiles().addTile(tileSet, tileMatrix, 1, 0, GeoPackageTilesAPITest.createImageBytes()); final ArrayList<TileMatrix> expectedTileMatrix = new ArrayList<>(); expectedTileMatrix.add(tileMatrix); expectedTileMatrix.add(tileMatrix2); final List<TileMatrix> gpkgTileMatrices = gpkg.tiles().getTileMatrices(tileSet); Assert.assertTrue("Expected the GeoPackage to return two Tile Matrices.",gpkgTileMatrices.size() == 2); for(final TileMatrix gpkgTileMatrix : gpkg.tiles().getTileMatrices(tileSet)) { Assert.assertTrue("The tile entry's information in the GeoPackage does not match what was originally given to a GeoPackage", expectedTileMatrix.stream() .anyMatch(expectedTM -> expectedTM.getTableName() .equals(gpkgTileMatrix.getTableName()) && expectedTM.getMatrixHeight() == gpkgTileMatrix.getMatrixHeight() && expectedTM.getMatrixWidth() == gpkgTileMatrix.getMatrixWidth() && expectedTM.getPixelXSize() == gpkgTileMatrix.getPixelXSize() && expectedTM.getPixelYSize() == gpkgTileMatrix.getPixelYSize() && expectedTM.getZoomLevel() == gpkgTileMatrix.getZoomLevel())); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will return null if no TileMatrix Entries are * found in the GeoPackage that matches the TileSet given. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatricesNonExistant() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tables", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); Assert.assertTrue("Expected the GeoPackage to return null when no tile Matrices are found", gpkg.tiles().getTileMatrices(tileSet).size() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a matrix width that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 0, 5, 6, 7, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a matrix width that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a matrix height that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException2()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("name", "identifier", "description", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 0, 6, 7, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a matrix height that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a tile width that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException3()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 0, 7, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a tile width that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a tile height that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException4()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 6, 0, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a tile height that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a pixelXsize that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException5()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 6, 7, 0, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a pixelXsize that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a pixelYSize that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException6()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 6, 7, 8, 0); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a pixelYSize that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a Geopackage Tiles would throw an IllegalArgumentException when * attempting to add a Tile Matrix corresponding to the same tile set and * zoom level but have differing other fields * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixSameZoomDifferentOtherFields()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 2, 3, 4, 5, 6, 7); gpkg.tiles().addTileMatrix(tileSet, 0, 3, 2, 5, 4, 7, 6); Assert.fail("Expected GeoPackage Tiles to throw an IllegalArgumentException when addint a Tile Matrix with the same tile set and zoom level information but differing other fields"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage returns the same TileMatrix when trying to add * the same TileMatrix twice (verifies the values are the same) * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void addTileMatrixTwiceVerify()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,90.0,90.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); Assert.assertTrue("Expected the GeoPackage to return the existing Tile Matrix.",tileMatrix1.equals(tileMatrix2.getTableName(), tileMatrix2.getZoomLevel(), tileMatrix2.getMatrixWidth(), tileMatrix2.getMatrixHeight(), tileMatrix2.getTileWidth(), tileMatrix2.getTileHeight(), tileMatrix2.getPixelXSize(), tileMatrix2.getPixelYSize())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage returns the same TileMatrix when trying to add * the same TileMatrix twice (verifies the values are the same) * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixNullTileSet()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().addTileMatrix(null, 0, 2, 3, 4, 5, 6, 7); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when giving a null parameter TileSet to addTileMatrix"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when a user * tries to add a negative value for zoom level (when adding a tile Matrix * entry) * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixWithNegativeZoomLevel()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(2.0,1.0,4.0,3.0), gpkg.core().getSpatialReferenceSystem(0)); gpkg.tiles().addTileMatrix(tileSet, -1, 2, 4, 6, 8, 10, 12); } finally { if (testFile.exists()) { if (!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if given a non empty tile Matrix Metadata information can be added without throwing an error. 
* * @throws SQLException throws if an SQLException occurs * @throws Exception throws if an exception occurs */ @Test public void addNonEmptyTileMatrix() throws SQLException, Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { //add information to gpkg final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 80.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 4; final int matrixHeight = 8; final int tileWidth = 256; final int tileHeight = 512; gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); } //test if information added is accurate final int matrixWidth = 4; final int matrixHeight = 8; final int tileWidth = 256; final int tileHeight = 512; final String query = String.format("SELECT table_name FROM gpkg_tile_matrix " + "WHERE zoom_level = %d AND " + " matrix_height = %d AND " + " matrix_width = %d AND " + " tile_height = %d AND " + " tile_width = %d;", 1, matrixHeight, matrixWidth, tileHeight, tileWidth); try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tableName = stmt.executeQuery(query);) { Assert.assertTrue("The GeoPackage did not enter a record into the gpkg_tile_matrix table", tableName.next()); Assert.assertTrue("The GeoPackage did not enter the correct record into the gpkg_tile_matrix table", tableName.getString("table_name").equals("tileSetName")); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackage Tiles will throw an IllegalArgumentException when the * pixelXSize is not correctly calculated * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixIllegalBounds() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,180.0,90.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 5; final int matrixWidth = 10; final int matrixHeight = 11; final int tileWidth = 256; final int tileHeight = 512; final double pixelXSize = 500.23123;//invalid pixelXSize final double pixelYSize = tileSet.getBoundingBox().getHeight()/matrixHeight/tileHeight; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); fail("Expected GeoPackage Tiles to throw an IllegalArgumentException when pixelXSize != boundingBoxWidth/matrixWidth/tileWidth."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if GeoPackage Tiles will throw an IllegalArgumentException when the * pixelYSize is not correctly calculated * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixIllegalBounds2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,180.0,90.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 5; final int matrixWidth = 10; final int matrixHeight = 11; final int tileWidth = 256; final int tileHeight = 512; final double pixelXSize = tileSet.getBoundingBox().getWidth()/matrixWidth/tileWidth; final double pixelYSize = 500.23123;//invalid pixel y size gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); fail("Expected GeopackageTiles to throw an IllegalArgtumentException when pixelXSize != boundingBoxWidth/matrixWidth/tileWidth."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a null parameter to getTileMatrices * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getTileMatricesNullParameter() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileMatrices(null); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when giving getTileMatrices a TileSet that is null."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage getTIleMatrix can retrieve the correct TileMatrix * from the GeoPackage. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatrixVerify()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,100.0,100.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixHeight = 2; final int matrixWidth = 6; final int tileHeight = 512; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 1; final int matrixWidth2 = 3; final int tileHeight2 = 512; final int tileWidth2 = 256; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); final TileMatrix returnedTileMatrix = gpkg.tiles().getTileMatrix(tileSet, 0); Assert.assertTrue("GeoPackage did not return the TileMatrix expected", tileMatrix.getMatrixHeight() == returnedTileMatrix.getMatrixHeight() && tileMatrix.getMatrixWidth() == returnedTileMatrix.getMatrixWidth() && tileMatrix.getPixelXSize() == returnedTileMatrix.getPixelXSize() && tileMatrix.getPixelYSize() == returnedTileMatrix.getPixelYSize() && tileMatrix.getTableName() .equals(returnedTileMatrix.getTableName()) && tileMatrix.getTileHeight() == returnedTileMatrix.getTileHeight() && tileMatrix.getTileWidth() == returnedTileMatrix.getTileWidth() && tileMatrix.getZoomLevel() == returnedTileMatrix.getZoomLevel()); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage returns null if the TileMatrix entry does not * exist in the GeoPackage file. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatrixNonExistant()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("TableName", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); Assert.assertTrue("GeoPackage was supposed to return null when there is a nonexistant TileMatrix entry at that zoom level and TileSet", null == gpkg.tiles().getTileMatrix(tileSet, 0)); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when * giving a null parameter to getTileMatrix. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getTileMatrixNullParameter()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(10); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileMatrix(null, 8); Assert.fail("GeoPackage should have thrown an IllegalArgumentException when giving a null parameter for TileSet in the method getTileMatrix"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if getTileMatrixSet retrieves the values that is expected * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatrixSetVerify()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { //values for tileMatrixSet final String tableName = "tableName"; final String identifier = "identifier"; final String description = "description"; final BoundingBox bBox = new BoundingBox(2.0, 1.0, 4.0, 3.0); final SpatialReferenceSystem srs = gpkg.core().getSpatialReferenceSystem(4326); //add tileSet and tileMatrixSet to gpkg final TileSet tileSet = gpkg.tiles().addTileSet(tableName, identifier, description, bBox, srs); final TileMatrixSet tileMatrixSet = gpkg.tiles().getTileMatrixSet(tileSet); Assert.assertTrue("Expected different values from getTileMatrixSet for SpatialReferenceSystem or BoundingBox or TableName.", tileMatrixSet.getBoundingBox() .equals(bBox) && tileMatrixSet.getSpatialReferenceSystem().equals(srs) && tileMatrixSet.getTableName() .equals(tableName)); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw a GeoPackage Conformance Exception * when given a GeoPackage that violates a requirement with a severity equal * to Error * @throws SQLException throws if an SQLException occurs * @throws Exception throws if an exception occurs */ @Test(expected = ConformanceException.class) public void geoPackageConformanceException() throws SQLException, Exception { final File testFile = this.getRandomFile(19); testFile.createNewFile(); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Open)) { Assert.fail("GeoPackage did not throw a geoPackageConformanceException as expected."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperRightGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-45.234567, 45.213192, geodeticRefSys);//upper right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperLeftGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-180, 85, geodeticRefSys);//upper left tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 0. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile));
                }
            }
        }
    }

    /**
     * Tests if the GeoPackage can convert a Geodetic crsCoordinate to a
     * relative tile coordinate
     *
     * @throws SQLException
     *             throws if an SQLException occurs
     * @throws ClassNotFoundException
     *             if the connection to the database cannot be made
     * @throws ConformanceException
     *             throws if it does not meet all the requirements
     * @throws IOException
     *             if an error occurs from reading or writing a Tile or File
     */
    @Test
    public void crsToRelativeTileCoordinateLowerLeftGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException
    {
        final int zoomLevel = 1;
        final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326);
        final CrsCoordinate crsCoord = new CrsCoordinate(-90, 41, geodeticRefSys);//lies on the shared edge of the two lower tiles

        final File testFile = this.getRandomFile(8);

        try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create))
        {
            final TileSet tileSet = gpkg.tiles().addTileSet("tableName",
                                                            "identifier",
                                                            "description",
                                                            new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066),
                                                            gpkg.core().getSpatialReferenceSystem(4326));
            final int matrixWidth = 2;
            final int matrixHeight = 2;
            final int pixelXSize = 256;
            final int pixelYSize = 256;

            gpkg.tiles().addTileMatrix(tileSet,
                                       zoomLevel,
                                       matrixWidth,
                                       matrixHeight,
                                       pixelXSize,
                                       pixelYSize,
                                       (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize,
                                       (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize);

            final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel);

            Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. "
                                                + "\nExpected Row: 1, Expected Column: 1. \nActual Row: %d, Actual Column: %d",
                                            relativeCoord.getY(),
                                            relativeCoord.getX()),
                              relativeCoord.getY() == 1 && relativeCoord.getX() == 1);
        }
        finally
        {
            if(testFile.exists())
            {
                if(!testFile.delete())
                {
                    throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerRightGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-0.000001, 12, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 1, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperLeftGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(-45.0, 5.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 0, Expected Column: 0.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperRightGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(-42.0, 5.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 0, Expected Column: 1.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerLeftGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(-47.0, -45.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 1, Expected Column: 0.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerRightGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(4.999, -55.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 1, Expected Column: 1.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage can translate a crs to a relative tile coordinate * when there are multiple zoom levels and when there are more tiles at the * higher zoom * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateMultipleZoomLevels() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 5; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-27.5, -1.25, geodeticRefSys); final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-100.0, -60.0, 100.0, 60.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth1 = 16; final int matrixHeight1 = 24; final int pixelXSize = 256; final int pixelYSize = 512; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth1, matrixHeight1, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth1 )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight1)/pixelYSize); final int matrixWidth2 = 4; final int matrixHeight2 = 6; final int zoomLevel2 = 3; gpkg.tiles().addTileMatrix(tileSet, zoomLevel2, matrixWidth2, matrixHeight2, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth2 )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/pixelYSize); final int matrixWidth3 = 8; final int matrixHeight3 = 12; final int zoomLevel3 = 4; gpkg.tiles().addTileMatrix(tileSet, zoomLevel3, matrixWidth3, matrixHeight3, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth3 )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight3)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 12, Expected Column: 5. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 12 && relativeCoord.getX() == 5); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies in the middle of four tiles. 
     *
     * @throws SQLException
     *             throws if an SQLException occurs
     * @throws ClassNotFoundException
     *             if the connection to the database cannot be made
     * @throws ConformanceException
     *             throws if it does not meet all the requirements
     * @throws IOException
     *             if an error occurs from reading or writing a Tile or File
     */
    @Test
    public void crsToRelativeTileCoordEdgeCase() throws SQLException, ClassNotFoundException, ConformanceException, IOException
    {
        final int zoomLevel = 15;
        final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326);
        final CrsCoordinate crsCoord = new CrsCoordinate(76.4875, 36.45, geodeticRefSys);//lies next to the corner shared by four tiles

        final File testFile = this.getRandomFile(8);

        try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create))
        {
            final TileSet tileSet = gpkg.tiles().addTileSet("tableName",
                                                            "identifier",
                                                            "description",
                                                            new BoundingBox(-180.0, 0.0, 90.0, 85.05),
                                                            gpkg.core().getSpatialReferenceSystem(4326));

            final int matrixWidth = 20;
            final int matrixHeight = 7;
            final int pixelXSize = 256;
            final int pixelYSize = 256;

            gpkg.tiles().addTileMatrix(tileSet,
                                       zoomLevel,
                                       matrixWidth,
                                       matrixHeight,
                                       pixelXSize,
                                       pixelYSize,
                                       (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize,
                                       (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize);

            final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel);

            Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. "
                                                + "\nExpected Row: 3, Expected Column: 18. \nActual Row: %d, Actual Column: %d",
                                            relativeCoord.getY(),
                                            relativeCoord.getX()),
                              relativeCoord.getY() == 3 && relativeCoord.getX() == 18);
        }
        finally
        {
            if(testFile.exists())
            {
                if(!testFile.delete())
                {
                    throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile));
                }
            }
        }
    }

    /**
     * This tests the validity of the transformation of crs to relative tile
     * coordinate when the crs coordinate lies between two tiles on top of each
     * other
     *
     * @throws SQLException
     *             throws if an SQLException occurs
     * @throws ClassNotFoundException
     *             if the connection to the database cannot be made
     * @throws ConformanceException
     *             throws if it does not meet all the requirements
     * @throws IOException
     *             if an error occurs from reading or writing a Tile or File
     */
    @Test
    public void crsToRelativeTileCoordEdgeCase2() throws SQLException, ClassNotFoundException, ConformanceException, IOException
    {
        final int zoomLevel = 15;
        final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326);
        final CrsCoordinate crsCoord = new CrsCoordinate(10, 25, geodeticRefSys);//lies on the boundary between the two vertically adjacent tiles in the left column

        final File testFile = this.getRandomFile(8);

        try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create))
        {
            final TileSet tileSet = gpkg.tiles().addTileSet("tableName",
                                                            "identifier",
                                                            "description",
                                                            new BoundingBox(0.0, 0.0, 30.0, 50.0),
                                                            gpkg.core().getSpatialReferenceSystem(4326));

            final int matrixWidth = 2;
            final int matrixHeight = 2;
            final int pixelXSize = 256;
            final int pixelYSize = 256;

            gpkg.tiles().addTileMatrix(tileSet,
                                       zoomLevel,
                                       matrixWidth,
                                       matrixHeight,
                                       pixelXSize,
                                       pixelYSize,
                                       (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize,
                                       (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize);

            final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel);

            Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. "
                                                + "\nExpected Row: 1, Expected Column: 0. \nActual Row: %d, Actual Column: %d",
                                            relativeCoord.getY(),
                                            relativeCoord.getX()),
                              relativeCoord.getY() == 1 && relativeCoord.getX() == 0);
        }
        finally
        {
            if(testFile.exists())
            {
                if(!testFile.delete())
                {
                    throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the left border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase3() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(0, 40, geodeticRefSys);//upper Left tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 0. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the right border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase4() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(29.9, 30, geodeticRefSys);//upper right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the top border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase5() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//upper right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the bottom border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase6() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 0.01, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 1, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Test if a crsCoordinate can be translated to a tile coordiante * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordianteEdgeCase7()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 0; final CrsCoordinate coordinate = new CrsCoordinate((GlobalGeodeticCrsProfile.Bounds.getMinX()+(2*(GlobalGeodeticCrsProfile.Bounds.getWidth())) / 8), (GlobalGeodeticCrsProfile.Bounds.getMaxY()-(6*(GlobalGeodeticCrsProfile.Bounds.getHeight())) / 9), "epsg", 4326); final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", GlobalGeodeticCrsProfile.Bounds, gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 8; final int matrixHeight = 9; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, coordinate, CrsProfileFactory.create("EPSG", 4326).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 6, Expected Column: 2. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 6 && relativeCoord.getX() == 2); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw the appropriate exception when giving * the method a null value for crsCoordinate. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().crsToTileCoordinate(tileSet, null, CrsProfileFactory.create("EPSG", 4326).getPrecision(), 0); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when trying to input a crs tile coordinate that was null to the method crsToRelativeTileCoordinate."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw the appropriate exception when giving * the method a null value for crsCoordinate. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final int zoomLevel = 1; final CoordinateReferenceSystem coordinateReferenceSystem = new CoordinateReferenceSystem("Police", 99); final CrsCoordinate crsCoord = new CrsCoordinate(15, 20, coordinateReferenceSystem); gpkg.tiles().crsToTileCoordinate(null, crsCoord, 2, zoomLevel); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when trying to input a tileSet that was null to the method crsToRelativeTileCoordinate."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate from a different SRS from the tiles. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException3() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.fail("Expected the GoePackage to throw an exception when the crs coordinate and the tiles are from two different projections."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate from with a zoom level that is not in the matrix table * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException4() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; final int differentZoomLevel = 12; gpkg.tiles().addTileMatrix(tileSet, differentZoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate is not within bounds * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException5() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, -50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate from a different SRS from the tiles. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException6() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG", 3857); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.fail("Expected the GoePackage to throw an exception when the crs coordinate and the tiles are from two different projections."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a tileCoordinate can be converted to the correct CRS Coordinate * @throws ClassNotFoundException throws * @throws SQLException throws * @throws ConformanceException throws * @throws IOException throws */ @Test public void tileToCrsCoordinate() throws ClassNotFoundException, SQLException, ConformanceException, IOException { File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { BoundingBox bBox = new BoundingBox(0, 0.0, 180.0,90.0); int row = 3; int column = 5; int zoomLevel = 4; int matrixWidth = 6; int matrixHeight = 4; TileMatrix tileMatrix = createTileSetAndTileMatrix(gpkg, bBox, zoomLevel, matrixWidth, matrixHeight); CrsCoordinate crsCoordReturned = gpkg.tiles().tileToCrsCoordinate(gpkg.tiles().getTileSet(tileMatrix.getTableName()), column, row, zoomLevel); CrsCoordinate crsCoordExpected = new CrsCoordinate(bBox.getMinX() + column*(bBox.getWidth()/matrixWidth), bBox.getMaxY() - row* (bBox.getHeight()/matrixHeight), new GlobalGeodeticCrsProfile().getCoordinateReferenceSystem()); assertCoordinatesEqual(crsCoordReturned, crsCoordExpected); } finally { deleteFile(testFile); } } /** * Tests if a tileCoordinate can be converted to the correct CRS Coordinate * @throws ClassNotFoundException throws * @throws SQLException throws * @throws ConformanceException throws * @throws IOException throws */ @Test public void tileToCrsCoordinate2() throws ClassNotFoundException, SQLException, ConformanceException, IOException { File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { SphericalMercatorCrsProfile spherMercator = new SphericalMercatorCrsProfile(); BoundingBox bBox = new BoundingBox(spherMercator.getBounds().getMinX()/2, spherMercator.getBounds().getMinY()/3, spherMercator.getBounds().getMaxX(), spherMercator.getBounds().getMaxY()/2); int row = 5; int column = 1; int zoomLevel = 4; int matrixWidth = 13; int matrixHeight = 8; SpatialReferenceSystem srs = gpkg.core().addSpatialReferenceSystem(spherMercator.getName(), spherMercator.getCoordinateReferenceSystem().getIdentifier(), spherMercator.getCoordinateReferenceSystem().getAuthority(), spherMercator.getCoordinateReferenceSystem().getIdentifier(), spherMercator.getWellKnownText(), spherMercator.getDescription()); TileMatrix tileMatrix = createTileSetAndTileMatrix(gpkg, srs, bBox, zoomLevel, matrixWidth, matrixHeight, 256, 256, "tableName"); CrsCoordinate crsCoordExpected = new CrsCoordinate(bBox.getMinX() + column*(bBox.getWidth()/matrixWidth), bBox.getMaxY() - row* (bBox.getHeight()/matrixHeight), spherMercator.getCoordinateReferenceSystem()); CrsCoordinate crsCoordReturned = gpkg.tiles().tileToCrsCoordinate(gpkg.tiles().getTileSet(tileMatrix.getTableName()), column, row, zoomLevel); assertCoordinatesEqual(crsCoordReturned, crsCoordExpected); } finally { deleteFile(testFile); } } /** * Tests if a tileCoordinate can be converted to the correct CRS Coordinate * @throws ClassNotFoundException throws * @throws SQLException throws * @throws ConformanceException throws * @throws IOException throws */ @Test public void tileToCrsCoordinate3() throws ClassNotFoundException, SQLException, ConformanceException, IOException { File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { BoundingBox bBox = new BoundingBox(-22.1258, -15.325, 43.125, 78.248); int row = 2; int column = 7; int zoomLevel = 4; int matrixWidth = 13; int matrixHeight = 8; TileMatrix tileMatrix = createTileSetAndTileMatrix(gpkg, bBox, 
zoomLevel,
                                                                 matrixWidth,
                                                                 matrixHeight);

            CrsCoordinate crsCoordReturned = gpkg.tiles().tileToCrsCoordinate(gpkg.tiles().getTileSet(tileMatrix.getTableName()), column, row, zoomLevel);
            CrsCoordinate crsCoordExpected = new CrsCoordinate(bBox.getMinX() + column*(bBox.getWidth()/matrixWidth),
                                                               bBox.getMaxY() - row*(bBox.getHeight()/matrixHeight),
                                                               new GlobalGeodeticCrsProfile().getCoordinateReferenceSystem());

            assertCoordinatesEqual(crsCoordReturned, crsCoordExpected);
        }
        finally
        {
            deleteFile(testFile);
        }
    }

    private void assertCoordinatesEqual(CrsCoordinate crsCoordReturned, CrsCoordinate crsCoordExpected)
    {
        assertEquals(String.format("The coordinate returned was not the values expected.\n"
                                       + "Expected Coordinate: (%f, %f) Crs: %s %d\nReturned Coordinate: (%f, %f) Crs: %s %d",
                                   crsCoordExpected.getX(),
                                   crsCoordExpected.getY(),
                                   crsCoordExpected.getCoordinateReferenceSystem().getAuthority(),
                                   crsCoordExpected.getCoordinateReferenceSystem().getIdentifier(),
                                   crsCoordReturned.getX(),
                                   crsCoordReturned.getY(),
                                   crsCoordReturned.getCoordinateReferenceSystem().getAuthority(),
                                   crsCoordReturned.getCoordinateReferenceSystem().getIdentifier()),
                     crsCoordExpected,
                     crsCoordReturned);
    }

    private void deleteFile(File testFile)
    {
        if(testFile.exists())
        {
            if(!testFile.delete())
            {
                throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile));
            }
        }
    }

    private static TileMatrix createTileSetAndTileMatrix(final GeoPackage gpkg, final BoundingBox bBox, final int zoomLevel, final int matrixWidth, final int matrixHeight) throws SQLException
    {
        return createTileSetAndTileMatrix(gpkg, gpkg.core().getSpatialReferenceSystem(4326), bBox, zoomLevel, matrixWidth, matrixHeight, 256, 256, "tableName");
    }

    private static TileMatrix createTileSetAndTileMatrix(final GeoPackage gpkg, final SpatialReferenceSystem srs, final BoundingBox bBox, final int zoomLevel, final int matrixWidth, final int matrixHeight, final int tileWidth, final int tileHeight, final String identifierTableName) throws SQLException
    {
        //create a tileSet
        final TileSet tileSet = gpkg.tiles()
                                    .addTileSet(identifierTableName,
                                                identifierTableName,
                                                "description",
                                                bBox,
                                                srs);
        //create matrix
        return gpkg.tiles().addTileMatrix(tileSet,
                                          zoomLevel,
                                          matrixWidth,
                                          matrixHeight,
                                          tileWidth,
                                          tileHeight,
                                          bBox.getWidth()  / matrixWidth  / tileWidth,
                                          bBox.getHeight() / matrixHeight / tileHeight);
    }

    private static byte[] createImageBytes() throws IOException
    {
        return ImageUtility.bufferedImageToBytes(new BufferedImage(256, 256, BufferedImage.TYPE_INT_ARGB), "PNG");
    }

    private String getRanString(final int length)
    {
        final String characters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";

        final char[] text = new char[length];
        for (int i = 0; i < length; i++)
        {
            text[i] = characters.charAt(this.randomGenerator.nextInt(characters.length()));
        }
        return new String(text);
    }

    private File getRandomFile(final int length)
    {
        File testFile;
        do
        {
            testFile = new File(String.format(FileSystems.getDefault().getPath(this.getRanString(length)).toString() + ".gpkg"));
        }
        while (testFile.exists());

        return testFile;
    }

    private Connection getConnection(final String filePath) throws Exception
    {
        Class.forName("org.sqlite.JDBC"); // Register the driver

        return DriverManager.getConnection("jdbc:sqlite:" + filePath);
    }
}
Geopackage/test/com/rgi/geopackage/GeoPackageTilesAPITest.java
/* Copyright (C) 2014 Reinventing Geospatial, Inc * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>, * or write to the Free Software Foundation, Inc., 59 Temple Place - * Suite 330, Boston, MA 02111-1307, USA. */ package com.rgi.geopackage; import static org.junit.Assert.fail; import java.awt.image.BufferedImage; import java.io.File; import java.io.IOException; import java.nio.file.FileSystems; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Random; import java.util.Set; import org.junit.Assert; import org.junit.Test; import com.rgi.common.BoundingBox; import com.rgi.common.coordinate.Coordinate; import com.rgi.common.coordinate.CoordinateReferenceSystem; import com.rgi.common.coordinate.CrsCoordinate; import com.rgi.common.coordinate.referencesystem.profile.CrsProfile; import com.rgi.common.coordinate.referencesystem.profile.CrsProfileFactory; import com.rgi.common.coordinate.referencesystem.profile.EllipsoidalMercatorCrsProfile; import com.rgi.common.coordinate.referencesystem.profile.GlobalGeodeticCrsProfile; import com.rgi.common.util.ImageUtility; import com.rgi.geopackage.GeoPackage.OpenMode; import com.rgi.geopackage.core.SpatialReferenceSystem; import com.rgi.geopackage.tiles.Tile; import com.rgi.geopackage.tiles.TileMatrix; import com.rgi.geopackage.tiles.TileMatrixSet; import com.rgi.geopackage.tiles.TileSet; import com.rgi.geopackage.verification.ConformanceException; import com.rgi.geopackage.verification.VerificationLevel; /** * @author Jenifer Cochran */ @SuppressWarnings("static-method") public class GeoPackageTilesAPITest { private final Random randomGenerator = new Random(); /** * This tests if a GeoPackage can add a tile set successfully without throwing errors. 
* * @throws SQLException throws when an SQLException occurs * @throws Exception throws if an exception occurs */ @Test public void addTileSet() throws SQLException, Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("pyramid", "title", "tiles", new BoundingBox(0.0, 0.0, 50.0, 60.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 4; final int tileHeight = 512; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); } final String query = "SELECT table_name FROM gpkg_tile_matrix_set WHERE table_name = 'pyramid';"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileName = stmt.executeQuery(query);) { Assert.assertTrue("The GeoPackage did not set the table_name into the gpkg_tile_matrix_set when adding a new set of tiles.", tileName.next()); final String tableName = tileName.getString("table_name"); Assert.assertTrue("The GeoPackage did not insert the correct table name into the gpkg_tile_matrix_set when adding a new set of tiles.", tableName.equals("pyramid")); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when given a null value for tileSetEntry * * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithNullTileSetEntry() throws Exception { final File testFile = this.getRandomFile(3); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().addTile(null, gpkg.tiles().getTileMatrix(tileSet, 0), 0, 0, GeoPackageTilesAPITest.createImageBytes()); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a null value for tileSetEntry."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when given * a tilesetentry with a null value for the boundingbox * * @throws Exception * throws if an exception occurs * */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithNullBoundingBox() throws Exception { final File testFile = this.getRandomFile(3); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("tableName", "ident", "desc", null, gpkg.core().getSpatialReferenceSystem(4236)); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a null value for BoundingBox."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the Geopackage will throw an IllegalArgumentException * If it gives tries to create a TileSet with a null SRS value * @throws Exception throws when exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithNullSRS() throws Exception { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("name", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), null); Assert.fail("GeoPackage should have thrown an IllegalArgumentException when TileEntrySet is null."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Test if the GeoPackage will add a Tile set with a new Spatial Reference System (one created by user). * @throws Exception throws if an exception occurs */ @Test public void addTileSetWithNewSpatialReferenceSystem() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.core().addSpatialReferenceSystem("scaled world mercator", 9804, "org", 9804, "definition", "description"); } final String query = "SELECT srs_name FROM gpkg_spatial_ref_sys "+ "WHERE srs_name = 'scaled world mercator' AND "+ "srs_id = 9804 AND "+ "organization = 'org' AND "+ "definition = 'definition' AND "+ "description = 'description';"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet srsInfo = stmt.executeQuery(query);) { Assert.assertTrue("The Spatial Reference System added to the GeoPackage by the user did not contain the same information given.", srsInfo.next()); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if given a GeoPackage with tiles already inside it can add another Tile Set without throwing an error and verify that it entered the correct information. 
* * @throws Exception throws if an exception occurs */ @Test public void addTileSetToExistingGpkgWithTilesInside() throws Exception { final File testFile = this.getRandomFile(5); //create a geopackage with tiles inside try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetONE", "title", "tiles", new BoundingBox(0.0, 0.0, 60.0, 60.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); //open a file with tiles inside and add more tiles try(GeoPackage gpkgWithTiles = new GeoPackage(testFile, OpenMode.Open)) { final TileSet tileSetEntry2 = gpkgWithTiles.tiles() .addTileSet("newTileSetTWO", "title2", "tiles", new BoundingBox(0.0, 0.0, 70.0, 50.0), gpkgWithTiles.core().getSpatialReferenceSystem(4326)); final double pixelXSize = (tileSetEntry2.getBoundingBox().getWidth()/matrixWidth)/tileWidth; final double pixelYSize = (tileSetEntry2.getBoundingBox().getHeight()/matrixHeight)/tileHeight; gpkgWithTiles.tiles().addTileMatrix(tileSetEntry2, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); } } //make sure the information was added to contents table and tile matrix set table final String query = "SELECT cnts.table_name FROM gpkg_contents AS cnts WHERE cnts.table_name"+ " IN(SELECT tms.table_name FROM gpkg_tile_matrix_set AS tms WHERE cnts.table_name = tms.table_name);"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileTableNames = stmt.executeQuery(query);) { if(!tileTableNames.next()) { Assert.fail("The two tiles tables where not successfully added to both the gpkg_contents table and the gpkg_tile_matrix_set."); } while (tileTableNames.next()) { final String tilesTableName = tileTableNames.getString("table_name"); Assert.assertTrue("The tiles table names did not match what was being added to the GeoPackage", tilesTableName.equals("newTileSetTWO") || tilesTableName.equals("tileSetONE")); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an error when adding a tileset with the same name as another tileset in the GeoPackage. 
* * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetWithRepeatedTileSetName() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("repeated_name", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 2, 2); final TileSet tileSetEntry2 = gpkg.tiles() .addTileSet("repeated_name", "title2", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().addTileMatrix(tileSetEntry2, 0, 2, 2, 2, 2, 2, 2); Assert.fail("The GeoPackage should throw an IllegalArgumentException when a user gives a Tile Set Name that already exists."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage can add 2 Tile Matrix entries with * the two different tile pyramids can be entered into one gpkg * @throws Exception throws if an exception occurs */ @Test public void addTileSetToExistingTilesTable() throws Exception { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "tiles", "desc", new BoundingBox(0.0, 0.0, 70.0, 70.0), gpkg.core().getSpatialReferenceSystem(4326)); final ArrayList<TileSet> tileSetContnentEntries = new ArrayList<>(); tileSetContnentEntries.add(tileSet); tileSetContnentEntries.add(tileSet); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 4; final int tileHeight2 = 256; final int tileWidth2 = 256; gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); for(final TileSet gpkgEntry : gpkg.tiles().getTileSets()) { Assert.assertTrue("The tile entry's information in the GeoPackage does not match what was originally given to a GeoPackage", tileSetContnentEntries.stream() .anyMatch(tileEntry -> tileEntry.getBoundingBox().equals(gpkgEntry.getBoundingBox()) && tileEntry.getDataType() .equals(gpkgEntry.getDataType()) && tileEntry.getDescription().equals(gpkgEntry.getDescription()) && tileEntry.getIdentifier() .equals(gpkgEntry.getIdentifier()) && tileEntry.getTableName() .equals(gpkgEntry.getTableName()) && tileEntry.getSpatialReferenceSystemIdentifier().equals(gpkgEntry.getSpatialReferenceSystemIdentifier()))); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delte testFile. 
testFile: %s", testFile)); } } } } /** * This ensures that when a user tries to add the same tileSet two times * that the TileSet object that is returned is the one that already exists * in the GeoPackage and verifies its contents * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test public void addSameTileSetTwice() throws ClassNotFoundException, SQLException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final String tableName = "tableName"; final String identifier = "identifier"; final String description = "description"; final BoundingBox boundingBox = new BoundingBox(2.0,1.0,4.0,3.0); final SpatialReferenceSystem srs = gpkg.core().getSpatialReferenceSystem(0); final TileSet tileSet = gpkg.tiles().addTileSet(tableName, identifier, description, boundingBox, srs); final TileSet sameTileSet = gpkg.tiles().addTileSet(tableName, identifier, description, boundingBox, srs); Assert.assertTrue("The GeoPackage did not return the same tile set when trying to add the same tile set twice.", sameTileSet.equals(tileSet.getTableName(), tileSet.getDataType(), tileSet.getIdentifier(), tileSet.getDescription(), tileSet.getBoundingBox(), tileSet.getSpatialReferenceSystemIdentifier())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Expects GeoPackage to throw an IllegalArgumentException when giving * addTileSet a parameter with a null value for bounding box * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test(expected = IllegalArgumentException.class) public void addTileSetBadTableName() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("TableName", "identifier", "definition", null, gpkg.core().getSpatialReferenceSystem(-1)); Assert.fail("Expected an IllegalArgumentException when giving a null value for bounding box for addTileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Expects GeoPackage to throw an IllegalArgumentException when giving * addTileSet a parameter with a null value for bounding box * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test(expected = IllegalArgumentException.class) public void addTileSetBadSRS() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("TableName", "identifier", "definition", new BoundingBox(0.0,0.0,0.0,0.0), null); Assert.fail("Expected an IllegalArgumentException when giving a null value for bounding box for addTileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Expects GeoPackage to throw an IllegalArgumentException when giving * addTileSet a parameter with a null value for bounding box * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileSetBadBoundingBox() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("TableName", "identifier", "definition", new BoundingBox(0.0,0.0,0.0,0.0), null); Assert.fail("Expected an IllegalArgumentException when giving a null value for bounding box for addTileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when The table name is an empty string * for TileSet. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTileSetContentEntryInvalidTableName() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles() .addTileSet("", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when given a TileSet with an empty string for the table name."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if GeoPackageTiles throws an IllegalArgumentException when giving a * table name with symbols * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile */ @Test(expected = IllegalArgumentException.class) public void addTileIllegalArgumentException() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { gpkg.tiles().addTileSet("badTableName^", "identifier", "description", new BoundingBox(0.0,0.0,2.0,2.0), gpkg.core().getSpatialReferenceSystem(0)); fail("Expected to get an IllegalArgumentException for giving an illegal tablename (with symbols not allowed by GeoPackage)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackageTiles throws an IllegalArgumentException when giving a * table name starting with gpkg * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileIllegalArgumentException2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { gpkg.tiles().addTileSet("gpkg_bad_tablename", "identifier", "description", new BoundingBox(0.0,0.0,2.0,2.0), gpkg.core().getSpatialReferenceSystem(0)); fail("Expected to get an IllegalArgumentException for giving an illegal tablename (starting with gpkg_ which is not allowed by GeoPackage)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackageTiles throws an IllegalArgumentException when giving a * table name with a null value * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileIllegalArgumentException3() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { gpkg.tiles().addTileSet(null, "identifier", "description", new BoundingBox(0.0,0.0,2.0,2.0), gpkg.core().getSpatialReferenceSystem(0)); fail("Expected to get an IllegalArgumentException for giving an illegal tablename (a null value)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will return the same tileSets that was given to the GeoPackage when adding tileSets. 
* @throws Exception if an exception occurs */ @Test public void getTileSetsFromGpkg() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "tiles", "desc", new BoundingBox(0.0, 0.0, 90.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileSet tileSet2 = gpkg.tiles() .addTileSet("SecondTileSet", "ident", "descrip", new BoundingBox(1.0,1.0,122.0,111.0), gpkg.core().getSpatialReferenceSystem(4326)); final ArrayList<TileSet> tileSetContnentEntries = new ArrayList<>(); tileSetContnentEntries.add(tileSet); tileSetContnentEntries.add(tileSet2); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 4; gpkg.tiles().addTileMatrix(tileSet2, 1, matrixWidth2, matrixHeight2, tileWidth, tileHeight, (tileSet2.getBoundingBox().getWidth()/matrixWidth2)/tileWidth, (tileSet2.getBoundingBox().getHeight()/matrixHeight2)/tileHeight); final Collection<TileSet> tileSetsFromGpkg = gpkg.tiles().getTileSets(); Assert.assertTrue("The number of tileSets added to a GeoPackage do not match with how many is retrieved from a GeoPacakage.",tileSetContnentEntries.size() == tileSetsFromGpkg.size()); for(final TileSet gpkgEntry : tileSetsFromGpkg) { Assert.assertTrue("The tile entry's information in the GeoPackage does not match what was originally given to a GeoPackage", tileSetContnentEntries.stream() .anyMatch(tileEntry -> tileEntry.getBoundingBox().equals(gpkgEntry.getBoundingBox()) && tileEntry.getDataType() .equals(gpkgEntry.getDataType()) && tileEntry.getDescription().equals(gpkgEntry.getDescription()) && tileEntry.getIdentifier() .equals(gpkgEntry.getIdentifier()) && tileEntry.getTableName() .equals(gpkgEntry.getTableName()) && tileEntry.getSpatialReferenceSystemIdentifier().equals(gpkgEntry.getSpatialReferenceSystemIdentifier()))); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will find no tile Sets when searching with an SRS that is not in the GeoPackage. * @throws Exception throws if an exception occurs */ @Test public void getTileSetWithNewSRS() throws Exception { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile)) { final Collection<TileSet> gpkgTileSets = gpkg.tiles().getTileSets(gpkg.core().addSpatialReferenceSystem("name", 123,"org", 123,"def","desc")); Assert.assertTrue("Should not have found any tile sets because there weren't any in " + "GeoPackage that matched the SpatialReferenceSystem given.", gpkgTileSets.size() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the getTileSet returns null when the tile table does not exist * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileSetVerifyReturnNull()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().getTileSet("table_not_here"); Assert.assertTrue("GeoPackage expected to return null when the tile set does not exist in GeoPackage",tileSet == null); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the getTileSet returns the expected values. * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileSetVerifyReturnCorrectTileSet()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("ttable","identifier", "Desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileSet returnedTileSet = gpkg.tiles().getTileSet("ttable"); Assert.assertTrue("GeoPackage did not return the same values given to tile set", tileSet.getBoundingBox().equals(returnedTileSet.getBoundingBox()) && tileSet.getDescription().equals(returnedTileSet.getDescription()) && tileSet.getDataType() .equals(returnedTileSet.getDataType()) && tileSet.getIdentifier() .equals(returnedTileSet.getIdentifier()) && tileSet.getLastChange() .equals(returnedTileSet.getLastChange()) && tileSet.getTableName() .equals(returnedTileSet.getTableName()) && tileSet.getSpatialReferenceSystemIdentifier().equals(returnedTileSet.getSpatialReferenceSystemIdentifier())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect there are zoom levels for * a tile that is not represented in the Tile Matrix Table. * Should throw a IllegalArgumentException. 
* @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 18, 20, 20, 2,2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 0, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentExceptionException when Tile Matrix Table " + "does not contain a record for the zoom level of a tile in the Pyramid User Data Table."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_row * is larger than the matrix_height -1. Which is a violation * of the GeoPackage Specifications. Requirement 55. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException2() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 10, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentException when tile_row " + "is larger than matrix_height - 1 when zoom levels are equal."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_row * is less than 0. Which is a violation * of the GeoPackage Specifications. Requirement 55. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException3() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, 0, -1, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentException when tile_row " + "is less than 0."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_column * is larger than matrix_width -1. Which is a violation * of the GeoPackage Specifications. Requirement 54. 
* @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException4() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet,tileMatrix, 10, 0, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentException when tile_column " + "is larger than matrix_width -1."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can detect the tile_column * is less than 0. Which is a violation * of the GeoPackage Specifications. Requirement 54. * @throws Exception throws when exception occurs */ @Test(expected = IllegalArgumentException.class) public void addTilesIllegalArgumentException5() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, 2, 2, 2, 2, 1, 1); gpkg.tiles().addTile(tileSet, tileMatrix, -1, 0, new byte[] {1, 2, 3, 4}); Assert.fail("Geopackage should throw a IllegalArgumentException when tile_column " + "is less than 0."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Geopackage throws an SQLException when opening a Geopackage since it does not contain the default tables * inside after bypassing the verifier. * * @throws Exception throws if an exception occurs */ @Test(expected = SQLException.class) public void addTilesToGpkgAndAddTilesAndSetVerifyToFalse() throws Exception { final File testFile = this.getRandomFile(37); testFile.createNewFile(); try(GeoPackage gpkg = new GeoPackage(testFile, VerificationLevel.None, OpenMode.Open)) { gpkg.tiles() .addTileSet("diff_tile_set", "tile", "desc", new BoundingBox(1.0, 1.0, 1.0, 1.0), gpkg.core().getSpatialReferenceSystem(4326)); Assert.fail("The GeoPackage was expected to throw an IOException due to the file being empty."); } catch(final IOException ex) { final String query = "SELECT table_name FROM gpkg_contents WHERE table_name = 'diff_tile_set';"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileTableName = stmt.executeQuery(query);) { Assert.assertTrue("The data should not be in the contents table since it throws an SQLException", tileTableName.getString("table_name") == null); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This adds a tile to a GeoPackage and verifies that the Tile object added * into the GeoPackage is the same Tile object returned. 
* * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * throws if the image from the tile is not able to be read */ @Test public void addTileMethodByCrsTileCoordinate() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, -80.0, 180.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 2; final int matrixWidth = 2; final int matrixHeight = 2; final int tileWidth = 256; final int tileHeight = 256; final double pixelXSize = (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth; final double pixelYSize = (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); final CoordinateReferenceSystem coordinateReferenceSystem = new CoordinateReferenceSystem("EPSG", 4326); final CrsProfile crsProfile = CrsProfileFactory.create(coordinateReferenceSystem); final CrsCoordinate crsCoordinate = new CrsCoordinate(0.0, -60.0, coordinateReferenceSystem); final Tile tileAdded = gpkg.tiles().addTile(tileSet, tileMatrix, crsCoordinate, crsProfile.getPrecision(), GeoPackageTilesAPITest.createImageBytes()); final Tile tileFound = gpkg.tiles().getTile(tileSet, crsCoordinate, crsProfile.getPrecision(), zoomLevel); Assert.assertTrue("The GeoPackage did not return the tile Expected.", tileAdded.getColumn() == tileFound.getColumn() && tileAdded.getIdentifier() == tileFound.getIdentifier() && tileAdded.getRow() == tileFound.getRow() && tileAdded.getZoomLevel() == tileFound.getZoomLevel() && Arrays.equals(tileAdded.getImageData(), tileFound.getImageData())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Test if the GeoPackage can successfully add non empty tiles to a GeoPackage without throwing an error. 
* * @throws Exception throws if an exception occurs */ @Test public void addNonEmptyTile() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 20.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 2, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 0, new byte[] {1, 2, 3, 4}); } //use a query to test if the tile was inserted into database and to correct if the image is the same final String query = "SELECT tile_data FROM tileSetName WHERE zoom_level = 2 AND tile_column = 0 AND tile_row =0;"; try(Connection con = this.getConnection(testFile.getAbsolutePath()); Statement stmt = con.createStatement(); ResultSet tileData = stmt.executeQuery(query);) { // assert the image was inputed into the file Assert.assertTrue("The GeoPackage did not successfully write the tile_data into the GeoPackage", tileData.next()); final byte[] bytes = tileData.getBytes("tile_data"); // compare images Assert.assertTrue("The GeoPackage tile_data does not match the tile_data of the one given", Arrays.equals(bytes, new byte[] {1, 2, 3, 4})); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an SQLException when adding a * duplicate tile to the GeoPackage. * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IllegalArgumentException * throws if an illegal argument occurs to a method * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = SQLException.class) public void addDuplicateTiles()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "ident", "description", new BoundingBox(1.1,1.1,100.1,100.1), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix matrixSet = gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int column = 1; final int row = 0; final byte[] imageData = new byte[]{1, 2, 3, 4}; //add tile twice gpkg.tiles().addTile(tileSet, matrixSet, column, row, imageData); gpkg.tiles().addTile(tileSet, matrixSet, column, row, imageData);//see if it will add the same tile twice Assert.fail("Expected GeoPackage to throw an SQLException due to a unique constraint violation (zoom level, tile column, and tile row)." 
+ " Was able to add a duplicate tile."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage throws an IllegalArgumentException when trying to * add a tile with a parameter that is null (image data) * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addBadTile()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //add tile to gpkg final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 4, 10, 10, 1, 1, 1.0, 1.0); gpkg.tiles().addTile(tileSet, tileMatrix1, 4, 0, null); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when adding a null parameter to a Tile object (image data)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage throws an IllegalArgumentException when trying to * add a tile with a parameter that is empty (image data) * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addBadTile2()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //add tile to gpkg final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 4, 10, 10, 1, 1, 1.0, 1.0); gpkg.tiles().addTile(tileSet,tileMatrix1, 4, 0, new byte[]{}); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when adding an empty parameter to Tile (image data)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage throws an IllegalArgumentException when trying to * add a tile with a parameter that is null (tileMatrix) * * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws SQLException * if an SQLException occurs * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addBadTile4()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //add tile to gpkg gpkg.tiles().addTile(tileSet, null, 4, 0, new byte[]{1,2,3,4}); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when adding a null parameter to a addTile method (tileMatrix)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage get tile will retrieve the correct tile with get tile method. * @throws Exception throws if an exception occurs */ @Test public void getTile() throws Exception { //create tiles and file final File testFile = this.getRandomFile(6); final byte[] originalTile1 = new byte[] {1, 2, 3, 4}; final byte[] originalTile2 = new byte[] {1, 2, 3, 4}; try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 90.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoom1 = 4; final int zoom2 = 8; //add tile to gpkg final int matrixHeight = 2; final int matrixWidth = 4; final int tileHeight = 512; final int tileWidth = 256; final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, zoom1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 8; final int tileHeight2 = 512; final int tileWidth2 = 256; final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, zoom2, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); final Coordinate<Integer> tile1 = new Coordinate<>(3, 0); final Coordinate<Integer> tile2 = new Coordinate<>(7, 0); gpkg.tiles().addTile(tileSet, tileMatrix1, tile1.getX(), tile1.getY(), originalTile1); gpkg.tiles().addTile(tileSet, tileMatrix2, tile2.getX(), tile2.getY(), originalTile2); //Retrieve tile from gpkg final Tile gpkgTile1 = gpkg.tiles().getTile(tileSet, tile1.getX(), tile1.getY(), zoom1); final Tile gpkgTile2 = gpkg.tiles().getTile(tileSet, tile2.getX(), tile2.getY(), zoom2); Assert.assertTrue("GeoPackage did not return the image expected when using getTile method.", Arrays.equals(gpkgTile1.getImageData(), originalTile1)); Assert.assertTrue("GeoPackage did not return the image expected when using getTile method.", Arrays.equals(gpkgTile2.getImageData(), originalTile2)); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new 
RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage get tile will retrieve the correct tile with get tile method. * @throws Exception throws if an exception occurs */ @Test public void getTile2() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(4326)); //Retrieve tile from gpkg final Tile gpkgTile1 = gpkg.tiles().getTile(tileSet, 4, 0, 4); Assert.assertTrue("GeoPackage did not null when the tile doesn't exist in the getTile method.", gpkgTile1 == null); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage get tile will retrieve the correct tile with get tile method. * @throws Exception throws if an exception occurs */ @Test public void getTile3() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", "title", "tiles", new BoundingBox(0.0, 0.0, 80.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixHeight = 2; final int matrixWidth = 3; final int tileHeight = 512; final int tileWidth = 256; final int zoom = 0; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); //Tile coords final Coordinate<Integer> coord1 = new Coordinate<>(2, 1); final byte[] imageData = new byte[]{1,2,3,4}; //Retrieve tile from gpkg final Tile gpkgTileAdded = gpkg.tiles().addTile(tileSet, tileMatrix, coord1.getX(), coord1.getY(), imageData); final Tile gpkgTileRecieved = gpkg.tiles().getTile(tileSet, coord1.getX(), coord1.getY(), zoom); Assert.assertTrue("GeoPackage did not return the same tile added to the gpkg.", gpkgTileAdded.getColumn() == gpkgTileRecieved.getColumn() && gpkgTileAdded.getRow() == gpkgTileRecieved.getRow() && gpkgTileAdded.getIdentifier() ==(gpkgTileRecieved.getIdentifier()) && gpkgTileAdded.getColumn() == gpkgTileRecieved.getColumn() && gpkgTileAdded.getRow() == gpkgTileRecieved.getRow() && gpkgTileAdded.getZoomLevel() == gpkgTileRecieved.getZoomLevel() && Arrays.equals(gpkgTileAdded.getImageData(), gpkgTileRecieved.getImageData())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will return null when the tile being searched for does not exist. 
* * @throws Exception throws if an exception occurs */ @Test public void getTileThatIsNotInGpkg() throws Exception { final File testFile = this.getRandomFile(4); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tileSetName", null, null, new BoundingBox(0.0, 0.0, 80.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 3; final int matrixHeight = 6; final int tileWidth = 256; final int tileHeight = 256; // add tile to gpkg gpkg.tiles().addTileMatrix(tileSet, 2, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); Assert.assertTrue("GeoPackage should have returned null for a missing tile.", gpkg.tiles().getTile(tileSet, 0, 0, 0) == null); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackage will throw an IllegalArgumentException when using getTile method with null value for table name. * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void getTileWithNullTileEntrySet() throws Exception { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTile(null, 2, 2, 0); Assert.fail("GeoPackage did not throw an IllegalArgumentException when giving a null value to table name (using getTile method)"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This adds a tile to a GeoPackage and verifies that the Tile object added * into the GeoPackage is the same Tile object returned. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * throws if an image cannot be read from or written */ @Test(expected = IllegalArgumentException.class) public void getTileRelativeTileCoordinateNonExistent() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(18); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, -80.0, 180.0, 80.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 2; final CoordinateReferenceSystem coordinateReferenceSystem = new CoordinateReferenceSystem("EPSG", 4326); final CrsCoordinate crsCoordinate = new CrsCoordinate(0.0, -60.0, coordinateReferenceSystem); gpkg.tiles().getTile(tileSet, crsCoordinate, CrsProfileFactory.create(coordinateReferenceSystem).getPrecision(), zoomLevel); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will return the all and the correct zoom levels in a GeoPackage * * @throws Exception throws if an exception occurs */ @Test public void getZoomLevels() throws Exception { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(5.0,5.0,50.0,50.0), gpkg.core().getSpatialReferenceSystem(4326)); // Add tile matrices that represent zoom levels 0 and 12 final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); gpkg.tiles().addTileMatrix(tileSet, 12, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final Set<Integer> zooms = gpkg.tiles().getTileZoomLevels(tileSet); final ArrayList<Integer> expectedZooms = new ArrayList<>(); expectedZooms.add(new Integer(12)); expectedZooms.add(new Integer(0)); for(final Integer zoom : zooms) { Assert.assertTrue("The GeoPackage's get zoom levels method did not return expected values.", expectedZooms.stream() .anyMatch(currentZoom -> currentZoom.equals(zoom))); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when given a * TileSet null for getZoomLevels() * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getZoomLevelsNullTileSetContentEntry()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileZoomLevels(null); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when givinga null parameter to getTileZoomLevels"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if GeoPackage will throw an IllegalArgumentException * when giving a null parameter to getRowCount * @throws Exception throws if an exception occurs */ @Test(expected = IllegalArgumentException.class) public void getRowCountNullContentEntry() throws Exception { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.core().getRowCount(null); Assert.fail("GeoPackage should have thrown an IllegalArgumentException."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Verifies that the GeoPackage counts the correct number of rows * with the method getRowCount * @throws Exception throws if an exception occurs */ @Test public void getRowCountVerify() throws Exception { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "ident", "desc", new BoundingBox(0.0,0.0,80.0,50.0), gpkg.core().getSpatialReferenceSystem(4326)); //create two TileMatrices to represent the tiles final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 4; final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth2, matrixHeight2, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight); //add two tiles gpkg.tiles().addTile(tileSet, tileMatrix2, 0, 0, new byte[] {1, 2, 3, 4}); gpkg.tiles().addTile(tileSet, tileMatrix1, 0, 0, new byte[] {1, 2, 3, 4}); final long count = gpkg.core().getRowCount(tileSet); Assert.assertTrue(String.format("Expected a different value from GeoPackage on getRowCount. expected: 2 actual: %d", count),count == 2); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a null parameter to the method getTileMatrixSetEntry(); * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getTileMatrixSetEntryNullTileSetContentEntry()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileMatrixSet(null); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a null parameter for TileSet"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if GeoPackage returns the expected tileMatrices using the * getTIleMatrices(TileSet tileSet) method * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * throws if an image cannot be read from or written */ @Test public void getTileMatricesVerify() throws ClassNotFoundException, SQLException, ConformanceException, IOException { final File testFile = this.getRandomFile(5); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tables", "identifier", "description", new BoundingBox(0.0,0.0,80.0,80.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixHeight = 2; final int matrixWidth = 4; final int tileHeight = 512; final int tileWidth = 256; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 4; final int matrixWidth2 = 8; final int tileHeight2 = 512; final int tileWidth2 = 256; final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, 3, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); gpkg.tiles().addTile(tileSet, tileMatrix, 0, 0, GeoPackageTilesAPITest.createImageBytes()); gpkg.tiles().addTile(tileSet, tileMatrix, 1, 0, GeoPackageTilesAPITest.createImageBytes()); final ArrayList<TileMatrix> expectedTileMatrix = new ArrayList<>(); expectedTileMatrix.add(tileMatrix); expectedTileMatrix.add(tileMatrix2); final List<TileMatrix> gpkgTileMatrices = gpkg.tiles().getTileMatrices(tileSet); Assert.assertTrue("Expected the GeoPackage to return two Tile Matrices.",gpkgTileMatrices.size() == 2); for(final TileMatrix gpkgTileMatrix : gpkg.tiles().getTileMatrices(tileSet)) { Assert.assertTrue("The tile entry's information in the GeoPackage does not match what was originally given to a GeoPackage", expectedTileMatrix.stream() .anyMatch(expectedTM -> expectedTM.getTableName() .equals(gpkgTileMatrix.getTableName()) && expectedTM.getMatrixHeight() == gpkgTileMatrix.getMatrixHeight() && expectedTM.getMatrixWidth() == gpkgTileMatrix.getMatrixWidth() && expectedTM.getPixelXSize() == gpkgTileMatrix.getPixelXSize() && expectedTM.getPixelYSize() == gpkgTileMatrix.getPixelYSize() && expectedTM.getZoomLevel() == gpkgTileMatrix.getZoomLevel())); } } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will return null if no TileMatrix Entries are * found in the GeoPackage that matches the TileSet given. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatricesNonExistant() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tables", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(4326)); Assert.assertTrue("Expected the GeoPackage to return null when no tile Matrices are found", gpkg.tiles().getTileMatrices(tileSet).size() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a matrix width that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 0, 5, 6, 7, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a matrix width that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a matrix height that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException2()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("name", "identifier", "description", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 0, 6, 7, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a matrix height that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a tile width that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException3()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0, 0.0, 0.0, 0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 0, 7, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a tile width that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a tile height that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException4()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 6, 0, 8, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a tile height that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a pixelXsize that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException5()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 6, 7, 0, 9); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a pixelXsize that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a TileMatrix with a pixelYSize that is <=0 * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatricesIllegalArgumentException6()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 4, 5, 6, 7, 8, 0); Assert.fail("Expected GeoPackage to throw an IllegalArgumentException when giving a Tile Matrix a pixelYSize that is <= 0"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a Geopackage Tiles would throw an IllegalArgumentException when * attempting to add a Tile Matrix corresponding to the same tile set and * zoom level but have differing other fields * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixSameZoomDifferentOtherFields()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); gpkg.tiles().addTileMatrix(tileSet, 0, 2, 3, 4, 5, 6, 7); gpkg.tiles().addTileMatrix(tileSet, 0, 3, 2, 5, 4, 7, 6); Assert.fail("Expected GeoPackage Tiles to throw an IllegalArgumentException when addint a Tile Matrix with the same tile set and zoom level information but differing other fields"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage returns the same TileMatrix when trying to add * the same TileMatrix twice (verifies the values are the same) * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void addTileMatrixTwiceVerify()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("name", "identifier", "description", new BoundingBox(0.0,0.0,90.0,90.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixHeight = 2; final int matrixWidth = 2; final int tileHeight = 256; final int tileWidth = 256; final TileMatrix tileMatrix1 = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final TileMatrix tileMatrix2 = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); Assert.assertTrue("Expected the GeoPackage to return the existing Tile Matrix.",tileMatrix1.equals(tileMatrix2.getTableName(), tileMatrix2.getZoomLevel(), tileMatrix2.getMatrixWidth(), tileMatrix2.getMatrixHeight(), tileMatrix2.getTileWidth(), tileMatrix2.getTileHeight(), tileMatrix2.getPixelXSize(), tileMatrix2.getPixelYSize())); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage returns the same TileMatrix when trying to add * the same TileMatrix twice (verifies the values are the same) * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixNullTileSet()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(13); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().addTileMatrix(null, 0, 2, 3, 4, 5, 6, 7); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when giving a null parameter TileSet to addTileMatrix"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when a user * tries to add a negative value for zoom level (when adding a tile Matrix * entry) * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixWithNegativeZoomLevel()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(2.0,1.0,4.0,3.0), gpkg.core().getSpatialReferenceSystem(0)); gpkg.tiles().addTileMatrix(tileSet, -1, 2, 4, 6, 8, 10, 12); } finally { if (testFile.exists()) { if (!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if given a non empty tile Matrix Metadata information can be added without throwing an error. 
    /**
     * Tests if, given a non-empty tile matrix, metadata information can be
     * added without throwing an error.
     *
     * @throws SQLException throws if an SQLException occurs
     * @throws Exception throws if an exception occurs
     */
    @Test
    public void addNonEmptyTileMatrix() throws SQLException, Exception
    {
        final File testFile = this.getRandomFile(5);

        try(GeoPackage gpkg = new GeoPackage(testFile))
        {
            // add information to gpkg
            final TileSet tileSet = gpkg.tiles()
                                        .addTileSet("tileSetName",
                                                    "title",
                                                    "tiles",
                                                    new BoundingBox(0.0, 0.0, 80.0, 80.0),
                                                    gpkg.core().getSpatialReferenceSystem(4326));

            final int matrixWidth  = 4;
            final int matrixHeight = 8;
            final int tileWidth    = 256;
            final int tileHeight   = 512;

            gpkg.tiles().addTileMatrix(tileSet,
                                       1,
                                       matrixWidth,
                                       matrixHeight,
                                       tileWidth,
                                       tileHeight,
                                       (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth,
                                       (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight);
        }

        // test if information added is accurate
        final int matrixWidth  = 4;
        final int matrixHeight = 8;
        final int tileWidth    = 256;
        final int tileHeight   = 512;

        final String query = String.format("SELECT table_name FROM gpkg_tile_matrix "
                                           + "WHERE zoom_level = %d AND "
                                           + " matrix_height = %d AND "
                                           + " matrix_width = %d AND "
                                           + " tile_height = %d AND "
                                           + " tile_width = %d;",
                                           1,
                                           matrixHeight,
                                           matrixWidth,
                                           tileHeight,
                                           tileWidth);

        try(Connection con       = this.getConnection(testFile.getAbsolutePath());
            Statement  stmt      = con.createStatement();
            ResultSet  tableName = stmt.executeQuery(query);)
        {
            Assert.assertTrue("The GeoPackage did not enter the correct record into the gpkg_tile_matrix table",
                              tableName.getString("table_name").equals("tileSetName"));
        }
        finally
        {
            if(testFile.exists())
            {
                if(!testFile.delete())
                {
                    throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile));
                }
            }
        }
    }

    /**
     * Tests if GeoPackage Tiles will throw an IllegalArgumentException when the
     * pixelXSize is not correctly calculated
     *
     * @throws SQLException
     *             throws if an SQLException occurs
     * @throws ClassNotFoundException
     *             if the connection to the database cannot be made
     * @throws ConformanceException
     *             throws if it does not meet all the requirements
     * @throws IOException
     *             if an error occurs from reading or writing a Tile or File
     */
    @Test(expected = IllegalArgumentException.class)
    public void addTileMatrixIllegalBounds() throws SQLException, ClassNotFoundException, ConformanceException, IOException
    {
        final File testFile = this.getRandomFile(7);

        try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create))
        {
            final TileSet tileSet = gpkg.tiles()
                                        .addTileSet("tableName",
                                                    "identifier",
                                                    "description",
                                                    new BoundingBox(0.0, 0.0, 180.0, 90.0),
                                                    gpkg.core().getSpatialReferenceSystem(4326));

            final int    zoomLevel    = 5;
            final int    matrixWidth  = 10;
            final int    matrixHeight = 11;
            final int    tileWidth    = 256;
            final int    tileHeight   = 512;
            final double pixelXSize   = 500.23123; // invalid pixelXSize
            final double pixelYSize   = tileSet.getBoundingBox().getHeight()/matrixHeight/tileHeight;

            gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize);
            fail("Expected GeoPackage Tiles to throw an IllegalArgumentException when pixelXSize != boundingBoxWidth/matrixWidth/tileWidth.");
        }
        finally
        {
            if(testFile.exists())
            {
                if(!testFile.delete())
                {
                    throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile));
                }
            }
        }
    }
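    // Illustrative note (an assumption of this edit, not part of the GeoPackage API):
    // the "illegal bounds" tests directly above and below exercise the tile matrix
    // invariant that the declared pixel sizes must agree with the tile set's bounding
    // box, i.e.
    //     pixelXSize == boundingBox.getWidth()  / matrixWidth  / tileWidth
    //     pixelYSize == boundingBox.getHeight() / matrixHeight / tileHeight
    // The hypothetical helper below is a minimal sketch of that arithmetic; the tests
    // intentionally compute the values inline rather than calling it.
    @SuppressWarnings("unused")
    private static double computeExpectedPixelSize(final double boundingBoxExtent,
                                                   final int    matrixDimension,
                                                   final int    tileDimension)
    {
        // e.g. a 180.0-unit-wide bounding box, a 10-column matrix and 256-pixel-wide
        // tiles give an expected pixelXSize of 180.0 / 10 / 256 = 0.0703125
        return (boundingBoxExtent / matrixDimension) / tileDimension;
    }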
testFile: %s", testFile)); } } } } /** * Tests if GeoPackage Tiles will throw an IllegalArgumentException when the * pixelYSize is not correctly calculated * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void addTileMatrixIllegalBounds2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(7); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,180.0,90.0), gpkg.core().getSpatialReferenceSystem(4326)); final int zoomLevel = 5; final int matrixWidth = 10; final int matrixHeight = 11; final int tileWidth = 256; final int tileHeight = 512; final double pixelXSize = tileSet.getBoundingBox().getWidth()/matrixWidth/tileWidth; final double pixelYSize = 500.23123;//invalid pixel y size gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, tileWidth, tileHeight, pixelXSize, pixelYSize); fail("Expected GeopackageTiles to throw an IllegalArgtumentException when pixelXSize != boundingBoxWidth/matrixWidth/tileWidth."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw an IllegalArgumentException when giving * a null parameter to getTileMatrices * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getTileMatricesNullParameter() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileMatrices(null); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when giving getTileMatrices a TileSet that is null."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage getTIleMatrix can retrieve the correct TileMatrix * from the GeoPackage. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatrixVerify()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(6); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("tableName", "identifier", "description", new BoundingBox(0.0,0.0,100.0,100.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixHeight = 2; final int matrixWidth = 6; final int tileHeight = 512; final int tileWidth = 256; gpkg.tiles().addTileMatrix(tileSet, 1, matrixWidth, matrixHeight, tileWidth, tileHeight, (tileSet.getBoundingBox().getWidth()/matrixWidth)/tileWidth, (tileSet.getBoundingBox().getHeight()/matrixHeight)/tileHeight); final int matrixHeight2 = 1; final int matrixWidth2 = 3; final int tileHeight2 = 512; final int tileWidth2 = 256; final TileMatrix tileMatrix = gpkg.tiles().addTileMatrix(tileSet, 0, matrixWidth2, matrixHeight2, tileWidth2, tileHeight2, (tileSet.getBoundingBox().getWidth()/matrixWidth2)/tileWidth2, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/tileHeight2); final TileMatrix returnedTileMatrix = gpkg.tiles().getTileMatrix(tileSet, 0); Assert.assertTrue("GeoPackage did not return the TileMatrix expected", tileMatrix.getMatrixHeight() == returnedTileMatrix.getMatrixHeight() && tileMatrix.getMatrixWidth() == returnedTileMatrix.getMatrixWidth() && tileMatrix.getPixelXSize() == returnedTileMatrix.getPixelXSize() && tileMatrix.getPixelYSize() == returnedTileMatrix.getPixelYSize() && tileMatrix.getTableName() .equals(returnedTileMatrix.getTableName()) && tileMatrix.getTileHeight() == returnedTileMatrix.getTileHeight() && tileMatrix.getTileWidth() == returnedTileMatrix.getTileWidth() && tileMatrix.getZoomLevel() == returnedTileMatrix.getZoomLevel()); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage returns null if the TileMatrix entry does not * exist in the GeoPackage file. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatrixNonExistant()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile)) { final TileSet tileSet = gpkg.tiles() .addTileSet("TableName", "identifier", "description", new BoundingBox(0.0,0.0,0.0,0.0), gpkg.core().getSpatialReferenceSystem(-1)); Assert.assertTrue("GeoPackage was supposed to return null when there is a nonexistant TileMatrix entry at that zoom level and TileSet", null == gpkg.tiles().getTileMatrix(tileSet, 0)); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw an IllegalArgumentException when * giving a null parameter to getTileMatrix. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void getTileMatrixNullParameter()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(10); try(GeoPackage gpkg = new GeoPackage(testFile)) { gpkg.tiles().getTileMatrix(null, 8); Assert.fail("GeoPackage should have thrown an IllegalArgumentException when giving a null parameter for TileSet in the method getTileMatrix"); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if getTileMatrixSet retrieves the values that is expected * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void getTileMatrixSetVerify()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(12); try(GeoPackage gpkg = new GeoPackage(testFile)) { //values for tileMatrixSet final String tableName = "tableName"; final String identifier = "identifier"; final String description = "description"; final BoundingBox bBox = new BoundingBox(2.0, 1.0, 4.0, 3.0); final SpatialReferenceSystem srs = gpkg.core().getSpatialReferenceSystem(4326); //add tileSet and tileMatrixSet to gpkg final TileSet tileSet = gpkg.tiles().addTileSet(tableName, identifier, description, bBox, srs); final TileMatrixSet tileMatrixSet = gpkg.tiles().getTileMatrixSet(tileSet); Assert.assertTrue("Expected different values from getTileMatrixSet for SpatialReferenceSystem or BoundingBox or TableName.", tileMatrixSet.getBoundingBox() .equals(bBox) && tileMatrixSet.getSpatialReferenceSystem().equals(srs) && tileMatrixSet.getTableName() .equals(tableName)); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage will throw a GeoPackage Conformance Exception * when given a GeoPackage that violates a requirement with a severity equal * to Error * @throws SQLException throws if an SQLException occurs * @throws Exception throws if an exception occurs */ @Test(expected = ConformanceException.class) public void geoPackageConformanceException() throws SQLException, Exception { final File testFile = this.getRandomFile(19); testFile.createNewFile(); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Open)) { Assert.fail("GeoPackage did not throw a geoPackageConformanceException as expected."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperRightGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-45.234567, 45.213192, geodeticRefSys);//upper right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperLeftGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-180, 85, geodeticRefSys);//upper left tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 0. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerLeftGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-90, 41, geodeticRefSys);//lower left tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 1, Expected Column: 0. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Geodetic crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerRightGeodetic() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 1; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-0.000001, 12, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-180.0, 0.0, 0.0, 85.0511287798066), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 1, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperLeftGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(-45.0, 5.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 0, Expected Column: 0.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateUpperRightGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(-42.0, 5.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 0, Expected Column: 1.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerLeftGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(-47.0, -45.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 1, Expected Column: 0.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if the GeoPackage can convert an Global Mercator crsCoordinate to a * relative tile coordinate * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateLowerRightGlobalMercator() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final EllipsoidalMercatorCrsProfile mercator = new EllipsoidalMercatorCrsProfile(); final int zoomLevel = 6; final CoordinateReferenceSystem globalMercator = new CoordinateReferenceSystem("EPSG", 3395); final Coordinate<Double> coordInMeters = mercator.fromGlobalGeodetic(new Coordinate<>(4.999, -55.0)); final CrsCoordinate crsMercatorCoord = new CrsCoordinate(coordInMeters.getX(), coordInMeters.getY(), globalMercator); final File testFile = this.getRandomFile(9); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final Coordinate<Double> minBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>(-90.0, -60.0)); final Coordinate<Double> maxBoundingBoxCoord = mercator.fromGlobalGeodetic(new Coordinate<>( 5.0, 10.0)); final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(minBoundingBoxCoord.getX(), minBoundingBoxCoord.getY(), maxBoundingBoxCoord.getX(), maxBoundingBoxCoord.getY()), gpkg.core().addSpatialReferenceSystem("EPSG/World Mercator", 3395, "EPSG", 3395, "definition", "description")); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsMercatorCoord, CrsProfileFactory.create(globalMercator).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The GeoPackage did not return the expected row and column from the conversion crs to relative tile coordiante. " + " \nExpected Row: 1, Expected Column: 1.\nActual Row: %d, Actual Column: %d.", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage can translate a crs to a relative tile coordinate * when there are multiple zoom levels and when there are more tiles at the * higher zoom * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordinateMultipleZoomLevels() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 5; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(-27.5, -1.25, geodeticRefSys); final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(-100.0, -60.0, 100.0, 60.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth1 = 16; final int matrixHeight1 = 24; final int pixelXSize = 256; final int pixelYSize = 512; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth1, matrixHeight1, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth1 )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight1)/pixelYSize); final int matrixWidth2 = 4; final int matrixHeight2 = 6; final int zoomLevel2 = 3; gpkg.tiles().addTileMatrix(tileSet, zoomLevel2, matrixWidth2, matrixHeight2, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth2 )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight2)/pixelYSize); final int matrixWidth3 = 8; final int matrixHeight3 = 12; final int zoomLevel3 = 4; gpkg.tiles().addTileMatrix(tileSet, zoomLevel3, matrixWidth3, matrixHeight3, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth() /matrixWidth3 )/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight3)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 12, Expected Column: 5. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 12 && relativeCoord.getX() == 5); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies in the middle of four tiles. 
     *
     * @throws SQLException
     *             throws if an SQLException occurs
     * @throws ClassNotFoundException
     *             if the connection to the database cannot be made
     * @throws ConformanceException
     *             throws if it does not meet all the requirements
     * @throws IOException
     *             if an error occurs from reading or writing a Tile or File
     */
    @Test
    public void crsToRelativeTileCoordEdgeCase() throws SQLException, ClassNotFoundException, ConformanceException, IOException
    {
        final int                       zoomLevel      = 15;
        final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG", 4326);
        final CrsCoordinate             crsCoord       = new CrsCoordinate(76.4875, 36.45, geodeticRefSys); // coordinate on the corner of four tiles

        final File testFile = this.getRandomFile(8);

        try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create))
        {
            final TileSet tileSet = gpkg.tiles().addTileSet("tableName",
                                                            "identifier",
                                                            "description",
                                                            new BoundingBox(-180.0, 0.0, 90.0, 85.05),
                                                            gpkg.core().getSpatialReferenceSystem(4326));

            final int matrixWidth  = 20;
            final int matrixHeight = 7;
            final int pixelXSize   = 256;
            final int pixelYSize   = 256;

            gpkg.tiles().addTileMatrix(tileSet,
                                       zoomLevel,
                                       matrixWidth,
                                       matrixHeight,
                                       pixelXSize,
                                       pixelYSize,
                                       (tileSet.getBoundingBox().getWidth() /matrixWidth )/pixelXSize,
                                       (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize);

            final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel);

            Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. "
                                            + "\nExpected Row: 3, Expected Column: 18. \nActual Row: %d, Actual Column: %d",
                                            relativeCoord.getY(),
                                            relativeCoord.getX()),
                              relativeCoord.getY() == 3 && relativeCoord.getX() == 18);
        }
        finally
        {
            if(testFile.exists())
            {
                if(!testFile.delete())
                {
                    throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile));
                }
            }
        }
    }
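    // Illustrative note (an assumption of this edit, not part of the GeoPackage API):
    // the crsToRelativeTileCoord edge-case tests in this group all reduce to the same
    // arithmetic. For a geodetic tile set whose rows are numbered from the top of the
    // bounding box, the expected relative tile coordinate is roughly
    //     column = floor((crsX - bbox.minX) / (bbox.width  / matrixWidth))
    //     row    = floor((bbox.maxY - crsY) / (bbox.height / matrixHeight))
    // Coordinates that land exactly on a tile boundary are resolved by the library's
    // own convention, which is precisely what these assertions pin down, so the
    // floor-based sketch below may differ from the asserted values in those cases.
    // This hypothetical helper is not called by the tests.
    @SuppressWarnings("unused")
    private static int[] sketchExpectedTileCoordinate(final double crsX,        final double crsY,
                                                      final double bboxMinX,    final double bboxMinY,
                                                      final double bboxMaxX,    final double bboxMaxY,
                                                      final int    matrixWidth, final int    matrixHeight)
    {
        final double tileCrsWidth  = (bboxMaxX - bboxMinX) / matrixWidth;
        final double tileCrsHeight = (bboxMaxY - bboxMinY) / matrixHeight;

        final int column = (int)Math.floor((crsX - bboxMinX) / tileCrsWidth);   // x
        final int row    = (int)Math.floor((bboxMaxY - crsY) / tileCrsHeight);  // y

        return new int[]{ column, row };
    }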
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies between two tiles on top of each * other * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(10, 25, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 0. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the left border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase3() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(0, 40, geodeticRefSys);//upper Left tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 0. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 0); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the right border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase4() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(29.9, 30, geodeticRefSys);//upper right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the top border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase5() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//upper right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 0, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 0 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests the validity of the transformation of crs to relative tile * coordinate when the crs coordinate lies on the bottom border * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordEdgeCase6() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 0.01, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 1, Expected Column: 1. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 1 && relativeCoord.getX() == 1); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * Test if a crsCoordinate can be translated to a tile coordiante * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test public void crsToRelativeTileCoordianteEdgeCase7()throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 0; final CrsCoordinate coordinate = new CrsCoordinate((GlobalGeodeticCrsProfile.Bounds.getMinX()+(2*(GlobalGeodeticCrsProfile.Bounds.getWidth())) / 8), (GlobalGeodeticCrsProfile.Bounds.getMaxY()-(6*(GlobalGeodeticCrsProfile.Bounds.getHeight())) / 9), "epsg", 4326); final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", GlobalGeodeticCrsProfile.Bounds, gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 8; final int matrixHeight = 9; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); final Coordinate<Integer> relativeCoord = gpkg.tiles().crsToTileCoordinate(tileSet, coordinate, CrsProfileFactory.create("EPSG", 4326).getPrecision(), zoomLevel); Assert.assertTrue(String.format("The crsToRelativeTileCoordinate did not return the expected values. " + "\nExpected Row: 6, Expected Column: 2. \nActual Row: %d, Actual Column: %d", relativeCoord.getY(), relativeCoord.getX()), relativeCoord.getY() == 6 && relativeCoord.getX() == 2); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw the appropriate exception when giving * the method a null value for crsCoordinate. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); gpkg.tiles().crsToTileCoordinate(tileSet, null, CrsProfileFactory.create("EPSG", 4326).getPrecision(), 0); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when trying to input a crs tile coordinate that was null to the method crsToRelativeTileCoordinate."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * Tests if a GeoPackage will throw the appropriate exception when giving * the method a null value for crsCoordinate. 
* * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException2() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final int zoomLevel = 1; final CoordinateReferenceSystem coordinateReferenceSystem = new CoordinateReferenceSystem("Police", 99); final CrsCoordinate crsCoord = new CrsCoordinate(15, 20, coordinateReferenceSystem); gpkg.tiles().crsToTileCoordinate(null, crsCoord, 2, zoomLevel); Assert.fail("Expected the GeoPackage to throw an IllegalArgumentException when trying to input a tileSet that was null to the method crsToRelativeTileCoordinate."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate from a different SRS from the tiles. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException3() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(-1)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.fail("Expected the GoePackage to throw an exception when the crs coordinate and the tiles are from two different projections."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate from with a zoom level that is not in the matrix table * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException4() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; final int differentZoomLevel = 12; gpkg.tiles().addTileMatrix(tileSet, differentZoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate is not within bounds * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException5() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG",4326); final CrsCoordinate crsCoord = new CrsCoordinate(20, -50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. 
testFile: %s", testFile)); } } } } /** * This tests that the appropriate exception is thrown when trying to find a * crs coordinate from a different SRS from the tiles. * * @throws SQLException * throws if an SQLException occurs * @throws ClassNotFoundException * if the connection to the database cannot be made * @throws ConformanceException * throws if it does not meet all the requirements * @throws IOException * if an error occurs from reading or writing a Tile or File */ @Test(expected = IllegalArgumentException.class) public void crsToRelativeTileCoordException6() throws SQLException, ClassNotFoundException, ConformanceException, IOException { final int zoomLevel = 15; final CoordinateReferenceSystem geodeticRefSys = new CoordinateReferenceSystem("EPSG", 3857); final CrsCoordinate crsCoord = new CrsCoordinate(20, 50, geodeticRefSys);//lower right tile final File testFile = this.getRandomFile(8); try(GeoPackage gpkg = new GeoPackage(testFile, OpenMode.Create)) { final TileSet tileSet = gpkg.tiles().addTileSet("tableName", "identifier", "description", new BoundingBox(0.0, 0.0, 30.0, 50.0), gpkg.core().getSpatialReferenceSystem(4326)); final int matrixWidth = 2; final int matrixHeight = 2; final int pixelXSize = 256; final int pixelYSize = 256; gpkg.tiles().addTileMatrix(tileSet, zoomLevel, matrixWidth, matrixHeight, pixelXSize, pixelYSize, (tileSet.getBoundingBox().getWidth()/matrixWidth)/pixelXSize, (tileSet.getBoundingBox().getHeight()/matrixHeight)/pixelYSize); gpkg.tiles().crsToTileCoordinate(tileSet, crsCoord, CrsProfileFactory.create(geodeticRefSys).getPrecision(), zoomLevel); Assert.fail("Expected the GoePackage to throw an exception when the crs coordinate and the tiles are from two different projections."); } finally { if(testFile.exists()) { if(!testFile.delete()) { throw new RuntimeException(String.format("Unable to delete testFile. testFile: %s", testFile)); } } } } private static byte[] createImageBytes() throws IOException { return ImageUtility.bufferedImageToBytes(new BufferedImage(256, 256, BufferedImage.TYPE_INT_ARGB), "PNG"); } private String getRanString(final int length) { final String characters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"; final char[] text = new char[length]; for (int i = 0; i < length; i++) { text[i] = characters.charAt(this.randomGenerator.nextInt(characters.length())); } return new String(text); } private File getRandomFile(final int length) { File testFile; do { testFile = new File(String.format(FileSystems.getDefault().getPath(this.getRanString(length)).toString() + ".gpkg")); } while (testFile.exists()); return testFile; } private Connection getConnection(final String filePath) throws Exception { Class.forName("org.sqlite.JDBC"); // Register the driver return DriverManager.getConnection("jdbc:sqlite:" + filePath); } }
#107 Testing GeoPackageTiles tileToCrsCoordinate
Geopackage/test/com/rgi/geopackage/GeoPackageTilesAPITest.java
#107 Testing GeoPackageTiles tileToCrsCoordinate
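Each test in the file above repeats the same finally block to delete the temporary GeoPackage and wrap a failed delete in a RuntimeException. As a hedged aside, the same cleanup could be centralised with java.nio; the helper below is a generic sketch with invented names, not part of the test class above.

import java.io.File;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;

/**
 * Generic sketch of the temp-file cleanup repeated in each test's finally block.
 * Files.deleteIfExists reports the failure cause directly instead of a bare boolean.
 */
public final class TestFileCleanup {
    private TestFileCleanup() { }

    /** Deletes the file if present, turning an I/O failure into an unchecked exception. */
    public static void deleteQuietlyOrFail(final File testFile) {
        try {
            Files.deleteIfExists(testFile.toPath());
        } catch (final IOException e) {
            throw new UncheckedIOException(
                    String.format("Unable to delete testFile. testFile: %s", testFile), e);
        }
    }
}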
Java
mit
e7d3c8f2b81faf0a98f9bc338bf1d32ce275023e
0
bkromhout/Ruqus
package com.bkromhout.ruqus; import android.content.Context; import android.support.v4.content.ContextCompat; import io.realm.RealmObject; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; /** * Access to Ruqus information. This class mostly serves as a convenience class, using the instances of {@link * ClassData} and {@link TransformerData} objects that it holds in order to provide static methods which allow the rest * of the library to make on-liner calls as much as possible. */ public class Ruqus { private static final String KEY_SEP = "$"; private static final String FLAT_SEP = "."; private static final String VIS_FLAT_SEP = ">"; static int LIGHT_TEXT_COLOR, DARK_TEXT_COLOR, LIGHT_CARD_COLOR, DARK_CARD_COLOR; static String CHOOSE_FIELD, CHOOSE_CONDITIONAL; /** * Whether or not Ruqus.init() has already been called. */ private static Ruqus INSTANCE = null; /** * Ruqus class information. */ private ClassData classData; /** * Ruqus transformer information. */ private TransformerData transformerData; /** * Used to cache information to speed up converting a flat visible field name string to a flat real field name * string. */ private HashMap<String, String> flatVisFieldToFlatField; /** * Used to cache information to speed up converting a flat real field name to a field type. */ private HashMap<String, FieldType> flatFieldToFieldType; private Ruqus() { // Load the Ruqus class data object. try { classData = (ClassData) Class.forName(C.GEN_PKG_PREFIX + C.GEN_CLASS_DATA_CLASS_NAME).newInstance(); } catch (ClassNotFoundException e) { throw ex("Could not find generated Ruqus class data, did the annotation processor run?"); } catch (Exception e) { throw ex("Could not get generated Ruqus class data."); } // Load the Ruqus transformer data object. Make sure that we look through all of them so that all of the // transformers' data are loaded into the base class's static variables. The reason there are more than // one is due to there being at least one which comes with Ruqus (for the transformers which come with // Ruqus), plus any more which are generated when the app compiles due to the dev creating their own. int num = 1; boolean noneYet = true; while (true) { try { transformerData = (TransformerData) Class.forName(C.GEN_PKG_PREFIX + C.GEN_TRANSFORMER_DATA_CLASS_NAME + String.valueOf(num)).newInstance(); noneYet = false; num++; } catch (ClassNotFoundException e) { if (noneYet) throw ex("Could not find generated Ruqus transformer data, did the annotation processor run?"); else break; } catch (Exception e) { if (noneYet) throw ex("Could not get generated Ruqus transformer data."); else break; } } // Create hashmaps to use for caching. flatVisFieldToFlatField = new HashMap<>(); flatFieldToFieldType = new HashMap<>(); } /** * Initializes Ruqus. This must be called <i>once</i> before any of the other methods on {@link Ruqus} can be used, * and it requires reflection. It is recommended that it be called as early as possible in the application's * lifecycle. 
*/ public static void init(Context context) { if (INSTANCE == null) { INSTANCE = new Ruqus(); LIGHT_TEXT_COLOR = ContextCompat.getColor(context, R.color.ruqus_textColorPrimaryLight); DARK_TEXT_COLOR = ContextCompat.getColor(context, R.color.ruqus_textColorPrimaryDark); LIGHT_CARD_COLOR = ContextCompat.getColor(context, R.color.cardview_light_background); DARK_CARD_COLOR = ContextCompat.getColor(context, R.color.cardview_dark_background); CHOOSE_FIELD = context.getString(R.string.ruqus_choose_field); CHOOSE_CONDITIONAL = context.getString(R.string.ruqus_choose_conditional); } } /** * Ensures that {@link Ruqus#init(Context)} has been called. */ private static void ensureInit() { if (INSTANCE == null) throw ex("Ruqus.init() must be called first."); } static ClassData getClassData() { ensureInit(); return INSTANCE.classData; } static FieldData getFieldData(String realmClass) { return getClassData().getFieldData(realmClass); } static TransformerData getTransformerData() { ensureInit(); return INSTANCE.transformerData; } /** * Check whether or not Ruqus has data for a RealmObject subclass with the name {@code realmClass}. * @param realmClass Name to check for. * @return True if Ruqus knows of a class called {@code realmClass}, otherwise false. */ static boolean knowsOfClass(String realmClass) { return getClassData().isValidName(realmClass); } /** * Check that Ruqus recognizes and has data for a RealmObject subclass {@code clazz}. * @param realmClass A RealmObject subclass. * @return True if we know about the class, otherwise false. */ static boolean knowsOfClass(Class<? extends RealmObject> realmClass) { return getClassData().isValidClass(realmClass); } /** * Check that Ruqus recognizes and has data for a transformer called {@code transformer}. * @param transformer Name to check for. * @return True if Ruqus knows of a transformer called {@code transformer}, otherwise false. */ static boolean knowsOfTransformer(String transformer) { return getTransformerData().isValidName(transformer); } /** * Translate the visible name of a RealmObject subclass to its real name. * @param visibleName Visible name of a RealmObject subclass. * @return Real name of the RealmObject subclass with the given {@code visibleName}. */ static String classNameFromVisibleName(String visibleName) { ClassData classData = getClassData(); return classData.getNames().get(classData.getVisibleNames(false).indexOf(visibleName)); } /** * Get the actual class object for the RealmObject subclass with the given real name. * @param realmClassName Name of a RealmObject subclass. * @return Class object whose name is {@code realmClassName}. */ static Class<? extends RealmObject> getClassFromName(String realmClassName) { ClassData classData = getClassData(); return classData.getClassObj(realmClassName); } /** * Check if {@code realmClass} is marked is Queryable. * @param realmClass Class to check. * @return True if class is queryable, otherwise false. */ static boolean isClassQueryable(Class<? extends RealmObject> realmClass) { ClassData classData = getClassData(); return classData.isQueryable(realmClass); } /** * Check if a {@code realmClass} has a given {@code field}. This will drill down linked fields, checking all of them * along the way. * @param realmClass Name of the RealmObject subclass to check. * @param field Name of the field to check for. * @return True if {@code realmClass} has {@code field}. 
*/ static boolean classHasField(String realmClass, String field) { if (field == null || field.isEmpty()) throw ex("field cannot be non-null or empty."); ClassData classData = getClassData(); FieldData fieldData = classData.getFieldData(realmClass); if (fieldData == null) throw ex("\"%s\" is not a valid realm object class name.", realmClass); // Split field name up so that we can drill down to the end of any linked fields. String[] fieldParts = field.split("\\Q" + FLAT_SEP + "\\E"); for (String fieldPart : fieldParts) { // Make sure we have this field part. if (!fieldData.hasField(fieldPart)) return false; // Now, if the field type is RealmObject or RealmList, we'll need to drill down. if (fieldData.isRealmListType(fieldPart) || fieldData.isRealmObjectType(fieldPart)) { // Try to get it as a realm list type. Class clazz = fieldData.realmListType(fieldPart); // If that doesn't work, do it the normal way. if (clazz == null) clazz = fieldData.fieldType(fieldPart); // Either way, we now have something which extends RealmObject. Get the field data for that object so // that we can check the next part of the link field in the next iteration. // noinspection unchecked fieldData = classData.getFieldData(clazz); continue; } // If it isn't, we can return true, because we already checked that we have it. return true; } throw ex("Couldn't verify if \"%\" has field \"%s\".", realmClass, field); } /** * Get the enum type of a [flat-]field's type. If this is a flat-field (e.g., the immediate type on the class is a * RealmObject subclass or a RealmList of such), this will drill down to the end of the flat-field to get the type * from the end of it. * <p/> * For example, if {@code field} is something like "age", and the type for it in {@code realmClass} is Integer, * that's what would be returned.<br>But if instead {@code field} was something like "dog.age", where the immediate * type is a class called "{@code Dog}" which extends RealmObject and has an Integer field called "age", this method * would drill down and find that information, and still return Integer. * <p/> * Caches values for quicker future access. * @param realmClass Name of RealmObject subclass which contains the {@code field}. * @param field Name of the field whose type is being retrieved. * @return Enum type of the field at the end of a flat-field. */ static FieldType typeEnumForField(String realmClass, String field) { if (field == null || field.isEmpty()) throw ex("field cannot be non-null or empty."); ensureInit(); String key = realmClass + KEY_SEP + field; if (INSTANCE.flatFieldToFieldType.containsKey(key)) return INSTANCE.flatFieldToFieldType.get(key); else { ClassData classData = getClassData(); FieldData fieldData = classData.getFieldData(realmClass); if (fieldData == null) throw ex("\"%s\" is not a valid realm object class name.", realmClass); // Split field name up so that we can drill down to the end of any linked fields. String[] fieldParts = field.split("\\Q" + FLAT_SEP + "\\E"); Class fieldTypeClazz = null; for (String fieldPart : fieldParts) { // Try to get it as a realm list type. fieldTypeClazz = fieldData.realmListType(fieldPart); // If that doesn't work, do it the normal way. if (fieldTypeClazz == null) fieldTypeClazz = fieldData.fieldType(fieldPart); // If that still didn't work, we have an issue. if (fieldTypeClazz == null) throw ex("Couldn't get type for \"%s\" on \"%s\".", field, realmClass); // Now, check to see if this type is a subclass of RealmObject. 
if (RealmObject.class.isAssignableFrom(fieldTypeClazz)) { // It is, so we need to get the field data for that type, and we'll try again in the next iteration. // noinspection unchecked fieldData = classData.getFieldData((Class<? extends RealmObject>) fieldTypeClazz); } } FieldType fieldType = FieldType.fromClazz(fieldTypeClazz); INSTANCE.flatFieldToFieldType.put(key, fieldType); return fieldType; } } /** * Return a list of visible names for all fields on the given RealmObject subclass, but for any fields whose types * are either also RealmObject subclass or RealmList, add entries for their fields as well. * <p/> * TODO something is broke here. * @param realmClass Name of the RealmObject subclass. * @return List of visible flat field names. */ static ArrayList<String> visibleFlatFieldsForClass(String realmClass) { ClassData classData = getClassData(); return _visibleFlatFieldsForClass(classData, classData.getFieldData(realmClass), ""); } private static ArrayList<String> _visibleFlatFieldsForClass(ClassData classData, FieldData fieldData, String prepend) { ArrayList<String> vNames = new ArrayList<>(); // Loop through real names. for (String name : fieldData.getFieldNames()) { // Get visible name of field. String visibleName = fieldData.visibleNameOf(name); // Do something different based on field's type. if (fieldData.isRealmListType(name)) { // Field type is RealmList, recurse and get its visible names as well. vNames.addAll(_visibleFlatFieldsForClass(classData, classData.getFieldData(fieldData.realmListType(name)), prepend + VIS_FLAT_SEP + visibleName)); } else if (fieldData.isRealmObjectType(name)) { // Field type is RealmObject, recurse and get its visible names as well. //noinspection unchecked vNames.addAll(_visibleFlatFieldsForClass(classData, classData.getFieldData((Class<? extends RealmObject>) fieldData.fieldType(name)), prepend + VIS_FLAT_SEP + visibleName)); } else { // Normal field, just add its visible name. vNames.add(visibleName); } } return vNames; } /** * Takes a visible flat field name and converts it to a real flat field name. * @param realmClass Name of the RealmObject subclass. * @param visibleFieldName Visible flat field name. * @return Real flat field name. */ static String fieldFromVisibleField(String realmClass, String visibleFieldName) { ensureInit(); String key = realmClass + KEY_SEP + visibleFieldName; // Try to get cached value first. if (INSTANCE.flatVisFieldToFlatField.containsKey(key)) return INSTANCE.flatVisFieldToFlatField.get(key); // If not cached, must go figure it out. ClassData classData = getClassData(); FieldData fieldData = classData.getFieldData(realmClass); String[] parts = visibleFieldName.split("\\Q" + VIS_FLAT_SEP + "\\E"); StringBuilder builder = new StringBuilder(); for (int i = 0; i < parts.length; i++) { // Use the field data to get the real name of the field. String realFieldName = fieldData.getFieldNames().get(fieldData.getVisibleNames().indexOf(parts[i])); // Append the real name. builder.append(realFieldName); if (i < parts.length - 1) { // This is a RealmObject/RealmList-type field. Append a dot and switch the field data. builder.append(FLAT_SEP); if (fieldData.isRealmListType(realFieldName)) { fieldData = classData.getFieldData(fieldData.realmListType(realFieldName)); } else { //noinspection unchecked fieldData = classData.getFieldData( (Class<? extends RealmObject>) fieldData.fieldType(realFieldName)); } } } // Cache this before returning it. 
String value = builder.toString(); INSTANCE.flatVisFieldToFlatField.put(key, realmClass + KEY_SEP + value); return value; } /** * Takes a real flat field name and converts it to a visible flat field name. * @param realmClass Name of the RealmObject subclass. * @param field Real flat field name. * @return Visible flat field name. */ static String visibleFieldFromField(String realmClass, String field) { ensureInit(); String value = realmClass + "$" + field; // Try to get cached key first. if (INSTANCE.flatVisFieldToFlatField.containsValue(value)) { for (Map.Entry<String, String> entry : INSTANCE.flatVisFieldToFlatField.entrySet()) { if (entry.getValue().equals(value)) return entry.getKey(); } } // If not cached, must go figure it out. StringBuilder builder = new StringBuilder(); String className = realmClass; String[] parts = field.split("\\Q" + FLAT_SEP + "\\E"); for (int i = 0; i < parts.length; i++) { if (i != parts.length - 1) { // Not at the end of the link yet. className = parts[i]; builder.append(INSTANCE.classData.visibleNameOf(parts[i])) .append(VIS_FLAT_SEP); } else { // This is the end of the link. builder.append(getFieldData(className).visibleNameOf(parts[i])); } } // Cache this before returning it. String key = builder.toString(); INSTANCE.flatVisFieldToFlatField.put(realmClass + KEY_SEP + key, value); return key; } /** * Check if a {@code field} of a {@code realmClass} is of the given {@code type}. (Note that this method uses {@link * Class#isAssignableFrom(Class)} to check if the given {@code type} can be used for the field; that is, {@code * type} may be a subclass of the field's actual type. * @param realmClass Name of RealmObject subclass which contains the {@code field}. * @param field Name of the field whose type is being checked. * @param type Type class. * @return True if {@code type} is assignable to {@code field}'s actual type. */ static boolean fieldIsOfType(String realmClass, String field, FieldType type) { FieldData fieldData = getFieldData(realmClass); if (fieldData == null) throw ex("\"%s\" is not a valid realm object class name.", realmClass); Class<?> actualType = fieldData.fieldType(field); if (actualType == null) throw ex("\"%s\" is not a valid field name for the class \"%s\".", field, realmClass); return actualType.isAssignableFrom(type.getClazz()); } /** * Gets the real name of a transformer class whose visible name is {@code visibleTransName}. * @param visibleTransName Visible name of transformer. * @param isNoArgs Whether the transformer is a no-args transformer or not. * @return Real name of transformer. */ static String transformerNameFromVisibleName(String visibleTransName, boolean isNoArgs) { TransformerData transformerData = getTransformerData(); return isNoArgs ? transformerData.getNoArgNames().get(transformerData.getVisibleNoArgNames().indexOf(visibleTransName)) : transformerData.getNames().get(transformerData.getVisibleNames().indexOf(visibleTransName)); } /** * Gets the number of arguments which the transformer whose name is {@code transformerName} accepts. * @param transformerName Real name of transformer. * @return Number of arguments accepted. May be {@link C#VAR_ARGS}, which equates to -1. */ static int numberOfArgsFor(String transformerName) { return getTransformerData().numArgsOf(transformerName); } /** * Whether or not the transformer whose name is {@code transformerName} accepts the given {@code type}. * @param transformerName Real name of a normal transformer. * @param type Type to check for. 
* @return True if the transformer with the given name accepts the given type, otherwise false. */ static boolean transformerAcceptsType(String transformerName, Class type) { return getTransformerData().acceptsType(transformerName, type); } /** * Get an instance of the transformer whose real name is {@code transformerName}. * @param transformerName Fully-qualified name of the transformer class to get an instance of. * @return Instance of the transformer class with the given name. */ static RUQTransformer getTransformer(String transformerName) { return getTransformerData().getTransformer(transformerName); } /** * Convenience method for throwing an IllegalArgumentException with a formatted string. */ private static IllegalArgumentException ex(String format, Object... args) { return new IllegalArgumentException(String.format(format, args)); } }
ruqus-core/src/main/java/com/bkromhout/ruqus/Ruqus.java
package com.bkromhout.ruqus; import android.content.Context; import android.support.v4.content.ContextCompat; import io.realm.RealmObject; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; /** * Access to Ruqus information. This class mostly serves as a convenience class, using the instances of {@link * ClassData} and {@link TransformerData} objects that it holds in order to provide static methods which allow the rest * of the library to make on-liner calls as much as possible. */ public class Ruqus { private static final String FLAT_SEP = "."; private static final String VIS_FLAT_SEP = "VIS_FLAT_SEP"; static int LIGHT_TEXT_COLOR, DARK_TEXT_COLOR, LIGHT_CARD_COLOR, DARK_CARD_COLOR; static String CHOOSE_FIELD, CHOOSE_CONDITIONAL; /** * Whether or not Ruqus.init() has already been called. */ private static Ruqus INSTANCE = null; /** * Ruqus class information. */ private ClassData classData; /** * Ruqus transformer information. */ private TransformerData transformerData; /** * Used to cache information to speed up converting a flat visible field name string to a flat real field name * string. */ private HashMap<String, String> flatVisFieldToFlatField; /** * Used to cache information to speed up converting a flat real field name to a field type. */ private HashMap<String, FieldType> flatFieldToFieldType; private Ruqus() { // Load the Ruqus class data object. try { classData = (ClassData) Class.forName(C.GEN_PKG_PREFIX + C.GEN_CLASS_DATA_CLASS_NAME).newInstance(); } catch (ClassNotFoundException e) { throw ex("Could not find generated Ruqus class data, did the annotation processor run?"); } catch (Exception e) { throw ex("Could not get generated Ruqus class data."); } // Load the Ruqus transformer data object. Make sure that we look through all of them so that all of the // transformers' data are loaded into the base class's static variables. The reason there are more than // one is due to there being at least one which comes with Ruqus (for the transformers which come with // Ruqus), plus any more which are generated when the app compiles due to the dev creating their own. int num = 1; boolean noneYet = true; while (true) { try { transformerData = (TransformerData) Class.forName(C.GEN_PKG_PREFIX + C.GEN_TRANSFORMER_DATA_CLASS_NAME + String.valueOf(num)).newInstance(); noneYet = false; num++; } catch (ClassNotFoundException e) { if (noneYet) throw ex("Could not find generated Ruqus transformer data, did the annotation processor run?"); else break; } catch (Exception e) { if (noneYet) throw ex("Could not get generated Ruqus transformer data."); else break; } } // Create hashmaps to use for caching. flatVisFieldToFlatField = new HashMap<>(); flatFieldToFieldType = new HashMap<>(); } /** * Initializes Ruqus. This must be called <i>once</i> before any of the other methods on {@link Ruqus} can be used, * and it requires reflection. It is recommended that it be called as early as possible in the application's * lifecycle. 
*/ public static void init(Context context) { if (INSTANCE == null) { INSTANCE = new Ruqus(); LIGHT_TEXT_COLOR = ContextCompat.getColor(context, R.color.ruqus_textColorPrimaryLight); DARK_TEXT_COLOR = ContextCompat.getColor(context, R.color.ruqus_textColorPrimaryDark); LIGHT_CARD_COLOR = ContextCompat.getColor(context, R.color.cardview_light_background); DARK_CARD_COLOR = ContextCompat.getColor(context, R.color.cardview_dark_background); CHOOSE_FIELD = context.getString(R.string.ruqus_choose_field); CHOOSE_CONDITIONAL = context.getString(R.string.ruqus_choose_conditional); } } /** * Ensures that {@link Ruqus#init(Context)} has been called. */ private static void ensureInit() { if (INSTANCE == null) throw ex("Ruqus.init() must be called first."); } static ClassData getClassData() { ensureInit(); return INSTANCE.classData; } static FieldData getFieldData(String realmClass) { return getClassData().getFieldData(realmClass); } static TransformerData getTransformerData() { ensureInit(); return INSTANCE.transformerData; } /** * Check whether or not Ruqus has data for a RealmObject subclass with the name {@code realmClass}. * @param realmClass Name to check for. * @return True if Ruqus knows of a class called {@code realmClass}, otherwise false. */ static boolean knowsOfClass(String realmClass) { return getClassData().isValidName(realmClass); } /** * Check that Ruqus recognizes and has data for a RealmObject subclass {@code clazz}. * @param realmClass A RealmObject subclass. * @return True if we know about the class, otherwise false. */ static boolean knowsOfClass(Class<? extends RealmObject> realmClass) { return getClassData().isValidClass(realmClass); } /** * Check that Ruqus recognizes and has data for a transformer called {@code transformer}. * @param transformer Name to check for. * @return True if Ruqus knows of a transformer called {@code transformer}, otherwise false. */ static boolean knowsOfTransformer(String transformer) { return getTransformerData().isValidName(transformer); } /** * Translate the visible name of a RealmObject subclass to its real name. * @param visibleName Visible name of a RealmObject subclass. * @return Real name of the RealmObject subclass with the given {@code visibleName}. */ static String classNameFromVisibleName(String visibleName) { ClassData classData = getClassData(); return classData.getNames().get(classData.getVisibleNames(false).indexOf(visibleName)); } /** * Get the actual class object for the RealmObject subclass with the given real name. * @param realmClassName Name of a RealmObject subclass. * @return Class object whose name is {@code realmClassName}. */ static Class<? extends RealmObject> getClassFromName(String realmClassName) { ClassData classData = getClassData(); return classData.getClassObj(realmClassName); } /** * Check if {@code realmClass} is marked is Queryable. * @param realmClass Class to check. * @return True if class is queryable, otherwise false. */ static boolean isClassQueryable(Class<? extends RealmObject> realmClass) { ClassData classData = getClassData(); return classData.isQueryable(realmClass); } /** * Check if a {@code realmClass} has a given {@code field}. This will drill down linked fields, checking all of them * along the way. * @param realmClass Name of the RealmObject subclass to check. * @param field Name of the field to check for. * @return True if {@code realmClass} has {@code field}. 
*/ static boolean classHasField(String realmClass, String field) { if (field == null || field.isEmpty()) throw ex("field cannot be non-null or empty."); ClassData classData = getClassData(); FieldData fieldData = classData.getFieldData(realmClass); if (fieldData == null) throw ex("\"%s\" is not a valid realm object class name.", realmClass); // Split field name up so that we can drill down to the end of any linked fields. String[] fieldParts = field.split("\\Q" + FLAT_SEP + "\\E"); for (String fieldPart : fieldParts) { // Make sure we have this field part. if (!fieldData.hasField(fieldPart)) return false; // Now, if the field type is RealmObject or RealmList, we'll need to drill down. if (fieldData.isRealmListType(fieldPart) || fieldData.isRealmObjectType(fieldPart)) { // Try to get it as a realm list type. Class clazz = fieldData.realmListType(fieldPart); // If that doesn't work, do it the normal way. if (clazz == null) clazz = fieldData.fieldType(fieldPart); // Either way, we now have something which extends RealmObject. Get the field data for that object so // that we can check the next part of the link field in the next iteration. // noinspection unchecked fieldData = classData.getFieldData(clazz); continue; } // If it isn't, we can return true, because we already checked that we have it. return true; } throw ex("Couldn't verify if \"%\" has field \"%s\".", realmClass, field); } /** * Get the enum type of a [flat-]field's type. If this is a flat-field (e.g., the immediate type on the class is a * RealmObject subclass or a RealmList of such), this will drill down to the end of the flat-field to get the type * from the end of it. * <p/> * For example, if {@code field} is something like "age", and the type for it in {@code realmClass} is Integer, * that's what would be returned.<br>But if instead {@code field} was something like "dog.age", where the immediate * type is a class called "{@code Dog}" which extends RealmObject and has an Integer field called "age", this method * would drill down and find that information, and still return Integer. * <p/> * Caches values for quicker future access. * @param realmClass Name of RealmObject subclass which contains the {@code field}. * @param field Name of the field whose type is being retrieved. * @return Enum type of the field at the end of a flat-field. */ static FieldType typeEnumForField(String realmClass, String field) { if (field == null || field.isEmpty()) throw ex("field cannot be non-null or empty."); ensureInit(); String key = realmClass + "$" + field; if (INSTANCE.flatFieldToFieldType.containsKey(key)) return INSTANCE.flatFieldToFieldType.get(key); else { ClassData classData = getClassData(); FieldData fieldData = classData.getFieldData(realmClass); if (fieldData == null) throw ex("\"%s\" is not a valid realm object class name.", realmClass); // Split field name up so that we can drill down to the end of any linked fields. String[] fieldParts = field.split("\\Q" + FLAT_SEP + "\\E"); Class fieldTypeClazz = null; for (String fieldPart : fieldParts) { // Try to get it as a realm list type. fieldTypeClazz = fieldData.realmListType(fieldPart); // If that doesn't work, do it the normal way. if (fieldTypeClazz == null) fieldTypeClazz = fieldData.fieldType(fieldPart); // If that still didn't work, we have an issue. if (fieldTypeClazz == null) throw ex("Couldn't get type for \"%s\" on \"%s\".", field, realmClass); // Now, check to see if this type is a subclass of RealmObject. 
if (RealmObject.class.isAssignableFrom(fieldTypeClazz)) { // It is, so we need to get the field data for that type, and we'll try again in the next iteration. // noinspection unchecked fieldData = classData.getFieldData((Class<? extends RealmObject>) fieldTypeClazz); } } FieldType fieldType = FieldType.fromClazz(fieldTypeClazz); INSTANCE.flatFieldToFieldType.put(key, fieldType); return fieldType; } } /** * Return a list of visible names for all fields on the given RealmObject subclass, but for any fields whose types * are either also RealmObject subclass or RealmList, add entries for their fields as well. * <p/> * TODO something is broke here. * @param realmClass Name of the RealmObject subclass. * @return List of visible flat field names. */ static ArrayList<String> visibleFlatFieldsForClass(String realmClass) { ClassData classData = getClassData(); return _visibleFlatFieldsForClass(classData, classData.getFieldData(realmClass), ""); } private static ArrayList<String> _visibleFlatFieldsForClass(ClassData classData, FieldData fieldData, String prepend) { ArrayList<String> vNames = new ArrayList<>(); // Loop through real names. for (String name : fieldData.getFieldNames()) { // Get visible name of field. String visibleName = fieldData.visibleNameOf(name); // Do something different based on field's type. if (fieldData.isRealmListType(name)) { // Field type is RealmList, recurse and get its visible names as well. vNames.addAll(_visibleFlatFieldsForClass(classData, classData.getFieldData(fieldData.realmListType(name)), prepend + VIS_FLAT_SEP + visibleName)); } else if (fieldData.isRealmObjectType(name)) { // Field type is RealmObject, recurse and get its visible names as well. //noinspection unchecked vNames.addAll(_visibleFlatFieldsForClass(classData, classData.getFieldData((Class<? extends RealmObject>) fieldData.fieldType(name)), prepend + VIS_FLAT_SEP + visibleName)); } else { // Normal field, just add its visible name. vNames.add(visibleName); } } return vNames; } /** * Takes a visible flat field name and converts it to a real flat field name. * @param realmClass Name of the RealmObject subclass. * @param visibleFieldName Visible flat field name. * @return Real flat field name. */ static String fieldFromVisibleField(String realmClass, String visibleFieldName) { ensureInit(); String key = realmClass + "$" + visibleFieldName; // Try to get cached value first. if (INSTANCE.flatVisFieldToFlatField.containsKey(key)) return INSTANCE.flatVisFieldToFlatField.get(key); // If not cached, must go figure it out. ClassData classData = getClassData(); FieldData fieldData = classData.getFieldData(realmClass); String[] parts = visibleFieldName.split("\\Q" + VIS_FLAT_SEP + "\\E"); StringBuilder builder = new StringBuilder(); for (int i = 0; i < parts.length; i++) { // Use the field data to get the real name of the field. String realFieldName = fieldData.getFieldNames().get(fieldData.getVisibleNames().indexOf(parts[i])); // Append the real name. builder.append(realFieldName); if (i < parts.length - 1) { // This is a RealmObject/RealmList-type field. Append a dot and switch the field data. builder.append(FLAT_SEP); if (fieldData.isRealmListType(realFieldName)) { fieldData = classData.getFieldData(fieldData.realmListType(realFieldName)); } else { //noinspection unchecked fieldData = classData.getFieldData( (Class<? extends RealmObject>) fieldData.fieldType(realFieldName)); } } } // Cache this before returning it. 
String value = builder.toString(); INSTANCE.flatVisFieldToFlatField.put(key, realmClass + "$" + value); return value; } /** * Takes a real flat field name and converts it to a visible flat field name. * @param realmClass Name of the RealmObject subclass. * @param field Real flat field name. * @return Visible flat field name. */ static String visibleFieldFromField(String realmClass, String field) { ensureInit(); String value = realmClass + "$" + field; // Try to get cached key first. if (INSTANCE.flatVisFieldToFlatField.containsValue(value)) { for (Map.Entry<String, String> entry : INSTANCE.flatVisFieldToFlatField.entrySet()) { if (entry.getValue().equals(value)) return entry.getKey(); } } // If not cached, must go figure it out. StringBuilder builder = new StringBuilder(); String className = realmClass; String[] parts = field.split("\\Q" + FLAT_SEP + "\\E"); for (int i = 0; i < parts.length; i++) { if (i != parts.length - 1) { // Not at the end of the link yet. className = parts[i]; builder.append(INSTANCE.classData.visibleNameOf(parts[i])) .append(VIS_FLAT_SEP); } else { // This is the end of the link. builder.append(getFieldData(className).visibleNameOf(parts[i])); } } // Cache this before returning it. String key = builder.toString(); INSTANCE.flatVisFieldToFlatField.put(realmClass + "$" + key, value); return key; } /** * Check if a {@code field} of a {@code realmClass} is of the given {@code type}. (Note that this method uses {@link * Class#isAssignableFrom(Class)} to check if the given {@code type} can be used for the field; that is, {@code * type} may be a subclass of the field's actual type. * @param realmClass Name of RealmObject subclass which contains the {@code field}. * @param field Name of the field whose type is being checked. * @param type Type class. * @return True if {@code type} is assignable to {@code field}'s actual type. */ static boolean fieldIsOfType(String realmClass, String field, FieldType type) { FieldData fieldData = getFieldData(realmClass); if (fieldData == null) throw ex("\"%s\" is not a valid realm object class name.", realmClass); Class<?> actualType = fieldData.fieldType(field); if (actualType == null) throw ex("\"%s\" is not a valid field name for the class \"%s\".", field, realmClass); return actualType.isAssignableFrom(type.getClazz()); } /** * Gets the real name of a transformer class whose visible name is {@code visibleTransName}. * @param visibleTransName Visible name of transformer. * @param isNoArgs Whether the transformer is a no-args transformer or not. * @return Real name of transformer. */ static String transformerNameFromVisibleName(String visibleTransName, boolean isNoArgs) { TransformerData transformerData = getTransformerData(); return isNoArgs ? transformerData.getNoArgNames().get(transformerData.getVisibleNoArgNames().indexOf(visibleTransName)) : transformerData.getNames().get(transformerData.getVisibleNames().indexOf(visibleTransName)); } /** * Gets the number of arguments which the transformer whose name is {@code transformerName} accepts. * @param transformerName Real name of transformer. * @return Number of arguments accepted. May be {@link C#VAR_ARGS}, which equates to -1. */ static int numberOfArgsFor(String transformerName) { return getTransformerData().numArgsOf(transformerName); } /** * Whether or not the transformer whose name is {@code transformerName} accepts the given {@code type}. * @param transformerName Real name of a normal transformer. * @param type Type to check for. 
* @return True if the transformer with the given name accepts the given type, otherwise false. */ static boolean transformerAcceptsType(String transformerName, Class type) { return getTransformerData().acceptsType(transformerName, type); } /** * Get an instance of the transformer whose real name is {@code transformerName}. * @param transformerName Fully-qualified name of the transformer class to get an instance of. * @return Instance of the transformer class with the given name. */ static RUQTransformer getTransformer(String transformerName) { return getTransformerData().getTransformer(transformerName); } /** * Convenience method for throwing an IllegalArgumentException with a formatted string. */ private static IllegalArgumentException ex(String format, Object... args) { return new IllegalArgumentException(String.format(format, args)); } }
String constants in Ruqus
ruqus-core/src/main/java/com/bkromhout/ruqus/Ruqus.java
String constants in Ruqus
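The commit above replaces the repeated "$" literal with a KEY_SEP constant and corrects VIS_FLAT_SEP, which previously held its own name ("VIS_FLAT_SEP") rather than a separator character. Below is a minimal standalone sketch of that pattern with invented names (it is not Ruqus API): named separator constants plus a regex-quoted split so the separator is always matched literally.

import java.util.regex.Pattern;

/**
 * Minimal sketch of the "named separator constants" pattern applied in the diff above.
 * KEY_SEP joins a class name and a field path into a cache key; FLAT_SEP separates the
 * parts of a flattened field path. Names are illustrative only.
 */
public final class SeparatorConstantsSketch {
    private static final String KEY_SEP = "$";
    private static final String FLAT_SEP = ".";

    private SeparatorConstantsSketch() { }

    /** Builds a cache key such as "Dog$owner.age". */
    static String cacheKey(final String className, final String flatFieldName) {
        return className + KEY_SEP + flatFieldName;
    }

    /** Splits a flattened field path on the literal separator, e.g. "owner.age" -> ["owner", "age"]. */
    static String[] splitFlatField(final String flatFieldName) {
        // Pattern.quote is equivalent to wrapping the separator in \Q ... \E as the original code does.
        return flatFieldName.split(Pattern.quote(FLAT_SEP));
    }

    public static void main(final String[] args) {
        System.out.println(cacheKey("Dog", "owner.age"));                     // Dog$owner.age
        System.out.println(String.join(" | ", splitFlatField("owner.age")));  // owner | age
    }
}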
Java
mit
4418f9523130fa948ca4ea1f76b889e649a9535d
0
igm-team/atav,igm-team/atav,igm-team/atav,igm-team/atav
package function.annotation.base; import function.genotype.collapsing.CollapsingCommand; import utils.ErrorManager; import utils.LogManager; import java.io.*; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import function.variant.base.RegionManager; import java.sql.Statement; import utils.DBManager; /** * * @author nick */ public class GeneManager { public static final String TMP_GENE_TABLE = "tmp_gene_chr"; // need to append chr in real time private static HashMap<String, HashSet<Gene>> geneMap = new HashMap<>(); private static HashMap<String, StringBuilder> chrAllGeneMap = new HashMap<>(); private static HashMap<String, HashSet<Gene>> geneMapByName = new HashMap<>(); private static final HashMap<String, HashSet<Gene>> geneMapByBoundaries = new HashMap<>(); private static ArrayList<Gene> geneBoundaryList = new ArrayList<>(); private static int allGeneBoundaryLength; private static HashMap<String, String> geneCoverageSummaryMap = new HashMap<>(); private static boolean isUsed = false; public static void init() throws Exception { initGeneName(); initGeneBoundaries(); initGeneMap(); resetRegionList(); initTempTable(); } private static void initGeneName() throws Exception { if (AnnotationLevelFilterCommand.geneInput.isEmpty()) { return; } isUsed = true; File f = new File(AnnotationLevelFilterCommand.geneInput); if (f.isFile()) { initFromFile(f); } else { String[] genes = AnnotationLevelFilterCommand.geneInput.split(","); for (String geneName : genes) { Gene gene = new Gene(geneName); if (gene.isValid()) { HashSet<Gene> set = new HashSet<>(); set.add(gene); geneMapByName.put(geneName, set); } else { LogManager.writeAndPrint("Invalid gene: " + gene.getName()); } } } } private static void initFromFile(File f) { String lineStr = ""; int lineNum = 0; try { FileInputStream fstream = new FileInputStream(f); DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); while ((lineStr = br.readLine()) != null) { lineNum++; if (lineStr.isEmpty()) { continue; } Gene gene = new Gene(lineStr); if (gene.isValid()) { HashSet<Gene> set = new HashSet<>(); set.add(gene); geneMapByName.put(lineStr, set); } else { LogManager.writeAndPrint("Invalid gene: " + gene.getName()); } } } catch (Exception e) { LogManager.writeAndPrintNoNewLine("\nError line (" + lineNum + ") in gene file: " + lineStr); ErrorManager.send(e); } } private static void initGeneBoundaries() throws Exception { if (AnnotationLevelFilterCommand.geneBoundaryFile.isEmpty()) { return; } isUsed = true; File f = new File(AnnotationLevelFilterCommand.geneBoundaryFile); FileInputStream fstream = new FileInputStream(f); DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); String line; int geneIndex = 0; allGeneBoundaryLength = 0; while ((line = br.readLine()) != null) { if (!line.isEmpty()) { line = line.replaceAll("\"", "").replaceAll("\t", " "); Gene gene = new Gene(line); if (gene.isValid()) { HashSet<Gene> set = new HashSet<>(); set.add(gene); String geneId = gene.getName(); if (geneId.contains("_")) { // if using gene domain String geneName = geneId.substring(0, geneId.indexOf("_")); if (!geneMapByBoundaries.containsKey(geneName)) { geneMapByBoundaries.put(geneName, set); } else { geneMapByBoundaries.get(geneName).add(gene); } } else { geneMapByBoundaries.put(geneId, set); } gene.setIndex(geneIndex++); geneBoundaryList.add(gene); allGeneBoundaryLength += gene.getLength(); } else { 
LogManager.writeAndPrint("Invalid gene: " + gene.getName()); } } } } private static void initGeneMap() { if (isUsed) { if (geneMapByName.isEmpty()) { geneMap.putAll(geneMapByBoundaries); } else if (geneMapByBoundaries.isEmpty()) { geneMap.putAll(geneMapByName); } else { HashSet<String> nameSet = new HashSet<>(); nameSet.addAll(geneMapByName.keySet()); nameSet.addAll(geneMapByBoundaries.keySet()); for (String geneName : nameSet) { if (geneMapByName.containsKey(geneName) && geneMapByBoundaries.containsKey(geneName)) { HashSet<Gene> set = geneMapByBoundaries.get(geneName); geneMap.put(geneName, set); } } } } } private static void resetRegionList() throws Exception { if (isUsed) { ArrayList<String> chrList = new ArrayList<>(); for (String chr : RegionManager.ALL_CHR) { chrAllGeneMap.put(chr, new StringBuilder()); } geneMap.entrySet().stream().forEach((entry) -> { Gene gene = entry.getValue().iterator().next(); if (!gene.getChr().isEmpty()) { if (!chrList.contains(gene.getChr())) { chrList.add(gene.getChr()); } StringBuilder sb = chrAllGeneMap.get(gene.getChr()); if (sb.length() == 0) { sb.append("('").append(entry.getKey()).append("')"); } else { sb.append(",('").append(entry.getKey()).append("')"); } } }); if (!RegionManager.isUsed()) { RegionManager.clear(); RegionManager.initChrRegionList(chrList.toArray(new String[chrList.size()])); RegionManager.sortRegionList(); } } } private static void initTempTable() { try { for (String chr : chrAllGeneMap.keySet()) { Statement stmt = DBManager.createStatementByReadOnlyConn(); // create table stmt.executeUpdate( "CREATE TEMPORARY TABLE " + TMP_GENE_TABLE + chr + "(" + "input_gene varchar(128) NOT NULL, " + "PRIMARY KEY (input_gene)) ENGINE=TokuDB;"); if (chrAllGeneMap.get(chr).length() > 0) { // insert values stmt.executeUpdate("INSERT INTO " + TMP_GENE_TABLE + chr + " values " + chrAllGeneMap.get(chr)); stmt.closeOnCompletion(); } } chrAllGeneMap.clear(); // free memmory } catch (Exception e) { ErrorManager.send(e); } } public static void initCoverageSummary() throws Exception { if (CollapsingCommand.coverageSummaryFile.isEmpty()) { return; } File f = new File(CollapsingCommand.coverageSummaryFile); FileInputStream fstream = new FileInputStream(f); DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); String line; boolean isTitle = true; while ((line = br.readLine()) != null) { if (!line.isEmpty()) { int firstCommaIndex = line.indexOf(","); String firstRowValue = line.substring(0, firstCommaIndex); String restRowValues = line.substring(firstCommaIndex + 1); if (isTitle) { isTitle = false; geneCoverageSummaryMap.put("title", restRowValues); } if (!geneCoverageSummaryMap.containsKey(firstRowValue)) { geneCoverageSummaryMap.put(firstRowValue, restRowValues); } } } } public static String getCoverageSummary(String geneName) { if (geneCoverageSummaryMap.containsKey(geneName)) { return geneCoverageSummaryMap.get(geneName) + ","; } else { return ""; } } public static HashMap<String, HashSet<Gene>> getMap() { return geneMap; } public static ArrayList<Gene> getGeneBoundaryList() { return geneBoundaryList; } public static int getAllGeneBoundaryLength() { return allGeneBoundaryLength; } public static boolean isValid(Annotation annotation, String chr, int pos) { if (geneMap.isEmpty()) { return true; } HashSet<Gene> set = geneMap.get(annotation.geneName); if (set != null) { if (GeneManager.getGeneBoundaryList().isEmpty()) { return true; } else { for (Gene gene : set) { if (gene.contains(chr, pos)) { 
annotation.geneName = gene.getName(); return true; } } } } return false; } public static boolean isUsed() { return isUsed; } }
src/main/java/function/annotation/base/GeneManager.java
package function.annotation.base; import function.genotype.collapsing.CollapsingCommand; import utils.ErrorManager; import utils.LogManager; import java.io.*; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import function.variant.base.RegionManager; import java.sql.Statement; import utils.DBManager; /** * * @author nick */ public class GeneManager { public static final String TMP_GENE_TABLE = "tmp_gene_chr"; // need to append chr in real time private static HashMap<String, HashSet<Gene>> geneMap = new HashMap<>(); private static HashMap<String, StringBuilder> chrAllGeneMap = new HashMap<>(); private static HashMap<String, HashSet<Gene>> geneMapByName = new HashMap<>(); private static final HashMap<String, HashSet<Gene>> geneMapByBoundaries = new HashMap<>(); private static ArrayList<Gene> geneBoundaryList = new ArrayList<>(); private static int allGeneBoundaryLength; private static HashMap<String, String> geneCoverageSummaryMap = new HashMap<>(); private static boolean isUsed = false; public static void init() throws Exception { initGeneName(); initGeneBoundaries(); initGeneMap(); resetRegionList(); initTempTable(); } private static void initGeneName() throws Exception { if (AnnotationLevelFilterCommand.geneInput.isEmpty()) { return; } isUsed = true; File f = new File(AnnotationLevelFilterCommand.geneInput); if (f.isFile()) { initFromFile(f); } else { String[] genes = AnnotationLevelFilterCommand.geneInput.split(","); for (String geneName : genes) { Gene gene = new Gene(geneName); if (gene.isValid()) { HashSet<Gene> set = new HashSet<>(); set.add(gene); geneMapByName.put(geneName, set); } else { LogManager.writeAndPrint("Invalid gene: " + gene.getName()); } } } } private static void initFromFile(File f) { String lineStr = ""; int lineNum = 0; try { FileInputStream fstream = new FileInputStream(f); DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); while ((lineStr = br.readLine()) != null) { lineNum++; if (lineStr.isEmpty()) { continue; } Gene gene = new Gene(lineStr); if (gene.isValid()) { HashSet<Gene> set = new HashSet<>(); set.add(gene); geneMapByName.put(lineStr, set); } else { LogManager.writeAndPrint("Invalid gene: " + gene.getName()); } } } catch (Exception e) { LogManager.writeAndPrintNoNewLine("\nError line (" + lineNum + ") in gene file: " + lineStr); ErrorManager.send(e); } } private static void initGeneBoundaries() throws Exception { if (AnnotationLevelFilterCommand.geneBoundaryFile.isEmpty()) { return; } isUsed = true; File f = new File(AnnotationLevelFilterCommand.geneBoundaryFile); FileInputStream fstream = new FileInputStream(f); DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); String line; int geneIndex = 0; allGeneBoundaryLength = 0; while ((line = br.readLine()) != null) { if (!line.isEmpty()) { line = line.replaceAll("\"", "").replaceAll("\t", " "); Gene gene = new Gene(line); if (gene.isValid()) { HashSet<Gene> set = new HashSet<>(); set.add(gene); String geneId = gene.getName(); if (geneId.contains("_")) { // if using gene domain String geneName = geneId.substring(0, geneId.indexOf("_")); if (!geneMapByBoundaries.containsKey(geneName)) { geneMapByBoundaries.put(geneName, set); } else { geneMapByBoundaries.get(geneName).add(gene); } } else { geneMapByBoundaries.put(geneId, set); } gene.setIndex(geneIndex++); geneBoundaryList.add(gene); allGeneBoundaryLength += gene.getLength(); } else { 
LogManager.writeAndPrint("Invalid gene: " + gene.getName()); } } } } private static void initGeneMap() { if (isUsed) { if (geneMapByName.isEmpty()) { geneMap.putAll(geneMapByBoundaries); } else if (geneMapByBoundaries.isEmpty()) { geneMap.putAll(geneMapByName); } else { HashSet<String> nameSet = new HashSet<>(); nameSet.addAll(geneMapByName.keySet()); nameSet.addAll(geneMapByBoundaries.keySet()); for (String geneName : nameSet) { if (geneMapByName.containsKey(geneName) && geneMapByBoundaries.containsKey(geneName)) { HashSet<Gene> set = geneMapByBoundaries.get(geneName); geneMap.put(geneName, set); } } } } } private static void resetRegionList() throws Exception { if (isUsed) { ArrayList<String> chrList = new ArrayList<>(); for (String chr : RegionManager.ALL_CHR) { chrAllGeneMap.put(chr, new StringBuilder()); } geneMap.entrySet().stream().forEach((entry) -> { Gene gene = entry.getValue().iterator().next(); if (!gene.getChr().isEmpty()) { if (!chrList.contains(gene.getChr())) { chrList.add(gene.getChr()); } StringBuilder sb = chrAllGeneMap.get(gene.getChr()); if (sb.length() == 0) { sb.append("('").append(entry.getKey()).append("')"); } else { sb.append(",('").append(entry.getKey()).append("')"); } } }); if (!RegionManager.isUsed()) { RegionManager.clear(); RegionManager.initChrRegionList(chrList.toArray(new String[chrList.size()])); RegionManager.sortRegionList(); } } } private static void initTempTable() { try { for (String chr : chrAllGeneMap.keySet()) { Statement stmt = DBManager.createStatementByReadOnlyConn(); // create table stmt.executeUpdate( "CREATE TEMPORARY TABLE " + TMP_GENE_TABLE + chr + "(" + "input_gene varchar(128) NOT NULL, " + "PRIMARY KEY (input_gene)) ENGINE=TokuDB;"); if (chrAllGeneMap.get(chr).length() > 0) { // insert values stmt.executeUpdate("INSERT INTO " + TMP_GENE_TABLE + chr + " values " + chrAllGeneMap.get(chr)); stmt.closeOnCompletion(); } } chrAllGeneMap.clear(); // free memmory } catch (Exception e) { ErrorManager.send(e); } } public static void initCoverageSummary() throws Exception { if (CollapsingCommand.coverageSummaryFile.isEmpty()) { return; } File f = new File(CollapsingCommand.coverageSummaryFile); FileInputStream fstream = new FileInputStream(f); DataInputStream in = new DataInputStream(fstream); BufferedReader br = new BufferedReader(new InputStreamReader(in)); String line; boolean isTitle = true; while ((line = br.readLine()) != null) { if (!line.isEmpty()) { int firstCommaIndex = line.indexOf(","); String firstRowValue = line.substring(0, firstCommaIndex); String restRowValues = line.substring(firstCommaIndex + 1); if (isTitle) { isTitle = false; geneCoverageSummaryMap.put("title", restRowValues); } if (!geneCoverageSummaryMap.containsKey(firstRowValue)) { geneCoverageSummaryMap.put(firstRowValue, restRowValues); } } } } public static String getCoverageSummary(String geneName) { if (geneCoverageSummaryMap.containsKey(geneName)) { return geneCoverageSummaryMap.get(geneName); } else { return ""; } } public static HashMap<String, HashSet<Gene>> getMap() { return geneMap; } public static ArrayList<Gene> getGeneBoundaryList() { return geneBoundaryList; } public static int getAllGeneBoundaryLength() { return allGeneBoundaryLength; } public static boolean isValid(Annotation annotation, String chr, int pos) { if (geneMap.isEmpty()) { return true; } HashSet<Gene> set = geneMap.get(annotation.geneName); if (set != null) { if (GeneManager.getGeneBoundaryList().isEmpty()) { return true; } else { for (Gene gene : set) { if (gene.contains(chr, pos)) { 
annotation.geneName = gene.getName(); return true; } } } } return false; } public static boolean isUsed() { return isUsed; } }
Add missing comma to the gene coverage summary output
src/main/java/function/annotation/base/GeneManager.java
Add missing comma to the gene coverage summary output
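The entire diff in this record is a single trailing comma: the new getCoverageSummary returns geneCoverageSummaryMap.get(geneName) + "," so the summary can be concatenated straight into a comma-separated output row without the caller re-adding the separator. A minimal, runnable sketch of the old and new behavior; the class name and the sample gene and summary values below are invented for illustration and are not part of the repository:

import java.util.HashMap;

// Sketch of the one-line fix in GeneManager.getCoverageSummary: the new
// version appends a trailing comma so callers can concatenate the summary
// directly into a CSV-style row. The sample data below is invented.
public class CoverageSummarySketch {
    private static final HashMap<String, String> geneCoverageSummaryMap = new HashMap<>();

    // Old behavior: returns the raw summary, so the separator was silently missing.
    static String getCoverageSummaryOld(String geneName) {
        return geneCoverageSummaryMap.containsKey(geneName)
                ? geneCoverageSummaryMap.get(geneName)
                : "";
    }

    // New behavior: the trailing comma is appended here, matching the commit.
    static String getCoverageSummaryNew(String geneName) {
        return geneCoverageSummaryMap.containsKey(geneName)
                ? geneCoverageSummaryMap.get(geneName) + ","
                : "";
    }

    public static void main(String[] args) {
        geneCoverageSummaryMap.put("GENE1", "0.98,0.95");
        System.out.println(getCoverageSummaryOld("GENE1") + "nextColumn"); // 0.98,0.95nextColumn
        System.out.println(getCoverageSummaryNew("GENE1") + "nextColumn"); // 0.98,0.95,nextColumn
    }
}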
Java
mit
ace1ca1ec43ddd2580af5d491bdc841f2e45ac83
0
greysonp/hipchat
package com.stupid.hipchat; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.hardware.Sensor; import android.hardware.SensorEvent; import android.hardware.SensorEventListener; import android.hardware.SensorManager; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.util.Log; import android.view.*; import android.view.inputmethod.EditorInfo; import android.widget.EditText; import android.widget.ImageButton; import android.widget.TextView; import android.widget.Toast; import com.github.nkzawa.emitter.Emitter; import com.github.nkzawa.socketio.client.IO; import com.github.nkzawa.socketio.client.Socket; import org.json.JSONException; import org.json.JSONObject; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; import java.util.Timer; import java.util.TimerTask; /** * A chat fragment containing messages view and input form. */ public class MainFragment extends Fragment { private static final String TAG = "MainFragment"; private static final int REQUEST_LOGIN = 0; private static final int TYPING_TIMER_LENGTH = 600; private SensorManager mSensorManager; private SensorEventListener mShakira; private RecyclerView mMessagesView; private EditText mInputMessageView; private List<Message> mMessages = new ArrayList<Message>(); private RecyclerView.Adapter mAdapter; private boolean mTyping = false; private Handler mTypingHandler = new Handler(); private String mUsername; private Socket mSocket; { try { mSocket = IO.socket("http://hipsdontlie.herokuapp.com"); } catch (URISyntaxException e) { throw new RuntimeException(e); } } public MainFragment() { super(); } @Override public void onAttach(Activity activity) { super.onAttach(activity); mAdapter = new MessageAdapter(activity, mMessages); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mShakira = new Shakira(); mSensorManager = (SensorManager) getActivity().getSystemService(Context.SENSOR_SERVICE); mSensorManager.registerListener(mShakira, mSensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE), SensorManager.SENSOR_DELAY_NORMAL); setHasOptionsMenu(true); mSocket.on(Socket.EVENT_CONNECT_ERROR, onConnectError); mSocket.on(Socket.EVENT_CONNECT_TIMEOUT, onConnectError); mSocket.on("new message", onNewMessage); mSocket.on("user joined", onUserJoined); mSocket.on("user left", onUserLeft); mSocket.on("typing", onTyping); mSocket.on("stop typing", onStopTyping); mSocket.connect(); startSignIn(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_main, container, false); } @Override public void onDestroy() { super.onDestroy(); mSocket.disconnect(); mSocket.off(Socket.EVENT_CONNECT_ERROR, onConnectError); mSocket.off(Socket.EVENT_CONNECT_TIMEOUT, onConnectError); mSocket.off("new message", onNewMessage); mSocket.off("user joined", onUserJoined); mSocket.off("user left", onUserLeft); mSocket.off("typing", onTyping); mSocket.off("stop typing", onStopTyping); } @Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); mMessagesView = (RecyclerView) view.findViewById(R.id.messages); 
mMessagesView.setLayoutManager(new LinearLayoutManager(getActivity())); mMessagesView.setAdapter(mAdapter); mInputMessageView = (EditText) view.findViewById(R.id.message_input); mInputMessageView.setOnEditorActionListener(new TextView.OnEditorActionListener() { @Override public boolean onEditorAction(TextView v, int id, KeyEvent event) { if (id == R.id.send || id == EditorInfo.IME_NULL) { attemptSend(); return true; } return false; } }); mInputMessageView.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { if (null == mUsername) return; if (!mSocket.connected()) return; if (!mTyping) { mTyping = true; mSocket.emit("typing"); } mTypingHandler.removeCallbacks(onTypingTimeout); mTypingHandler.postDelayed(onTypingTimeout, TYPING_TIMER_LENGTH); } @Override public void afterTextChanged(Editable s) { } }); ImageButton sendButton = (ImageButton) view.findViewById(R.id.send_button); sendButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { attemptSend(); } }); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (Activity.RESULT_OK != resultCode) { getActivity().finish(); return; } mUsername = data.getStringExtra("username"); int numUsers = data.getIntExtra("numUsers", 1); addLog(getResources().getString(R.string.message_welcome)); addParticipantsLog(numUsers); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { // Inflate the menu; this adds items to the action bar if it is present. inflater.inflate(R.menu.menu_main, menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. 
int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_leave) { leave(); return true; } return super.onOptionsItemSelected(item); } private void addLog(String message) { mMessages.add(new Message.Builder(Message.TYPE_LOG) .message(message).build()); mAdapter.notifyItemInserted(mMessages.size() - 1); scrollToBottom(); } private void addParticipantsLog(int numUsers) { addLog(getResources().getQuantityString(R.plurals.message_participants, numUsers, numUsers)); } private void addMessage(String username, String message) { mMessages.add(new Message.Builder(Message.TYPE_MESSAGE) .username(username).message(message).build()); mAdapter.notifyItemInserted(mMessages.size() - 1); scrollToBottom(); } private void addTyping(String username) { mMessages.add(new Message.Builder(Message.TYPE_ACTION) .username(username).build()); mAdapter.notifyItemInserted(mMessages.size() - 1); scrollToBottom(); } private void removeTyping(String username) { for (int i = mMessages.size() - 1; i >= 0; i--) { Message message = mMessages.get(i); if (message.getType() == Message.TYPE_ACTION && message.getUsername().equals(username)) { mMessages.remove(i); mAdapter.notifyItemRemoved(i); } } } private void attemptSend() { if (null == mUsername) return; if (!mSocket.connected()) return; mTyping = false; String message = mInputMessageView.getText().toString().trim(); if (TextUtils.isEmpty(message)) { mInputMessageView.requestFocus(); return; } mInputMessageView.setText(""); addMessage(mUsername, message); // perform the sending message attempt. mSocket.emit("new message", message); } private void startSignIn() { mUsername = null; Intent intent = new Intent(getActivity(), LoginActivity.class); startActivityForResult(intent, REQUEST_LOGIN); } private void leave() { mUsername = null; mSocket.disconnect(); mSocket.connect(); startSignIn(); } private void scrollToBottom() { mMessagesView.scrollToPosition(mAdapter.getItemCount() - 1); } private Emitter.Listener onConnectError = new Emitter.Listener() { @Override public void call(Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(getActivity().getApplicationContext(), R.string.error_connect, Toast.LENGTH_LONG).show(); } }); } }; private Emitter.Listener onNewMessage = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; String message; try { username = data.getString("username"); message = data.getString("message"); } catch (JSONException e) { return; } removeTyping(username); addMessage(username, message); } }); } }; private Emitter.Listener onUserJoined = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; int numUsers; try { username = data.getString("username"); numUsers = data.getInt("numUsers"); } catch (JSONException e) { return; } addLog(getResources().getString(R.string.message_user_joined, username)); addParticipantsLog(numUsers); } }); } }; private Emitter.Listener onUserLeft = new Emitter.Listener() { @Override public void call(final Object... 
args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; int numUsers; try { username = data.getString("username"); numUsers = data.getInt("numUsers"); } catch (JSONException e) { return; } addLog(getResources().getString(R.string.message_user_left, username)); addParticipantsLog(numUsers); removeTyping(username); } }); } }; private Emitter.Listener onTyping = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; try { username = data.getString("username"); } catch (JSONException e) { return; } addTyping(username); } }); } }; private Emitter.Listener onStopTyping = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; try { username = data.getString("username"); } catch (JSONException e) { return; } removeTyping(username); } }); } }; private Runnable onTypingTimeout = new Runnable() { @Override public void run() { if (!mTyping) return; mTyping = false; mSocket.emit("stop typing"); } }; private static class Shakira implements SensorEventListener{ private float x = 0; private float y = 0; private Timer lockTimer; private boolean locked; private static final float THRESHOLD = 1.25f; private static final int LOCK_DURATION = 750; public Shakira() { lockTimer = new Timer(); } @Override public void onSensorChanged(SensorEvent event) { x = event.values[0]; y = event.values[1]; // Do stuff if (y > THRESHOLD && !locked) { shakeRight(); } else if (y < -THRESHOLD && !locked) { shakeLeft(); } // Log.d(TAG, "[" + x + "], [" + y + "], [" + zNew + "]"); } @Override public void onAccuracyChanged(Sensor sensor, int accuracy) { } private void shakeRight() { Log.d(TAG, "RIGHT"); startLock(); } private void shakeLeft() { Log.d(TAG, "LEFT"); startLock(); } private void startLock() { locked = true; lockTimer.schedule(new TimerTask() { @Override public void run() { locked = false; } }, LOCK_DURATION); } } }
app/src/main/java/com/stupid/hipchat/MainFragment.java
package com.stupid.hipchat; import android.app.Activity; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v7.widget.LinearLayoutManager; import android.support.v7.widget.RecyclerView; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.view.*; import android.view.inputmethod.EditorInfo; import android.widget.EditText; import android.widget.ImageButton; import android.widget.TextView; import android.widget.Toast; import com.github.nkzawa.emitter.Emitter; import com.stupid.hipchat.R; import com.github.nkzawa.socketio.client.IO; import com.github.nkzawa.socketio.client.Socket; import org.json.JSONException; import org.json.JSONObject; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.List; /** * A chat fragment containing messages view and input form. */ public class MainFragment extends Fragment { private static final int REQUEST_LOGIN = 0; private static final int TYPING_TIMER_LENGTH = 600; private RecyclerView mMessagesView; private EditText mInputMessageView; private List<Message> mMessages = new ArrayList<Message>(); private RecyclerView.Adapter mAdapter; private boolean mTyping = false; private Handler mTypingHandler = new Handler(); private String mUsername; private Socket mSocket; { try { mSocket = IO.socket("http://hipsdontlie.herokuapp.com"); } catch (URISyntaxException e) { throw new RuntimeException(e); } } public MainFragment() { super(); } @Override public void onAttach(Activity activity) { super.onAttach(activity); mAdapter = new MessageAdapter(activity, mMessages); } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); mSocket.on(Socket.EVENT_CONNECT_ERROR, onConnectError); mSocket.on(Socket.EVENT_CONNECT_TIMEOUT, onConnectError); mSocket.on("new message", onNewMessage); mSocket.on("user joined", onUserJoined); mSocket.on("user left", onUserLeft); mSocket.on("typing", onTyping); mSocket.on("stop typing", onStopTyping); mSocket.connect(); startSignIn(); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { return inflater.inflate(R.layout.fragment_main, container, false); } @Override public void onDestroy() { super.onDestroy(); mSocket.disconnect(); mSocket.off(Socket.EVENT_CONNECT_ERROR, onConnectError); mSocket.off(Socket.EVENT_CONNECT_TIMEOUT, onConnectError); mSocket.off("new message", onNewMessage); mSocket.off("user joined", onUserJoined); mSocket.off("user left", onUserLeft); mSocket.off("typing", onTyping); mSocket.off("stop typing", onStopTyping); } @Override public void onViewCreated(View view, Bundle savedInstanceState) { super.onViewCreated(view, savedInstanceState); mMessagesView = (RecyclerView) view.findViewById(R.id.messages); mMessagesView.setLayoutManager(new LinearLayoutManager(getActivity())); mMessagesView.setAdapter(mAdapter); mInputMessageView = (EditText) view.findViewById(R.id.message_input); mInputMessageView.setOnEditorActionListener(new TextView.OnEditorActionListener() { @Override public boolean onEditorAction(TextView v, int id, KeyEvent event) { if (id == R.id.send || id == EditorInfo.IME_NULL) { attemptSend(); return true; } return false; } }); mInputMessageView.addTextChangedListener(new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void 
onTextChanged(CharSequence s, int start, int before, int count) { if (null == mUsername) return; if (!mSocket.connected()) return; if (!mTyping) { mTyping = true; mSocket.emit("typing"); } mTypingHandler.removeCallbacks(onTypingTimeout); mTypingHandler.postDelayed(onTypingTimeout, TYPING_TIMER_LENGTH); } @Override public void afterTextChanged(Editable s) { } }); ImageButton sendButton = (ImageButton) view.findViewById(R.id.send_button); sendButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { attemptSend(); } }); } @Override public void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (Activity.RESULT_OK != resultCode) { getActivity().finish(); return; } mUsername = data.getStringExtra("username"); int numUsers = data.getIntExtra("numUsers", 1); addLog(getResources().getString(R.string.message_welcome)); addParticipantsLog(numUsers); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { // Inflate the menu; this adds items to the action bar if it is present. inflater.inflate(R.menu.menu_main, menu); } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); //noinspection SimplifiableIfStatement if (id == R.id.action_leave) { leave(); return true; } return super.onOptionsItemSelected(item); } private void addLog(String message) { mMessages.add(new Message.Builder(Message.TYPE_LOG) .message(message).build()); mAdapter.notifyItemInserted(mMessages.size() - 1); scrollToBottom(); } private void addParticipantsLog(int numUsers) { addLog(getResources().getQuantityString(R.plurals.message_participants, numUsers, numUsers)); } private void addMessage(String username, String message) { mMessages.add(new Message.Builder(Message.TYPE_MESSAGE) .username(username).message(message).build()); mAdapter.notifyItemInserted(mMessages.size() - 1); scrollToBottom(); } private void addTyping(String username) { mMessages.add(new Message.Builder(Message.TYPE_ACTION) .username(username).build()); mAdapter.notifyItemInserted(mMessages.size() - 1); scrollToBottom(); } private void removeTyping(String username) { for (int i = mMessages.size() - 1; i >= 0; i--) { Message message = mMessages.get(i); if (message.getType() == Message.TYPE_ACTION && message.getUsername().equals(username)) { mMessages.remove(i); mAdapter.notifyItemRemoved(i); } } } private void attemptSend() { if (null == mUsername) return; if (!mSocket.connected()) return; mTyping = false; String message = mInputMessageView.getText().toString().trim(); if (TextUtils.isEmpty(message)) { mInputMessageView.requestFocus(); return; } mInputMessageView.setText(""); addMessage(mUsername, message); // perform the sending message attempt. mSocket.emit("new message", message); } private void startSignIn() { mUsername = null; Intent intent = new Intent(getActivity(), LoginActivity.class); startActivityForResult(intent, REQUEST_LOGIN); } private void leave() { mUsername = null; mSocket.disconnect(); mSocket.connect(); startSignIn(); } private void scrollToBottom() { mMessagesView.scrollToPosition(mAdapter.getItemCount() - 1); } private Emitter.Listener onConnectError = new Emitter.Listener() { @Override public void call(Object... 
args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { Toast.makeText(getActivity().getApplicationContext(), R.string.error_connect, Toast.LENGTH_LONG).show(); } }); } }; private Emitter.Listener onNewMessage = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; String message; try { username = data.getString("username"); message = data.getString("message"); } catch (JSONException e) { return; } removeTyping(username); addMessage(username, message); } }); } }; private Emitter.Listener onUserJoined = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; int numUsers; try { username = data.getString("username"); numUsers = data.getInt("numUsers"); } catch (JSONException e) { return; } addLog(getResources().getString(R.string.message_user_joined, username)); addParticipantsLog(numUsers); } }); } }; private Emitter.Listener onUserLeft = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; int numUsers; try { username = data.getString("username"); numUsers = data.getInt("numUsers"); } catch (JSONException e) { return; } addLog(getResources().getString(R.string.message_user_left, username)); addParticipantsLog(numUsers); removeTyping(username); } }); } }; private Emitter.Listener onTyping = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; try { username = data.getString("username"); } catch (JSONException e) { return; } addTyping(username); } }); } }; private Emitter.Listener onStopTyping = new Emitter.Listener() { @Override public void call(final Object... args) { getActivity().runOnUiThread(new Runnable() { @Override public void run() { JSONObject data = (JSONObject) args[0]; String username; try { username = data.getString("username"); } catch (JSONException e) { return; } removeTyping(username); } }); } }; private Runnable onTypingTimeout = new Runnable() { @Override public void run() { if (!mTyping) return; mTyping = false; mSocket.emit("stop typing"); } }; }
Detect left and right shakes with the gyroscope.
app/src/main/java/com/stupid/hipchat/MainFragment.java
Detect left and right shakes with the gyroscope.
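Behind this commit, MainFragment gains a gyroscope SensorEventListener (the Shakira inner class) that reports a right or left shake when the y reading crosses plus or minus 1.25 and then ignores further readings for 750 ms, so one shake is not counted repeatedly. The same pattern can be shown without the Android sensor types; in the sketch below onReading() stands in for onSensorChanged(), the constants match the commit, and the printed markers mirror its log calls:

import java.util.Timer;
import java.util.TimerTask;

// Plain-Java sketch of the shake-detection pattern added in the commit's
// Shakira listener: threshold test on the gyroscope y reading, then a short
// lockout so a single shake produces a single event.
public class ShakeDetectorSketch {
    private static final float THRESHOLD = 1.25f;   // same constant as the commit
    private static final int LOCK_DURATION = 750;   // ms, same constant as the commit

    private final Timer lockTimer = new Timer(true);
    private volatile boolean locked = false;

    public void onReading(float y) {
        if (y > THRESHOLD && !locked) {
            System.out.println("RIGHT");
            startLock();
        } else if (y < -THRESHOLD && !locked) {
            System.out.println("LEFT");
            startLock();
        }
    }

    private void startLock() {
        locked = true;
        lockTimer.schedule(new TimerTask() {
            @Override
            public void run() {
                locked = false;
            }
        }, LOCK_DURATION);
    }

    public static void main(String[] args) throws InterruptedException {
        ShakeDetectorSketch detector = new ShakeDetectorSketch();
        detector.onReading(2.0f);   // prints RIGHT, then locks
        detector.onReading(-2.0f);  // suppressed while locked
        Thread.sleep(LOCK_DURATION + 100);
        detector.onReading(-2.0f);  // prints LEFT after the lock expires
    }
}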
Java
mit
48965f229af6187e0a2fe395c4c286ddde097a90
0
Team4761/TShirt-Cannon
package org.robockets.tshirtcannon; import org.robockets.buttonmanager.ButtonManager; import org.robockets.buttonmanager.buttons.ActionButton; import org.robockets.tshirtcannon.subsystems.cannon.AdjustCannon; import org.robockets.tshirtcannon.subsystems.cannon.AlignGatling; import org.robockets.tshirtcannon.subsystems.cannon.FireCannon; import org.robockets.tshirtcannon.subsystems.cannon.PopCannon; import org.robockets.tshirtcannon.subsystems.cannon.SpinGatling; import edu.wpi.first.wpilibj.Joystick; import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard; /** * This class is the glue that binds the controls on the physical operator * interface to the commands and command groups that allow control of the robot. */ public class OI { public OI() { ButtonManager.addJoystick(0); //ButtonManager.addButton(new ActionButton(0, 0, new FireCannon(), false)); //ButtonManager.addButton(new ActionButton(0, 1, new AlignGatling(), false)); ButtonManager.addButton(new ActionButton(0, 6, new PopCannon(0.05), false)); ButtonManager.addButton(new ActionButton(0, 1, new AdjustCannon(ZAxisRelativeDirection.UP), true)); ButtonManager.addButton(new ActionButton(0, 4, new AdjustCannon(ZAxisRelativeDirection.DOWN), true)); ButtonManager.addButton(new ActionButton(0, 2, new SpinGatling(0.5), true)); ButtonManager.addButton(new ActionButton(0, 3, new SpinGatling(-0.5), true)); // SmartDashboard stuff. SmartDashboard.putData("Pop Cannon", new PopCannon(0.05)); SmartDashboard.putData("Adjust Cannon Upwards", new AdjustCannon(ZAxisRelativeDirection.UP)); SmartDashboard.putData("Adjust Cannon Downwards", new AdjustCannon(ZAxisRelativeDirection.DOWN)); SmartDashboard.putData("Spin Gatling Forward", new SpinGatling(1)); SmartDashboard.putData("Spin Gatling Backwards", new SpinGatling(-1)); } }
src/org/robockets/tshirtcannon/OI.java
package org.robockets.tshirtcannon; import org.robockets.buttonmanager.ButtonManager; import org.robockets.buttonmanager.buttons.ActionButton; import org.robockets.tshirtcannon.subsystems.cannon.AdjustCannon; import org.robockets.tshirtcannon.subsystems.cannon.AlignGatling; import org.robockets.tshirtcannon.subsystems.cannon.FireCannon; import org.robockets.tshirtcannon.subsystems.cannon.PopCannon; import org.robockets.tshirtcannon.subsystems.cannon.SpinGatling; import edu.wpi.first.wpilibj.Joystick; /** * This class is the glue that binds the controls on the physical operator * interface to the commands and command groups that allow control of the robot. */ public class OI { public OI() { ButtonManager.addJoystick(0); //ButtonManager.addButton(new ActionButton(0, 0, new FireCannon(), false)); //ButtonManager.addButton(new ActionButton(0, 1, new AlignGatling(), false)); ButtonManager.addButton(new ActionButton(0, 6, new PopCannon(0.05), false)); ButtonManager.addButton(new ActionButton(0, 1, new AdjustCannon(ZAxisRelativeDirection.UP), true)); ButtonManager.addButton(new ActionButton(0, 4, new AdjustCannon(ZAxisRelativeDirection.DOWN), true)); ButtonManager.addButton(new ActionButton(0, 2, new SpinGatling(0.5), true)); ButtonManager.addButton(new ActionButton(0, 3, new SpinGatling(-0.5), true)); } }
Add commands to the SmartDashboard.
src/org/robockets/tshirtcannon/OI.java
Add commands to the SmartDashboard.
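The only change to OI in this record is a new SmartDashboard import and five putData calls in the constructor, so each cannon command can also be triggered from the dashboard. For readability, the added lines are extracted below from the flattened new_contents field; they depend on WPILib and the repository's command classes, so this is an excerpt rather than a standalone program:

import edu.wpi.first.wpilibj.smartdashboard.SmartDashboard;

// Added at the end of the OI constructor: publish each command so it can be
// run from the SmartDashboard as well as from the joystick buttons above.
SmartDashboard.putData("Pop Cannon", new PopCannon(0.05));
SmartDashboard.putData("Adjust Cannon Upwards", new AdjustCannon(ZAxisRelativeDirection.UP));
SmartDashboard.putData("Adjust Cannon Downwards", new AdjustCannon(ZAxisRelativeDirection.DOWN));
SmartDashboard.putData("Spin Gatling Forward", new SpinGatling(1));
SmartDashboard.putData("Spin Gatling Backwards", new SpinGatling(-1));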
Java
mit
ee09e4dc99f843b477a7ecfad87d310b69661bc1
0
seqcode/seqcode-core,seqcode/seqcode-core,seqcode/seqcode-core,seqcode/seqcode-core
package edu.psu.compbio.seqcode.projects.akshay.clusterkmerprofile; import java.awt.Color; import java.awt.Font; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Random; import java.util.Vector; import org.tc33.jheatchart.HeatChart; import edu.psu.compbio.seqcode.gse.tools.utils.Args; import edu.psu.compbio.seqcode.gse.utils.io.RegionFileUtilities; import edu.psu.compbio.seqcode.machinelearning.clustering.Cluster; import edu.psu.compbio.seqcode.machinelearning.clustering.ClusterRepresentative; import edu.psu.compbio.seqcode.machinelearning.clustering.ClusteringMethod; import edu.psu.compbio.seqcode.machinelearning.clustering.kmeans.KMeansClustering; public class ClusterProfiles { private KmerProfileEucDistComparator comparator; private ClusteringMethod<int[]> method; private KmerProfileAvgDistRep rep; private int K; private ArrayList<int[]> profiles; // Out tag name private String outtag="out"; // The name of the directory that holds all the results // Assumes that the directory has already been created private File outbase; private HashMap<Integer,String> indToLocation; private HashMap<Integer,Double> intToLogitScore; private int kmerLen=4; // Heatmap options public final int W_MARGIN=80; public final int H_MARGIN=60; public final int W=800; public final int H=800; //Settors public void setProfiles(ArrayList<int[]> data){profiles=data;} public void setNumClusters(int nc){K=nc;} public void setOuttag(String otag){outtag=otag;} public void setOutDir(File odir){outbase = odir;} public void setProfileInds(HashMap<Integer,String> plfsinds){indToLocation=plfsinds;} public void setProfileScores(HashMap<Integer,Double> plfscore){intToLogitScore = plfscore;} public void setKmerModLen(int k){kmerLen = k;} /** * The method that should be executed after initiating the class object * @throws IOException */ public void execute(String tag) throws IOException{ Collection<Cluster<int[]>> clusters = ((KMeansClustering<int[]>)method).clusterElements(profiles,0.01); Vector<int[]> clustermeans = ((KMeansClustering<int[]>)method).getClusterMeans(); //Print the clusters writeClusters(clustermeans,tag); //Plot the clusters Mappable orderedClusters = reorderKmerProfileMaps(clusters); drawClusterHeatmap(orderedClusters, tag); printMatrix(orderedClusters,tag); } // Slave methods private void writeClusters(Vector<int[]> clusMeans, String tag) throws IOException{ File clusout = new File(outbase.getAbsolutePath()+File.separator+outtag+"_"+tag+"_clusterAssignment.list"); FileWriter ow = new FileWriter(clusout); BufferedWriter bw = new BufferedWriter(ow); for(int p=0; p<profiles.size(); p++){ int memebership = getClusterAssignment(profiles.get(p),clusMeans); bw.write(indToLocation.get(p)+"\t"+Integer.toString(memebership)+"\t"+Double.toString(intToLogitScore.get(p))+"\n"); } bw.close(); } private int getClusterAssignment(int[] pfl, Vector<int[]> clusMeans){ int minCluster = -1; double minDist = 0.0; for(int i = 0; i < clusMeans.size(); i++) { double clustDist = comparator.evaluate(pfl, clusMeans.get(i)); if(minCluster == -1 || clustDist < minDist) { minDist = clustDist; minCluster = i; } } return minCluster; } private Mappable reorderKmerProfileMaps(Collection<Cluster<int[]>> clus){ Mappable ret = null; //Mappable features double[][] matrix; String[] rnames; 
String[] cnames; // Which colums to retain while drawing the heatmap boolean[] keepCol = new boolean[profiles.get(0).length]; for(int i=0; i<keepCol.length; i++){ keepCol[i] = false; } //Which clusters to these columns belong to... ArrayList<Integer> colCluster = new ArrayList<Integer>(); for(int i=0; i<keepCol.length; i++){ colCluster.add(K+1); } int clusID=1; for(Cluster<int[]> c : clus){ for(int i=0; i<keepCol.length; i++){ for(int[] elems : c.getElements()){ if(elems[i] > 0){ keepCol[i] = true; if(clusID < colCluster.get(i)){ colCluster.set(i, clusID); } } } } clusID++; } // Now reorder ArrayIndexComparator comp = new ArrayIndexComparator(colCluster); Integer[] indexes = comp.createIndexArray(); Arrays.sort(indexes, comp); int sparseLenght = 0; for(int i=0;i<indexes.length; i++){ if(keepCol[indexes[i]]) sparseLenght++; else break; } matrix = new double[profiles.size()][sparseLenght]; rnames = new String[profiles.size()]; cnames = new String[sparseLenght]; //fill the cnames for(int j=0; j<sparseLenght; j++){ cnames[j] = RegionFileUtilities.int2seq(indexes[j], kmerLen); } int rowInd = 0; for(Cluster<int[]> c : clus){ for(int[] elems : c.getElements()){ for(int j=0; j<sparseLenght; j++){ matrix[rowInd][j] = elems[indexes[j]]; } rnames[rowInd] = indToLocation.get(rowInd); rowInd++; } } ret = new Mappable(matrix, rnames, cnames); return ret; } private void printMatrix(Mappable mat, String tag) throws IOException{ StringBuilder sb = new StringBuilder(); sb.append("Region"+"\t"); for(int c=0; c<mat.colnames.length; c++){ sb.append(mat.colnames[c]+"\t"); } sb.deleteCharAt(sb.length()-1);sb.append("\n"); for(int r=0; r<mat.rownmanes.length; r++){ sb.append(mat.rownmanes[r]+"\t"); for(int c=0; c<mat.colnames.length;c++){ sb.append(mat.matrix[r][c]);sb.append("\t"); } sb.deleteCharAt(sb.length()-1);sb.append("\n"); } File matout = new File(outbase.getAbsolutePath()+File.separator+outtag+"_"+tag+"_kmer.mat"); FileWriter ow = new FileWriter(matout); BufferedWriter bw = new BufferedWriter(ow); bw.write(sb.toString()); bw.close(); } public void drawClusterHeatmap(Mappable plotMat, String tag) throws IOException{ double[][] matrix = plotMat.matrix; HeatChart map = new HeatChart(matrix); map.setHighValueColour(new Color(10)); map.setLowValueColour(new Color(20)); map.setChartMargin(100); map.setAxisLabelsFont(new Font("Ariel",Font.PLAIN,55)); map.setXValues(plotMat.rownmanes); map.setYValues(plotMat.colnames); File f = new File(outbase.getAbsolutePath()+File.separator+outtag+"_"+tag+"_clusters.png"); map.saveToFile(f); } /** * Constructor that sets up the k-means object * @param itrs * @param k * @param pfls * @param otag */ public ClusterProfiles(int itrs, int k, ArrayList<int[]> pfls, HashMap<Integer,String> pflsIndsMap, int kmerL, HashMap<Integer,Double> pflscores, String otag, File odir) { setProfiles(pfls); setNumClusters(k); setOuttag(otag); setOutDir(odir); setProfileInds(pflsIndsMap); setProfileScores(pflscores); setKmerModLen(kmerL); comparator = new KmerProfileEucDistComparator(); rep = new KmerProfileAvgDistRep(comparator); Random generator = new Random(); List<int[]> starts = new ArrayList<int[]>(); for(int s=0; s<K; s++){ int r = generator.nextInt(profiles.size()); starts.add(profiles.get(r)); } method = new KMeansClustering<int[]>(comparator,rep,starts); ((KMeansClustering<int[]>)method).setIterations(itrs); } public class Mappable{ public double[][] matrix; public String[] rownmanes; public String[] colnames; public Mappable(double[][] m, String[] rnames, String[] cnames) { matrix = m; 
rownmanes = rnames; colnames = cnames; } } public class ArrayIndexComparator implements Comparator<Integer>{ ArrayList<Integer> list; public ArrayIndexComparator(ArrayList<Integer> ls) { list = ls; } public Integer[] createIndexArray(){ Integer[] indexes = new Integer[list.size()]; for(int i=0; i<indexes.length; i++){ indexes[i] = i; } return indexes; } @Override public int compare(Integer o1, Integer o2) { return list.get(o1).compareTo(list.get(o2)); } } }
src/edu/psu/compbio/seqcode/projects/akshay/clusterkmerprofile/ClusterProfiles.java
package edu.psu.compbio.seqcode.projects.akshay.clusterkmerprofile; import java.awt.Color; import java.awt.Font; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Random; import java.util.Vector; import org.tc33.jheatchart.HeatChart; import edu.psu.compbio.seqcode.gse.tools.utils.Args; import edu.psu.compbio.seqcode.gse.utils.io.RegionFileUtilities; import edu.psu.compbio.seqcode.machinelearning.clustering.Cluster; import edu.psu.compbio.seqcode.machinelearning.clustering.ClusterRepresentative; import edu.psu.compbio.seqcode.machinelearning.clustering.ClusteringMethod; import edu.psu.compbio.seqcode.machinelearning.clustering.kmeans.KMeansClustering; public class ClusterProfiles { private KmerProfileEucDistComparator comparator; private ClusteringMethod<int[]> method; private KmerProfileAvgDistRep rep; private int K; private ArrayList<int[]> profiles; // Out tag name private String outtag="out"; // The name of the directory that holds all the results // Assumes that the directory has already been created private File outbase; private HashMap<Integer,String> indToLocation; private HashMap<Integer,Double> intToLogitScore; private int kmerLen=4; // Heatmap options public final int W_MARGIN=80; public final int H_MARGIN=60; public final int W=800; public final int H=800; //Settors public void setProfiles(ArrayList<int[]> data){profiles=data;} public void setNumClusters(int nc){K=nc;} public void setOuttag(String otag){outtag=otag;} public void setOutDir(File odir){outbase = odir;} public void setProfileInds(HashMap<Integer,String> plfsinds){indToLocation=plfsinds;} public void setProfileScores(HashMap<Integer,Double> plfscore){intToLogitScore = plfscore;} public void setKmerModLen(int k){kmerLen = k;} /** * The method that should be executed after initiating the class object * @throws IOException */ public void execute(String tag) throws IOException{ Collection<Cluster<int[]>> clusters = ((KMeansClustering<int[]>)method).clusterElements(profiles,0.01); Vector<int[]> clustermeans = ((KMeansClustering<int[]>)method).getClusterMeans(); //Print the clusters writeClusters(clustermeans,tag); //Plot the clusters Mappable orderedClusters = reorderKmerProfileMaps(clusters); drawClusterHeatmap(orderedClusters, tag); printMatrix(orderedClusters,tag); } // Slave methods private void writeClusters(Vector<int[]> clusMeans, String tag) throws IOException{ File clusout = new File(outbase.getAbsolutePath()+File.separator+outtag+"_"+tag+"_clusterAssignment.list"); FileWriter ow = new FileWriter(clusout); BufferedWriter bw = new BufferedWriter(ow); for(int p=0; p<profiles.size(); p++){ int memebership = getClusterAssignment(profiles.get(p),clusMeans); bw.write(indToLocation.get(p)+"\t"+Integer.toString(memebership)+"\t"+Double.toString(intToLogitScore.get(p))+"\n"); } bw.close(); } private int getClusterAssignment(int[] pfl, Vector<int[]> clusMeans){ int minCluster = -1; double minDist = 0.0; for(int i = 0; i < clusMeans.size(); i++) { double clustDist = comparator.evaluate(pfl, clusMeans.get(i)); if(minCluster == -1 || clustDist < minDist) { minDist = clustDist; minCluster = i; } } return minCluster; } private Mappable reorderKmerProfileMaps(Collection<Cluster<int[]>> clus){ Mappable ret = null; //Mappable features double[][] matrix; String[] rnames; 
String[] cnames; // Which colums to retain while drawing the heatmap boolean[] keepCol = new boolean[profiles.get(0).length]; for(int i=0; i<keepCol.length; i++){ keepCol[i] = false; } //Which clusters to these columns belong to... ArrayList<Integer> colCluster = new ArrayList<Integer>(); for(int i=0; i<keepCol.length; i++){ colCluster.add(K+1); } int clusID=1; for(Cluster<int[]> c : clus){ for(int i=0; i<keepCol.length; i++){ for(int[] elems : c.getElements()){ if(elems[i] > 0){ keepCol[i] = true; if(clusID < colCluster.get(i)){ colCluster.set(i, clusID); } } } } clusID++; } // Now reorder ArrayIndexComparator comp = new ArrayIndexComparator(colCluster); Integer[] indexes = comp.createIndexArray(); Arrays.sort(indexes, comp); int sparseLenght = 0; for(int i=0;i<indexes.length; i++){ if(keepCol[indexes[i]]) sparseLenght++; else break; } matrix = new double[profiles.size()][sparseLenght]; rnames = new String[profiles.size()]; cnames = new String[sparseLenght]; //fill the cnames for(int j=0; j<sparseLenght; j++){ cnames[j] = RegionFileUtilities.int2seq(indexes[j], kmerLen); } int rowInd = 0; for(Cluster<int[]> c : clus){ for(int[] elems : c.getElements()){ for(int j=0; j<sparseLenght; j++){ matrix[rowInd][j] = elems[indexes[j]]; } rnames[rowInd] = indToLocation.get(rowInd); rowInd++; } } ret = new Mappable(matrix, rnames, cnames); return ret; } private void printMatrix(Mappable mat, String tag) throws IOException{ StringBuilder sb = new StringBuilder(); sb.append("Region"+"\t"); for(int c=0; c<mat.colnames.length; c++){ sb.append(mat.colnames[c]+"\t"); } sb.deleteCharAt(sb.length()-1); for(int r=0; r<mat.rownmanes.length; r++){ sb.append(mat.rownmanes[r]+"\t"); for(int c=0; c<mat.colnames.length;c++){ sb.append(mat.matrix[r][c]);sb.append("\t"); } sb.deleteCharAt(sb.length()-1); } File matout = new File(outbase.getAbsolutePath()+File.separator+outtag+"_"+tag+"_kmer.mat"); FileWriter ow = new FileWriter(matout); BufferedWriter bw = new BufferedWriter(ow); bw.write(sb.toString()); bw.close(); } public void drawClusterHeatmap(Mappable plotMat, String tag) throws IOException{ double[][] matrix = plotMat.matrix; HeatChart map = new HeatChart(matrix); map.setHighValueColour(new Color(10)); map.setLowValueColour(new Color(20)); map.setChartMargin(100); map.setAxisLabelsFont(new Font("Ariel",Font.PLAIN,55)); map.setXValues(plotMat.rownmanes); map.setYValues(plotMat.colnames); File f = new File(outbase.getAbsolutePath()+File.separator+outtag+"_"+tag+"_clusters.png"); map.saveToFile(f); } /** * Constructor that sets up the k-means object * @param itrs * @param k * @param pfls * @param otag */ public ClusterProfiles(int itrs, int k, ArrayList<int[]> pfls, HashMap<Integer,String> pflsIndsMap, int kmerL, HashMap<Integer,Double> pflscores, String otag, File odir) { setProfiles(pfls); setNumClusters(k); setOuttag(otag); setOutDir(odir); setProfileInds(pflsIndsMap); setProfileScores(pflscores); setKmerModLen(kmerL); comparator = new KmerProfileEucDistComparator(); rep = new KmerProfileAvgDistRep(comparator); Random generator = new Random(); List<int[]> starts = new ArrayList<int[]>(); for(int s=0; s<K; s++){ int r = generator.nextInt(profiles.size()); starts.add(profiles.get(r)); } method = new KMeansClustering<int[]>(comparator,rep,starts); ((KMeansClustering<int[]>)method).setIterations(itrs); } public class Mappable{ public double[][] matrix; public String[] rownmanes; public String[] colnames; public Mappable(double[][] m, String[] rnames, String[] cnames) { matrix = m; rownmanes = rnames; colnames = 
cnames; } } public class ArrayIndexComparator implements Comparator<Integer>{ ArrayList<Integer> list; public ArrayIndexComparator(ArrayList<Integer> ls) { list = ls; } public Integer[] createIndexArray(){ Integer[] indexes = new Integer[list.size()]; for(int i=0; i<indexes.length; i++){ indexes[i] = i; } return indexes; } @Override public int compare(Integer o1, Integer o2) { return list.get(o1).compareTo(list.get(o2)); } } }
Add line breaks after the header and each row of the k-mer matrix output
src/edu/psu/compbio/seqcode/projects/akshay/clusterkmerprofile/ClusterProfiles.java
Add line breaks after the header and each row of the k-mer matrix output
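The change here is two appended newlines in ClusterProfiles.printMatrix: after trimming the trailing tab from the header and from each data row, the new version also writes a line break, so the k-mer matrix file has one row per line instead of a single run-on string. A self-contained sketch of the corrected row termination follows; the region and k-mer names used as sample data are invented for illustration:

// Sketch of the fixed row termination in ClusterProfiles.printMatrix: the
// appended "\n" after the header and after every data row is what this
// commit adds. Sample column/row names below are invented.
public class MatrixWriterSketch {
    static String buildMatrix(String[] colnames, String[] rownames, double[][] matrix) {
        StringBuilder sb = new StringBuilder();
        sb.append("Region").append("\t");
        for (String c : colnames) {
            sb.append(c).append("\t");
        }
        sb.deleteCharAt(sb.length() - 1);
        sb.append("\n"); // newline after the header row (the commit's addition)
        for (int r = 0; r < rownames.length; r++) {
            sb.append(rownames[r]).append("\t");
            for (int c = 0; c < colnames.length; c++) {
                sb.append(matrix[r][c]).append("\t");
            }
            sb.deleteCharAt(sb.length() - 1);
            sb.append("\n"); // likewise after every data row
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        String out = buildMatrix(
                new String[]{"AAAA", "ACGT"},
                new String[]{"chr1:100-200", "chr2:300-400"},
                new double[][]{{1.0, 0.0}, {2.0, 3.0}});
        System.out.print(out); // one tab-separated row per line
    }
}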