| index (int64) | repo_id (string) | file_path (string) | content (string) |
|---|---|---|---|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/action/KillExecutionAction.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.action;
import azkaban.Constants;
import azkaban.ServiceProvider;
import azkaban.execapp.FlowRunnerManager;
import azkaban.trigger.TriggerAction;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
public class KillExecutionAction implements TriggerAction {
public static final String type = "KillExecutionAction";
private static final Logger logger = Logger
.getLogger(KillExecutionAction.class);
private final String actionId;
private final int execId;
public KillExecutionAction(final String actionId, final int execId) {
this.execId = execId;
this.actionId = actionId;
}
@SuppressWarnings("unchecked")
public static KillExecutionAction createFromJson(final Object obj) {
return createFromJson((HashMap<String, Object>) obj);
}
public static KillExecutionAction createFromJson(final HashMap<String, Object> obj) {
final Map<String, Object> jsonObj = obj; // already the right shape; no cast needed
final String objType = (String) jsonObj.get("type");
if (!objType.equals(type)) {
throw new RuntimeException("Cannot create action of " + type + " from "
+ objType);
}
final String actionId = (String) jsonObj.get("actionId");
final int execId = Integer.valueOf((String) jsonObj.get("execId"));
return new KillExecutionAction(actionId, execId);
}
@Override
public String getId() {
return this.actionId;
}
@Override
public String getType() {
return type;
}
@Override
public KillExecutionAction fromJson(final Object obj) throws Exception {
return createFromJson((HashMap<String, Object>) obj);
}
@Override
public Object toJson() {
final Map<String, Object> jsonObj = new HashMap<>();
jsonObj.put("actionId", this.actionId);
jsonObj.put("type", type);
jsonObj.put("execId", String.valueOf(this.execId));
return jsonObj;
}
@Override
public void doAction() throws Exception {
logger.info("ready to kill execution " + this.execId);
ServiceProvider.SERVICE_PROVIDER.getInstance(FlowRunnerManager.class)
.cancelFlow(this.execId, Constants.AZKABAN_SLA_CHECKER_USERNAME);
}
@Override
public void setContext(final Map<String, Object> context) {
}
@Override
public String getDescription() {
return type + " for " + this.execId;
}
}
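A minimal round-trip sketch of the serialization contract above, assuming the trigger framework hands actions back as a HashMap<String, Object> (which is what the casts in createFromJson imply); the action id and execution id here are hypothetical:

// toJson() stores execId as a String, which is why createFromJson parses it back
final KillExecutionAction action = new KillExecutionAction("sla-kill-1", 12345);
final HashMap<String, Object> json = (HashMap<String, Object>) action.toJson();
final KillExecutionAction restored = KillExecutionAction.createFromJson(json);
assert restored.getId().equals("sla-kill-1");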
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/action/KillJobAction.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.action;
import azkaban.ServiceProvider;
import azkaban.execapp.FlowRunnerManager;
import azkaban.trigger.TriggerAction;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
public class KillJobAction implements TriggerAction {
public static final String type = "KillJobAction";
private static final Logger logger = Logger
.getLogger(KillJobAction.class);
private final String actionId;
private final int execId;
private final String jobId;
public KillJobAction(final String actionId, final int execId, final String jobId) {
this.execId = execId;
this.actionId = actionId;
this.jobId = jobId;
}
public static KillJobAction createFromJson(final HashMap<String, Object> obj) {
final Map<String, Object> jsonObj = obj; // already the right shape; no cast needed
final String objType = (String) jsonObj.get("type");
if (!objType.equals(type)) {
throw new RuntimeException("Cannot create action of " + type + " from "
+ objType);
}
final String actionId = (String) jsonObj.get("actionId");
final int execId = Integer.valueOf((String) jsonObj.get("execId"));
final String jobId = (String) jsonObj.get("jobId");
return new KillJobAction(actionId, execId, jobId);
}
@Override
public String getId() {
return this.actionId;
}
@Override
public String getType() {
return type;
}
@SuppressWarnings("unchecked")
@Override
public KillJobAction fromJson(final Object obj) throws Exception {
return createFromJson((HashMap<String, Object>) obj);
}
@Override
public Object toJson() {
final Map<String, Object> jsonObj = new HashMap<>();
jsonObj.put("actionId", this.actionId);
jsonObj.put("type", type);
jsonObj.put("execId", String.valueOf(this.execId));
jsonObj.put("jobId", String.valueOf(this.jobId));
return jsonObj;
}
@Override
public void doAction() throws Exception {
logger.info("ready to do action " + getDescription());
final FlowRunnerManager flowRunnerManager = ServiceProvider.SERVICE_PROVIDER
.getInstance(FlowRunnerManager.class);
flowRunnerManager.cancelJobBySLA(this.execId, this.jobId);
}
@Override
public void setContext(final Map<String, Object> context) {
}
@Override
public String getDescription() {
return type + " for execution " + this.execId + " jobId " + this.jobId;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/BlockingStatus.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.event;
import azkaban.executor.Status;
public class BlockingStatus {
private static final long WAIT_TIME = 300000; // 5 * 60 * 1000
private final int execId;
private final String jobId;
private Status status;
public BlockingStatus(final int execId, final String jobId, final Status initialStatus) {
this.execId = execId;
this.jobId = jobId;
this.status = initialStatus;
}
public Status blockOnFinishedStatus() {
if (this.status == null) {
return null;
}
while (!Status.isStatusFinished(this.status)) {
synchronized (this) {
try {
this.wait(WAIT_TIME);
} catch (final InterruptedException e) {
// interrupted or timed out: fall through and re-check the status
}
}
}
return this.status;
}
public Status viewStatus() {
return this.status;
}
public void unblock() {
synchronized (this) {
this.notifyAll();
}
}
public void changeStatus(final Status status) {
synchronized (this) {
this.status = status;
if (Status.isStatusFinished(status)) {
unblock();
}
}
}
public int getExecId() {
return this.execId;
}
public String getJobId() {
return this.jobId;
}
}
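A hedged sketch of the wait/notify handshake above: one thread blocks until the job reaches a finished status while another publishes the change, the way FlowWatcher.handleJobStatusChange does below. Status.RUNNING and Status.SUCCEEDED are assumed members of azkaban.executor.Status:

final BlockingStatus watch = new BlockingStatus(12345, "jobA", Status.RUNNING);
final Thread waiter = new Thread(() -> {
  // re-checks at most every WAIT_TIME (5 minutes) until a finished status arrives
  final Status terminal = watch.blockOnFinishedStatus();
  System.out.println("jobA finished with " + terminal);
});
waiter.start();
// later, from the watcher side; a finished status also triggers unblock()
watch.changeStatus(Status.SUCCEEDED);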
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/FlowWatcher.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.event;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableFlowBase;
import azkaban.executor.ExecutableNode;
import azkaban.executor.Status;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.log4j.Logger;
public abstract class FlowWatcher {
private final int execId;
private final Map<String, BlockingStatus> map =
new ConcurrentHashMap<>();
private Logger logger;
private ExecutableFlow flow;
private boolean cancelWatch = false;
public FlowWatcher(final int execId) {
this.execId = execId;
}
public void setFlow(final ExecutableFlow flow) {
this.flow = flow;
}
protected Logger getLogger() {
return this.logger;
}
public void setLogger(final Logger logger) {
this.logger = logger;
}
/**
* Called to fire events to the JobRunner listeners
*/
protected synchronized void handleJobStatusChange(final String jobId, final Status status) {
final BlockingStatus block = this.map.get(jobId);
if (block != null) {
block.changeStatus(status);
}
}
public int getExecId() {
return this.execId;
}
public synchronized BlockingStatus getBlockingStatus(final String jobId) {
if (this.cancelWatch) {
return null;
}
final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
if (node == null) {
return null;
}
BlockingStatus blockingStatus = this.map.get(jobId);
if (blockingStatus == null) {
blockingStatus = new BlockingStatus(this.execId, jobId, node.getStatus());
this.map.put(jobId, blockingStatus);
}
return blockingStatus;
}
public Status peekStatus(final String jobId) {
if (Status.isStatusFinished(this.flow.getStatus())) {
return null;
}
final ExecutableNode node = this.flow.getExecutableNodePath(jobId);
if (node != null) {
ExecutableFlowBase parentFlow = node.getParentFlow();
while (parentFlow != null) {
Status parentStatus = parentFlow.getStatus();
if (parentStatus == Status.SKIPPED || parentStatus == Status.DISABLED) {
return Status.SKIPPED;
}
parentFlow = parentFlow.getParentFlow();
}
return node.getStatus();
}
return null;
}
public synchronized void unblockAllWatches() {
this.logger.info("Unblock all watches on " + this.execId);
this.cancelWatch = true;
for (final BlockingStatus status : this.map.values()) {
this.logger.info("Unblocking " + status.getJobId());
status.changeStatus(Status.SKIPPED);
status.unblock();
}
this.logger.info("Successfully unblocked all watches on " + this.execId);
}
public boolean isWatchCancelled() {
return this.cancelWatch;
}
public abstract void stopWatcher();
}
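A sketch of how a pipelined consumer might block on an upstream job through the contract above; getBlockingStatus returns null when the watch has been cancelled or the job path is unknown, so the caller should fall back instead of blocking:

static Status waitForUpstreamJob(final FlowWatcher watcher, final String jobId) {
  final BlockingStatus block = watcher.getBlockingStatus(jobId);
  // null means the watch was cancelled or the node does not exist; treat as skipped
  return block == null ? Status.SKIPPED : block.blockOnFinishedStatus();
}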
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/JobCallbackManager.java
|
package azkaban.execapp.event;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_TOKEN;
import static azkaban.jobcallback.JobCallbackStatusEnum.COMPLETED;
import static azkaban.jobcallback.JobCallbackStatusEnum.FAILURE;
import static azkaban.jobcallback.JobCallbackStatusEnum.STARTED;
import static azkaban.jobcallback.JobCallbackStatusEnum.SUCCESS;
import azkaban.event.Event;
import azkaban.event.EventData;
import azkaban.event.EventListener;
import azkaban.execapp.JobRunner;
import azkaban.execapp.jmx.JmxJobCallback;
import azkaban.execapp.jmx.JmxJobCallbackMBean;
import azkaban.executor.Status;
import azkaban.jobcallback.JobCallbackStatusEnum;
import azkaban.spi.EventType;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import java.net.InetAddress;
import java.net.URL;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.message.BasicHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Responsible for processing job callback properties on job status change events.
 *
 * When job callback properties are specified, they are converted to HTTP calls to execute. The
 * HTTP requests are made asynchronously, so callers of the handleEvent method are not blocked.
 * In addition, the HTTP calls are configured with appropriate timeouts for connection request,
 * connection creation, and socket reads.
 *
 * The HTTP request and response are logged to the job's log for debugging and traceability
 * purposes.
 *
 * @author hluu
 */
public class JobCallbackManager implements EventListener {
private static final Logger logger = LoggerFactory.getLogger(JobCallbackManager.class);
private static final JobCallbackStatusEnum[] ON_COMPLETION_JOB_CALLBACK_STATUS =
{SUCCESS, FAILURE, COMPLETED};
private static boolean isInitialized = false;
private static JobCallbackManager instance;
private static int maxNumCallBack = 3;
private final JmxJobCallbackMBean callbackMbean;
private final String azkabanHostName;
private final SimpleDateFormat gmtDateFormatter;
private JobCallbackManager(final Props props) {
maxNumCallBack = props.getInt("jobcallback.max_count", maxNumCallBack);
// initialize the request maker
JobCallbackRequestMaker.initialize(props);
this.callbackMbean =
new JmxJobCallback(JobCallbackRequestMaker.getInstance()
.getJobcallbackMetrics());
this.azkabanHostName = getAzkabanHostName(props);
this.gmtDateFormatter = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
this.gmtDateFormatter.setTimeZone(TimeZone.getTimeZone("GMT"));
logger.info("Initialization completed " + getClass().getName());
logger.info("azkabanHostName " + this.azkabanHostName);
}
public static void initialize(final Props props) {
if (isInitialized) {
logger.info("Already initialized");
return;
}
logger.info("Initializing");
instance = new JobCallbackManager(props);
isInitialized = true;
}
public static boolean isInitialized() {
return isInitialized;
}
public static JobCallbackManager getInstance() {
if (!isInitialized) {
throw new IllegalStateException(JobCallbackManager.class.getName()
+ " has not been initialized");
}
return instance;
}
public JmxJobCallbackMBean getJmxJobCallbackMBean() {
return this.callbackMbean;
}
@Override
public void handleEvent(final Event event) {
if (!isInitialized) {
return;
}
if (event.getRunner() instanceof JobRunner) {
try {
if (event.getType() == EventType.JOB_STARTED) {
processJobCallOnStart(event);
} else if (event.getType() == EventType.JOB_FINISHED) {
processJobCallOnFinish(event);
}
} catch (final Throwable e) {
// Use job runner logger so user can see the issue in their job log
final JobRunner jobRunner = (JobRunner) event.getRunner();
jobRunner.getLogger().error(
"Encountered error while handling job callback event", e);
this.logger.warn("Error during handleEvent for event {}, execId: {}",
event.getData().getStatus(), jobRunner.getNode().getParentFlow().getExecutionId());
this.logger.warn(e.getMessage(), e);
}
} else {
logger.warn("((( Got an unsupported runner: "
+ event.getRunner().getClass().getName() + " )))");
}
}
private void processJobCallOnFinish(final Event event) {
final JobRunner jobRunner = (JobRunner) event.getRunner();
final EventData eventData = event.getData();
if (!JobCallbackUtil.isThereJobCallbackProperty(jobRunner.getProps(),
ON_COMPLETION_JOB_CALLBACK_STATUS)) {
this.logger.info("No callback property for {}, exec id: {}", eventData.getStatus(),
jobRunner.getNode().getParentFlow().getExecutionId());
return;
}
// don't want to waste time resolving properties if there are no
// callback properties to parse
final Props props = PropsUtils.resolveProps(jobRunner.getProps());
final Map<String, String> contextInfo =
JobCallbackUtil.buildJobContextInfoMap(event, this.azkabanHostName);
JobCallbackStatusEnum jobCallBackStatusEnum = null;
final Status jobStatus = eventData.getStatus();
if (jobStatus == Status.SUCCEEDED) {
jobCallBackStatusEnum = JobCallbackStatusEnum.SUCCESS;
} else if (jobStatus == Status.FAILED
|| jobStatus == Status.FAILED_FINISHING || jobStatus == Status.KILLED) {
jobCallBackStatusEnum = JobCallbackStatusEnum.FAILURE;
} else {
this.logger.info("!!!! WE ARE NOT SUPPORTING JOB CALLBACKS FOR STATUS: "
+ jobStatus);
jobCallBackStatusEnum = null; // to be explicit
}
final String jobId = contextInfo.get(CONTEXT_JOB_TOKEN);
if (jobCallBackStatusEnum != null) {
final List<HttpRequestBase> jobCallbackHttpRequests =
JobCallbackUtil.parseJobCallbackProperties(props,
jobCallBackStatusEnum, contextInfo, maxNumCallBack, this.logger);
if (!jobCallbackHttpRequests.isEmpty()) {
final String msg =
String.format("Making %d job callbacks for status: %s",
jobCallbackHttpRequests.size(), jobCallBackStatusEnum.name());
this.logger.info(msg);
addDefaultHeaders(jobCallbackHttpRequests);
JobCallbackRequestMaker.getInstance().makeHttpRequest(jobId, this.logger,
jobCallbackHttpRequests);
} else {
this.logger.info("No job callbacks for status: " + jobCallBackStatusEnum);
}
}
// for completed status
final List<HttpRequestBase> httpRequestsForCompletedStatus =
JobCallbackUtil.parseJobCallbackProperties(props, COMPLETED,
contextInfo, maxNumCallBack, this.logger);
// now make the call
if (!httpRequestsForCompletedStatus.isEmpty()) {
this.logger.info("Making " + httpRequestsForCompletedStatus.size()
+ " job callbacks for status: " + COMPLETED);
addDefaultHeaders(httpRequestsForCompletedStatus);
JobCallbackRequestMaker.getInstance().makeHttpRequest(jobId, this.logger,
httpRequestsForCompletedStatus);
} else {
this.logger.info("No job callbacks for status: " + COMPLETED);
}
}
private void processJobCallOnStart(final Event event) {
final JobRunner jobRunner = (JobRunner) event.getRunner();
if (JobCallbackUtil.isThereJobCallbackProperty(jobRunner.getProps(),
JobCallbackStatusEnum.STARTED)) {
// don't want to waste time resolving properties if there are no
// callback properties to parse
final Props props = PropsUtils.resolveProps(jobRunner.getProps());
final Map<String, String> contextInfo =
JobCallbackUtil.buildJobContextInfoMap(event, this.azkabanHostName);
final List<HttpRequestBase> jobCallbackHttpRequests =
JobCallbackUtil.parseJobCallbackProperties(props, STARTED,
contextInfo, maxNumCallBack, this.logger);
final String jobId = contextInfo.get(CONTEXT_JOB_TOKEN);
final String msg =
String.format("Making %d job callbacks for job %s for jobStatus: %s",
jobCallbackHttpRequests.size(), jobId, STARTED.name());
jobRunner.getLogger().info(msg);
addDefaultHeaders(jobCallbackHttpRequests);
JobCallbackRequestMaker.getInstance().makeHttpRequest(jobId,
this.logger, jobCallbackHttpRequests);
}
}
private String getAzkabanHostName(final Props props) {
final String baseURL = props.get(JobRunner.AZKABAN_WEBSERVER_URL);
try {
String hostName = InetAddress.getLocalHost().getHostName();
if (baseURL != null) {
final URL url = new URL(baseURL);
hostName = url.getHost() + ":" + url.getPort();
}
return hostName;
} catch (final Exception e) {
throw new IllegalStateException(
"Encountered an error while getting the Azkaban host name", e);
}
}
private void addDefaultHeaders(final List<HttpRequestBase> httpRequests) {
if (httpRequests == null) {
return;
}
// SimpleDateFormat is not thread-safe, so build a fresh GMT formatter per call
// rather than sharing this.gmtDateFormatter across callback threads
final SimpleDateFormat format =
new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss z");
format.setTimeZone(TimeZone.getTimeZone("GMT"));
for (final HttpRequestBase httpRequest : httpRequests) {
httpRequest.addHeader(new BasicHeader("Date", format.format(new Date())));
}
}
}
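A hedged wiring sketch: the manager is a process-wide singleton initialized once from the server Props and then attached wherever JobRunner events are dispatched. Props.put(String, String) and JobRunner.addListener(EventListener) are assumptions inferred from the Props usage and the EventListener design above:

static void enableJobCallbacks(final Props serverProps, final JobRunner jobRunner) {
  if (!JobCallbackManager.isInitialized()) {
    JobCallbackManager.initialize(serverProps); // also initializes the request maker
  }
  // assumed listener hookup, mirroring FlowRunner.addListener in LocalFlowWatcher below
  jobRunner.addListener(JobCallbackManager.getInstance());
}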
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/JobCallbackRequestMaker.java
|
package azkaban.execapp.event;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_CONNECTION_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_RESPONSE_WAIT_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_SOCKET_TIMEOUT;
import static azkaban.Constants.JobCallbackProperties.JOBCALLBACK_THREAD_POOL_SIZE;
import azkaban.utils.Props;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.SocketTimeoutException;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.HttpClient;
import org.apache.http.client.ResponseHandler;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.client.protocol.HttpClientContext;
import org.apache.http.impl.client.FutureRequestExecutionMetrics;
import org.apache.http.impl.client.FutureRequestExecutionService;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.http.impl.client.HttpRequestFutureTask;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Responsible for making the job callback HTTP requests.
 *
 * One of the requirements is to log the request information and response using the given
 * logger, which should be the job's logger.
 *
 * @author hluu
 */
public class JobCallbackRequestMaker {
private static final Logger logger = LoggerFactory.getLogger(JobCallbackRequestMaker.class);
private static final int DEFAULT_TIME_OUT_MS = 3000;
private static final int DEFAULT_RESPONSE_WAIT_TIME_OUT_MS = 5000;
private static final int MAX_RESPONSE_LINE_TO_PRINT = 50;
private static final int DEFAULT_THREAD_POOL_SIZE = 10;
private static JobCallbackRequestMaker instance;
private static boolean isInitialized = false;
private final FutureRequestExecutionService futureRequestExecutionService;
private int responseWaitTimeoutMS = -1;
private JobCallbackRequestMaker(final Props props) {
final int connectionRequestTimeout =
props.getInt(JOBCALLBACK_CONNECTION_REQUEST_TIMEOUT, DEFAULT_TIME_OUT_MS);
final int connectionTimeout = props.getInt(JOBCALLBACK_CONNECTION_TIMEOUT, DEFAULT_TIME_OUT_MS);
final int socketTimeout = props.getInt(JOBCALLBACK_SOCKET_TIMEOUT, DEFAULT_TIME_OUT_MS);
this.responseWaitTimeoutMS =
props.getInt(JOBCALLBACK_RESPONSE_WAIT_TIMEOUT, DEFAULT_RESPONSE_WAIT_TIME_OUT_MS);
logger.info("responseWaitTimeoutMS: " + this.responseWaitTimeoutMS);
final RequestConfig requestConfig =
RequestConfig.custom()
.setConnectionRequestTimeout(connectionRequestTimeout)
.setConnectTimeout(connectionTimeout)
.setSocketTimeout(socketTimeout).build();
logger.info("Global request configuration " + requestConfig.toString());
final HttpClient httpClient =
HttpClientBuilder.create().setDefaultRequestConfig(requestConfig)
.build();
final int jobCallbackThreadPoolSize =
props.getInt(JOBCALLBACK_THREAD_POOL_SIZE, DEFAULT_THREAD_POOL_SIZE);
logger.info("Jobcall thread pool size: " + jobCallbackThreadPoolSize);
final ExecutorService executorService =
Executors.newFixedThreadPool(jobCallbackThreadPoolSize);
this.futureRequestExecutionService =
new FutureRequestExecutionService(httpClient, executorService);
}
public static void initialize(final Props props) {
if (props == null) {
throw new NullPointerException("props argument can't be null");
}
if (isInitialized) {
return;
}
instance = new JobCallbackRequestMaker(props);
isInitialized = true;
logger.info("Initialization for " + JobCallbackRequestMaker.class.getName()
+ " is completed");
}
public static boolean isInitialized() {
return isInitialized;
}
public static JobCallbackRequestMaker getInstance() {
if (!isInitialized) {
throw new IllegalStateException(JobCallbackRequestMaker.class.getName()
+ " has not been initialized");
}
return instance;
}
public FutureRequestExecutionMetrics getJobcallbackMetrics() {
return this.futureRequestExecutionService.metrics();
}
public void makeHttpRequest(final String jobId, final Logger logger,
final List<HttpRequestBase> httpRequestList) {
if (httpRequestList == null || httpRequestList.isEmpty()) {
logger.info("No HTTP requests to make");
return;
}
for (final HttpRequestBase httpRequest : httpRequestList) {
logger.debug("Job callback http request: " + httpRequest.toString());
logger.debug("headers [");
for (final Header header : httpRequest.getAllHeaders()) {
logger.debug(String.format(" %s : %s", header.getName(),
header.getValue()));
}
logger.debug("]");
final HttpRequestFutureTask<Integer> task =
this.futureRequestExecutionService.execute(httpRequest,
HttpClientContext.create(), new LoggingResponseHandler(logger));
try {
// get with timeout
final Integer statusCode =
task.get(this.responseWaitTimeoutMS, TimeUnit.MILLISECONDS);
logger.info("http callback status code: " + statusCode);
} catch (final TimeoutException timeOutEx) {
logger.warn("Job callback target took longer than "
+ (this.responseWaitTimeoutMS / 1000) + " seconds to respond",
timeOutEx);
} catch (final ExecutionException ee) {
if (ee.getCause() instanceof SocketTimeoutException) {
logger.warn("Job callback target took longer than "
+ (this.responseWaitTimeoutMS / 1000) + " seconds to respond", ee);
} else {
logger.warn(
"Encountered error while waiting for job callback to complete",
ee);
}
} catch (final Throwable e) {
// pass the Throwable itself so the stack trace reaches the job log
logger.warn(
"Encountered error while waiting for job callback to complete for: " + jobId,
e);
}
}
}
/**
* Response handler for logging job callback response using the given logger instance
*
* @author hluu
*/
private static final class LoggingResponseHandler implements
ResponseHandler<Integer> {
private final Logger logger;
public LoggingResponseHandler(final Logger logger) {
if (logger == null) {
throw new NullPointerException("Argument logger can't be null");
}
this.logger = logger;
}
@Override
public Integer handleResponse(final HttpResponse response)
throws ClientProtocolException, IOException {
final int statusCode = response.getStatusLine().getStatusCode();
BufferedReader bufferedReader = null;
try {
final HttpEntity responseEntity = response.getEntity();
if (responseEntity != null) {
bufferedReader =
new BufferedReader(new InputStreamReader(
responseEntity.getContent(), StandardCharsets.UTF_8));
String line = "";
int lineCount = 0;
this.logger.info("HTTP response [");
while ((line = bufferedReader.readLine()) != null) {
this.logger.info(line);
lineCount++;
if (lineCount > MAX_RESPONSE_LINE_TO_PRINT) {
break;
}
}
this.logger.info("]");
} else {
this.logger.info("No response");
}
} catch (final Throwable t) {
this.logger.warn(
"Encountered error while logging out job callback response", t);
} finally {
if (bufferedReader != null) {
try {
bufferedReader.close();
} catch (final IOException ex) {
// don't care
}
}
}
return statusCode;
}
}
}
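A minimal usage sketch assuming the default timeouts are acceptable: initialize once with Props (the timeout keys come from Constants.JobCallbackProperties), then fire a GET callback, passing the job's org.slf4j.Logger so the response lands in the job log. The URL is hypothetical, and Props() is assumed to construct an empty property set:

JobCallbackRequestMaker.initialize(new Props()); // falls back to 3s timeouts, 5s wait
final List<HttpRequestBase> requests = new ArrayList<>();
requests.add(new HttpGet("http://callback.example.com/notify?job=jobA"));
JobCallbackRequestMaker.getInstance().makeHttpRequest("jobA", jobLogger, requests);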
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/JobCallbackUtil.java
|
package azkaban.execapp.event;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_EXECUTION_ID_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_FLOW_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_STATUS_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_JOB_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_PROJECT_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.CONTEXT_SERVER_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.FIRST_JOB_CALLBACK_URL_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.HEADER_ELEMENT_DELIMITER;
import static azkaban.jobcallback.JobCallbackConstants.HEADER_NAME_VALUE_DELIMITER;
import static azkaban.jobcallback.JobCallbackConstants.HTTP_GET;
import static azkaban.jobcallback.JobCallbackConstants.HTTP_POST;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_BODY_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_REQUEST_HEADERS_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_REQUEST_METHOD_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_URL_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.SEQUENCE_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.STATUS_TOKEN;
import azkaban.event.Event;
import azkaban.event.EventData;
import azkaban.execapp.JobRunner;
import azkaban.executor.ExecutableNode;
import azkaban.jobcallback.JobCallbackStatusEnum;
import azkaban.utils.Props;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.http.Header;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpRequestBase;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class JobCallbackUtil {
private static final Logger logger = LoggerFactory.getLogger(JobCallbackUtil.class);
private static final Map<JobCallbackStatusEnum, String> firstJobcallbackPropertyMap =
new HashMap<>(
JobCallbackStatusEnum.values().length);
static {
for (final JobCallbackStatusEnum statusEnum : JobCallbackStatusEnum.values()) {
firstJobcallbackPropertyMap.put(statusEnum,
replaceStatusToken(FIRST_JOB_CALLBACK_URL_TEMPLATE, statusEnum));
}
}
/**
 * Used to quickly determine whether there is a job-callback-related property in the Props.
 *
 * @return true if there is a job-callback-related property
 */
public static boolean isThereJobCallbackProperty(final Props props,
final JobCallbackStatusEnum status) {
if (props == null || status == null) {
throw new NullPointerException("One of the arguments is null");
}
final String jobCallBackUrl = firstJobcallbackPropertyMap.get(status);
return props.containsKey(jobCallBackUrl);
}
public static boolean isThereJobCallbackProperty(final Props props,
final JobCallbackStatusEnum... jobStatuses) {
if (props == null || jobStatuses == null) {
throw new NullPointerException("One of the arguments is null");
}
for (final JobCallbackStatusEnum jobStatus : jobStatuses) {
if (JobCallbackUtil.isThereJobCallbackProperty(props, jobStatus)) {
return true;
}
}
return false;
}
public static List<HttpRequestBase> parseJobCallbackProperties(final Props props,
final JobCallbackStatusEnum status, final Map<String, String> contextInfo,
final int maxNumCallback) {
return parseJobCallbackProperties(props, status, contextInfo,
maxNumCallback, logger);
}
/**
 * This method is responsible for parsing job callback URL properties and converting them into
 * a list of HttpRequestBase objects, which callers can execute.
 *
 * In addition to parsing, it replaces the tokens with actual values.
 *
 * @return List<HttpRequestBase> - empty if there are no job callback related properties
 */
public static List<HttpRequestBase> parseJobCallbackProperties(final Props props,
final JobCallbackStatusEnum status, final Map<String, String> contextInfo,
final int maxNumCallback, final Logger privateLogger) {
String callbackUrl = null;
if (!isThereJobCallbackProperty(props, status)) {
// short circuit
return Collections.emptyList();
}
final List<HttpRequestBase> result = new ArrayList<>();
// replace property templates with status
final String jobCallBackUrlKey =
replaceStatusToken(JOB_CALLBACK_URL_TEMPLATE, status);
final String requestMethod =
replaceStatusToken(JOB_CALLBACK_REQUEST_METHOD_TEMPLATE, status);
final String httpBodyKey = replaceStatusToken(JOB_CALLBACK_BODY_TEMPLATE, status);
final String headersKey =
replaceStatusToken(JOB_CALLBACK_REQUEST_HEADERS_TEMPLATE, status);
for (int sequence = 1; sequence <= maxNumCallback; sequence++) {
HttpRequestBase httpRequest = null;
final String sequenceStr = Integer.toString(sequence);
// callback url
final String callbackUrlKey =
jobCallBackUrlKey.replace(SEQUENCE_TOKEN, sequenceStr);
callbackUrl = props.get(callbackUrlKey);
if (callbackUrl == null || callbackUrl.length() == 0) {
// no more callback URLs for this status; stop scanning
break;
} else {
final String callbackUrlWithTokenReplaced =
replaceTokens(callbackUrl, contextInfo, true);
final String requestMethodKey =
requestMethod.replace(SEQUENCE_TOKEN, sequenceStr);
final String method = props.getString(requestMethodKey, HTTP_GET);
if (HTTP_POST.equals(method)) {
final String postBodyKey = httpBodyKey.replace(SEQUENCE_TOKEN, sequenceStr);
final String httpBodyValue = props.get(postBodyKey);
if (httpBodyValue == null) {
// missing body for a POST callback; skip this callback URL
// (the wiki should note that callbacks are skipped when the body is missing)
privateLogger.warn("Missing value for key: " + postBodyKey
+ ", skipping job callback '" + callbackUrl + "' for job "
+ contextInfo.get(CONTEXT_JOB_TOKEN));
} else {
// assemble the POST request
final HttpPost httpPost = new HttpPost(callbackUrlWithTokenReplaced);
final String postActualBody =
replaceTokens(httpBodyValue, contextInfo, false);
privateLogger.info("postActualBody: " + postActualBody);
httpPost.setEntity(createStringEntity(postActualBody));
httpRequest = httpPost;
}
} else if (HTTP_GET.equals(method)) {
// GET
httpRequest = new HttpGet(callbackUrlWithTokenReplaced);
} else {
privateLogger.warn("Unsupported request method: " + method
+ ". Only POST and GET are supported");
}
final String headersKeyPerSequence =
headersKey.replace(SEQUENCE_TOKEN, sequenceStr);
final String headersValue = props.get(headersKeyPerSequence);
privateLogger.info("headers: " + headersValue);
// httpRequest is null when the POST body was missing or the method was
// unsupported; skip it rather than adding a null request
if (httpRequest != null) {
final Header[] headers = parseHttpHeaders(headersValue);
if (headers != null) {
httpRequest.setHeaders(headers);
privateLogger.info("# of headers found: " + headers.length);
}
result.add(httpRequest);
}
}
}
return result;
}
/**
* Parse headers
*
* @return null if headers is null or empty
*/
public static Header[] parseHttpHeaders(final String headers) {
if (headers == null || headers.length() == 0) {
return null;
}
final String[] headerArray = headers.split(HEADER_ELEMENT_DELIMITER);
final List<Header> headerList = new ArrayList<>(headerArray.length);
for (int i = 0; i < headerArray.length; i++) {
final String headerPair = headerArray[i];
final int index = headerPair.indexOf(HEADER_NAME_VALUE_DELIMITER);
if (index != -1) {
headerList.add(new BasicHeader(headerPair.substring(0, index),
headerPair.substring(index + 1)));
}
}
return headerList.toArray(new BasicHeader[0]);
}
private static String replaceStatusToken(final String template,
final JobCallbackStatusEnum status) {
// quote the token so any regex metacharacters in STATUS_TOKEN are treated
// literally, consistent with the Pattern.quote usage in replaceTokens below
return template.replaceAll(Pattern.quote(STATUS_TOKEN), status.name().toLowerCase());
}
private static StringEntity createStringEntity(final String str) {
try {
return new StringEntity(str);
} catch (final UnsupportedEncodingException e) {
throw new RuntimeException("Encoding not supported", e);
}
}
/**
 * This method takes the job context info and puts the values into a map keyed by the
 * supported tokens.
 *
 * @return Map<String,String>
 */
public static Map<String, String> buildJobContextInfoMap(final Event event,
final String server) {
if (event.getRunner() instanceof JobRunner) {
final JobRunner jobRunner = (JobRunner) event.getRunner();
final ExecutableNode node = jobRunner.getNode();
final EventData eventData = event.getData();
final String projectName = node.getParentFlow().getProjectName();
final String flowName = node.getParentFlow().getFlowId();
final String executionId =
String.valueOf(node.getParentFlow().getExecutionId());
final String jobId = node.getId();
final Map<String, String> result = new HashMap<>();
result.put(CONTEXT_SERVER_TOKEN, server);
result.put(CONTEXT_PROJECT_TOKEN, projectName);
result.put(CONTEXT_FLOW_TOKEN, flowName);
result.put(CONTEXT_EXECUTION_ID_TOKEN, executionId);
result.put(CONTEXT_JOB_TOKEN, jobId);
result.put(CONTEXT_JOB_STATUS_TOKEN, eventData.getStatus().name().toLowerCase());
/*
* if (node.getStatus() == Status.SUCCEEDED || node.getStatus() ==
* Status.FAILED) { result.put(JOB_STATUS_TOKEN,
* node.getStatus().name().toLowerCase()); } else if (node.getStatus() ==
* Status.PREPARING) { result.put(JOB_STATUS_TOKEN, "started"); }
*/
return result;
} else {
throw new IllegalArgumentException("Provided event is not a job event");
}
}
/**
 * Replace the supported tokens in the URL with values from the contextInfo, optionally
 * URL-encoding the values.
 *
 * @param withEncoding - whether the token values will be URL encoded
 * @return String - value with tokens replaced with values
 */
public static String replaceTokens(final String value,
final Map<String, String> contextInfo, final boolean withEncoding) {
String result = value;
String tokenValue =
encodeQueryParam(contextInfo.get(CONTEXT_SERVER_TOKEN), withEncoding);
result = result.replaceAll(Pattern.quote(CONTEXT_SERVER_TOKEN), tokenValue);
tokenValue = encodeQueryParam(contextInfo.get(CONTEXT_PROJECT_TOKEN), withEncoding);
result = result.replaceAll(Pattern.quote(CONTEXT_PROJECT_TOKEN), tokenValue);
tokenValue = encodeQueryParam(contextInfo.get(CONTEXT_FLOW_TOKEN), withEncoding);
result = result.replaceAll(Pattern.quote(CONTEXT_FLOW_TOKEN), tokenValue);
tokenValue = encodeQueryParam(contextInfo.get(CONTEXT_JOB_TOKEN), withEncoding);
result = result.replaceAll(Pattern.quote(CONTEXT_JOB_TOKEN), tokenValue);
tokenValue =
encodeQueryParam(contextInfo.get(CONTEXT_EXECUTION_ID_TOKEN), withEncoding);
result = result.replaceAll(Pattern.quote(CONTEXT_EXECUTION_ID_TOKEN), tokenValue);
tokenValue =
encodeQueryParam(contextInfo.get(CONTEXT_JOB_STATUS_TOKEN), withEncoding);
result = result.replaceAll(Pattern.quote(CONTEXT_JOB_STATUS_TOKEN), tokenValue);
return result;
}
private static String encodeQueryParam(final String str, final boolean withEncoding) {
if (!withEncoding) {
return str;
}
try {
return URLEncoder.encode(str, "UTF-8");
} catch (final UnsupportedEncodingException e) {
throw new IllegalArgumentException(
"Encountered problem during encoding:", e);
}
}
}
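A sketch exercising the two pure helpers above. The literal values of the delimiter and token constants live in JobCallbackConstants, so the inputs are composed from the constants instead of guessing their values. Note that replaceTokens substitutes (and encodes) every supported token unconditionally, so the context map must carry all six tokens:

final String raw = "Content-Type" + HEADER_NAME_VALUE_DELIMITER + "application/json";
final Header[] headers = JobCallbackUtil.parseHttpHeaders(raw); // one BasicHeader

final Map<String, String> context = new HashMap<>();
context.put(CONTEXT_SERVER_TOKEN, "exec-host:12321");
context.put(CONTEXT_PROJECT_TOKEN, "my project"); // the space exercises URL encoding
context.put(CONTEXT_FLOW_TOKEN, "daily_flow");
context.put(CONTEXT_JOB_TOKEN, "jobA");
context.put(CONTEXT_EXECUTION_ID_TOKEN, "12345");
context.put(CONTEXT_JOB_STATUS_TOKEN, "success");
final String url = JobCallbackUtil.replaceTokens(
    "http://callback.example.com/" + CONTEXT_PROJECT_TOKEN + "/" + CONTEXT_JOB_TOKEN,
    context, true); // with encoding on, "my project" becomes "my+project"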
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/LocalFlowWatcher.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.event;
import azkaban.event.Event;
import azkaban.event.EventData;
import azkaban.event.EventListener;
import azkaban.execapp.FlowRunner;
import azkaban.execapp.JobRunner;
import azkaban.executor.ExecutableNode;
import azkaban.spi.EventType;
public class LocalFlowWatcher extends FlowWatcher {
private final LocalFlowWatcherListener watcherListener;
private FlowRunner runner;
private boolean isShutdown = false;
public LocalFlowWatcher(final FlowRunner runner) {
super(runner.getExecutableFlow().getExecutionId());
super.setFlow(runner.getExecutableFlow());
this.watcherListener = new LocalFlowWatcherListener();
this.runner = runner;
runner.addListener(this.watcherListener);
}
@Override
public void stopWatcher() {
// Just freeing stuff
if (this.isShutdown) {
return;
}
this.isShutdown = true;
this.runner.removeListener(this.watcherListener);
this.runner = null;
getLogger().info("Stopping watcher, and unblocking pipeline");
super.unblockAllWatches();
}
public class LocalFlowWatcherListener implements EventListener {
@Override
public void handleEvent(final Event event) {
if (event.getType() == EventType.JOB_FINISHED) {
if (event.getRunner() instanceof FlowRunner) {
// the flow runner can finish a job without actually running it (e.g. disabled)
final EventData eventData = event.getData();
if (eventData.getNestedId() != null) {
handleJobStatusChange(eventData.getNestedId(), eventData.getStatus());
}
} else if (event.getRunner() instanceof JobRunner) {
// A job runner is finished
final JobRunner runner = (JobRunner) event.getRunner();
final ExecutableNode node = runner.getNode();
getLogger().info(node + " looks like " + node.getStatus());
handleJobStatusChange(node.getNestedId(), node.getStatus());
}
} else if (event.getType() == EventType.FLOW_FINISHED) {
stopWatcher();
}
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/event/RemoteFlowWatcher.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.event;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import java.util.ArrayList;
import java.util.Map;
public class RemoteFlowWatcher extends FlowWatcher {
private final static long CHECK_INTERVAL_MS = 60000; // 60 * 1000
private int execId;
private ExecutorLoader loader;
private ExecutableFlow flow;
private RemoteUpdaterThread thread;
private boolean isShutdown = false;
// Every minute
private long checkIntervalMs = CHECK_INTERVAL_MS;
public RemoteFlowWatcher(final int execId, final ExecutorLoader loader) {
this(execId, loader, CHECK_INTERVAL_MS);
}
public RemoteFlowWatcher(final int execId, final ExecutorLoader loader, final long interval) {
super(execId);
this.checkIntervalMs = interval;
try {
this.flow = loader.fetchExecutableFlow(execId);
} catch (final ExecutorManagerException e) {
return;
}
super.setFlow(this.flow);
this.loader = loader;
this.execId = execId;
if (this.flow != null) {
this.thread = new RemoteUpdaterThread();
this.thread.setName("Remote-watcher-flow-" + execId);
this.thread.start();
}
}
@Override
public synchronized void stopWatcher() {
if (this.isShutdown) {
return;
}
this.isShutdown = true;
if (this.thread != null) {
this.thread.interrupt();
}
super.unblockAllWatches();
this.loader = null;
this.flow = null;
}
private class RemoteUpdaterThread extends Thread {
@Override
public void run() {
do {
ExecutableFlow updateFlow = null;
try {
updateFlow = RemoteFlowWatcher.this.loader.fetchExecutableFlow(
RemoteFlowWatcher.this.execId);
} catch (final ExecutorManagerException e) {
e.printStackTrace();
RemoteFlowWatcher.this.isShutdown = true;
// stop polling: updateFlow is null and must not be dereferenced below
break;
}
long updateTime = 0;
if (RemoteFlowWatcher.this.flow == null) {
RemoteFlowWatcher.this.flow = updateFlow;
} else {
final Map<String, Object> updateData =
updateFlow.toUpdateObject(updateTime);
final ArrayList<ExecutableNode> updatedNodes =
new ArrayList<>();
RemoteFlowWatcher.this.flow.applyUpdateObject(updateData, updatedNodes);
RemoteFlowWatcher.this.flow.setStatus(updateFlow.getStatus());
RemoteFlowWatcher.this.flow.setEndTime(updateFlow.getEndTime());
RemoteFlowWatcher.this.flow.setUpdateTime(updateFlow.getUpdateTime());
for (final ExecutableNode node : updatedNodes) {
handleJobStatusChange(node.getNestedId(), node.getStatus());
}
updateTime = RemoteFlowWatcher.this.flow.getUpdateTime();
}
if (Status.isStatusFinished(RemoteFlowWatcher.this.flow.getStatus())) {
RemoteFlowWatcher.this.isShutdown = true;
} else {
synchronized (this) {
try {
wait(RemoteFlowWatcher.this.checkIntervalMs);
} catch (final InterruptedException e) {
// interrupted by stopWatcher(); the loop condition will see isShutdown
}
}
}
} while (!RemoteFlowWatcher.this.isShutdown);
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxFlowRampManager.java
|
/*
* Copyright 2019 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.jmx;
import azkaban.execapp.FlowRampManager;
public class JmxFlowRampManager implements JmxFlowRampManagerMBean {
private final FlowRampManager manager;
public JmxFlowRampManager(final FlowRampManager manager) {
this.manager = manager;
}
@Override
public int getNumOfRamps() {
return this.manager.getNumOfRamps();
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxFlowRampManagerMBean.java
|
/*
* Copyright 2019 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.jmx;
import azkaban.jmx.DisplayName;
public interface JmxFlowRampManagerMBean {
@DisplayName("OPERATION: getNumOfRamps")
public int getNumOfRamps();
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxFlowRunnerManager.java
|
/*
* Copyright 2014 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.jmx;
import azkaban.execapp.FlowRunnerManager;
public class JmxFlowRunnerManager implements JmxFlowRunnerManagerMBean {
private final FlowRunnerManager manager;
public JmxFlowRunnerManager(final FlowRunnerManager manager) {
this.manager = manager;
}
@Override
public long getLastCleanerThreadCheckTime() {
return this.manager.getLastCleanerThreadCheckTime();
}
@Override
public boolean isCleanerThreadActive() {
return this.manager.isCleanerThreadActive();
}
@Override
public String getCleanerThreadState() {
return this.manager.getCleanerThreadState().toString();
}
@Override
public boolean isExecutorThreadPoolShutdown() {
return this.manager.isExecutorThreadPoolShutdown();
}
@Override
public int getNumRunningFlows() {
return this.manager.getNumRunningFlows();
}
@Override
public int getNumQueuedFlows() {
return this.manager.getNumQueuedFlows();
}
@Override
public String getRunningFlows() {
return this.manager.getRunningFlowIds();
}
@Override
public String getQueuedFlows() {
return this.manager.getQueuedFlowIds();
}
@Override
public int getMaxNumRunningFlows() {
return this.manager.getMaxNumRunningFlows();
}
@Override
public int getMaxQueuedFlows() {
return this.manager.getTheadPoolQueueSize();
}
@Override
public int getTotalNumExecutedFlows() {
return this.manager.getTotalNumExecutedFlows();
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxFlowRunnerManagerMBean.java
|
/*
* Copyright 2014 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.jmx;
import azkaban.jmx.DisplayName;
public interface JmxFlowRunnerManagerMBean {
@DisplayName("OPERATION: getLastCleanerThreadCheckTime")
public long getLastCleanerThreadCheckTime();
@DisplayName("OPERATION: isCleanerThreadActive")
public boolean isCleanerThreadActive();
@DisplayName("OPERATION: getCleanerThreadState")
public String getCleanerThreadState();
@DisplayName("OPERATION: isExecutorThreadPoolShutdown")
public boolean isExecutorThreadPoolShutdown();
@DisplayName("OPERATION: getNumRunningFlows")
public int getNumRunningFlows();
@DisplayName("OPERATION: getNumQueuedFlows")
public int getNumQueuedFlows();
@DisplayName("OPERATION: getRunningFlows")
public String getRunningFlows();
@DisplayName("OPERATION: getQueuedFlows")
public String getQueuedFlows();
@DisplayName("OPERATION: getMaxNumRunningFlows")
public int getMaxNumRunningFlows();
@DisplayName("OPERATION: getMaxQueuedFlows")
public int getMaxQueuedFlows();
@DisplayName("OPERATION: getTotalNumExecutedFlows")
public int getTotalNumExecutedFlows();
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxJobCallback.java
|
package azkaban.execapp.jmx;
import org.apache.http.impl.client.FutureRequestExecutionMetrics;
public class JmxJobCallback implements JmxJobCallbackMBean {
private final FutureRequestExecutionMetrics jobCallbackMetrics;
public JmxJobCallback(final FutureRequestExecutionMetrics jobCallbackMetrics) {
this.jobCallbackMetrics = jobCallbackMetrics;
}
@Override
public long getNumJobCallbacks() {
return this.jobCallbackMetrics.getRequestCount();
}
@Override
public long getNumSuccessfulJobCallbacks() {
return this.jobCallbackMetrics.getSuccessfulConnectionCount();
}
@Override
public long getNumFailedJobCallbacks() {
return this.jobCallbackMetrics.getFailedConnectionCount();
}
@Override
public long getNumActiveJobCallbacks() {
return this.jobCallbackMetrics.getActiveConnectionCount();
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxJobCallbackMBean.java
|
package azkaban.execapp.jmx;
import azkaban.jmx.DisplayName;
public interface JmxJobCallbackMBean {
@DisplayName("OPERATION: getNumJobCallbacks")
public long getNumJobCallbacks();
@DisplayName("OPERATION: getNumSuccessfulJobCallbacks")
public long getNumSuccessfulJobCallbacks();
@DisplayName("OPERATION: getNumFailedJobCallbacks")
public long getNumFailedJobCallbacks();
@DisplayName("OPERATION: getNumActiveJobCallbacks")
public long getNumActiveJobCallbacks();
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxJobMBeanManager.java
|
package azkaban.execapp.jmx;
import azkaban.event.Event;
import azkaban.event.EventData;
import azkaban.event.EventListener;
import azkaban.execapp.JobRunner;
import azkaban.executor.ExecutableNode;
import azkaban.executor.Status;
import azkaban.spi.EventType;
import azkaban.utils.Props;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.log4j.Logger;
/**
 * Responsible for keeping track of job-related MBean attributes by listening to job-related
 * events.
 *
 * @author hluu
 */
public class JmxJobMBeanManager implements JmxJobMXBean, EventListener {
private static final Logger logger = Logger
.getLogger(JmxJobMBeanManager.class);
private static final JmxJobMBeanManager INSTANCE = new JmxJobMBeanManager();
private final AtomicInteger runningJobCount = new AtomicInteger(0);
private final AtomicInteger totalExecutedJobCount = new AtomicInteger(0);
private final AtomicInteger totalFailedJobCount = new AtomicInteger(0);
private final AtomicInteger totalSucceededJobCount = new AtomicInteger(0);
private final Map<String, AtomicInteger> jobTypeFailureMap =
new HashMap<>();
private final Map<String, AtomicInteger> jobTypeSucceededMap =
new HashMap<>();
private boolean initialized;
private JmxJobMBeanManager() {
}
public static JmxJobMBeanManager getInstance() {
return INSTANCE;
}
public void initialize(final Props props) {
logger.info("Initializing " + getClass().getName());
this.initialized = true;
}
@Override
public int getNumRunningJobs() {
return this.runningJobCount.get();
}
@Override
public int getTotalNumExecutedJobs() {
return this.totalExecutedJobCount.get();
}
@Override
public int getTotalFailedJobs() {
return this.totalFailedJobCount.get();
}
@Override
public int getTotalSucceededJobs() {
return this.totalSucceededJobCount.get();
}
@Override
public Map<String, Integer> getTotalSucceededJobsByJobType() {
return convertMapValueToInteger(this.jobTypeSucceededMap);
}
@Override
public Map<String, Integer> getTotalFailedJobsByJobType() {
return convertMapValueToInteger(this.jobTypeFailureMap);
}
private Map<String, Integer> convertMapValueToInteger(
final Map<String, AtomicInteger> map) {
final Map<String, Integer> result = new HashMap<>(map.size());
for (final Map.Entry<String, AtomicInteger> entry : map.entrySet()) {
result.put(entry.getKey(), entry.getValue().intValue());
}
return result;
}
@Override
public void handleEvent(final Event event) {
if (!this.initialized) {
throw new RuntimeException("JmxJobMBeanManager has not been initialized");
}
if (event.getRunner() instanceof JobRunner) {
final JobRunner jobRunner = (JobRunner) event.getRunner();
final EventData eventData = event.getData();
final ExecutableNode node = jobRunner.getNode();
if (logger.isDebugEnabled()) {
logger.debug("*** got " + event.getType() + " " + node.getId() + " "
+ event.getRunner().getClass().getName() + " status: "
+ eventData.getStatus());
}
if (event.getType() == EventType.JOB_STARTED) {
this.runningJobCount.incrementAndGet();
} else if (event.getType() == EventType.JOB_FINISHED) {
this.totalExecutedJobCount.incrementAndGet();
if (this.runningJobCount.intValue() > 0) {
this.runningJobCount.decrementAndGet();
} else {
logger.warn("runningJobCount not messed up, it is already zero "
+ "and we are trying to decrement on job event "
+ EventType.JOB_FINISHED);
}
if (eventData.getStatus() == Status.FAILED) {
this.totalFailedJobCount.incrementAndGet();
} else if (eventData.getStatus() == Status.SUCCEEDED) {
this.totalSucceededJobCount.incrementAndGet();
}
handleJobFinishedCount(eventData.getStatus(), node.getType());
}
} else {
logger.warn("((( Got a different runner: "
+ event.getRunner().getClass().getName() + " )))");
}
}
private void handleJobFinishedCount(final Status status, final String jobType) {
switch (status) {
case FAILED:
handleJobFinishedByType(this.jobTypeFailureMap, jobType);
break;
case SUCCEEDED:
handleJobFinishedByType(this.jobTypeSucceededMap, jobType);
break;
default:
}
}
private void handleJobFinishedByType(final Map<String, AtomicInteger> jobTypeMap,
final String jobType) {
synchronized (jobTypeMap) {
jobTypeMap.computeIfAbsent(jobType, type -> new AtomicInteger()).incrementAndGet();
}
}
}
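A hedged registration sketch using the standard JMX platform MBeanServer; the ObjectName domain here is hypothetical. Because JmxJobMXBean follows the MXBean naming convention, the Map-valued attributes are exposed as JMX open types:

static void registerJobMBean() throws Exception {
  final JmxJobMBeanManager bean = JmxJobMBeanManager.getInstance();
  bean.initialize(new Props()); // must run before events arrive, or handleEvent throws
  java.lang.management.ManagementFactory.getPlatformMBeanServer().registerMBean(
      bean, new javax.management.ObjectName("azkaban.execapp:type=JmxJobMBeanManager"));
}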
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/jmx/JmxJobMXBean.java
|
package azkaban.execapp.jmx;
import azkaban.jmx.DisplayName;
import java.util.Map;
/**
* Define all the MBean attributes at the job level
*
* @author hluu
*/
public interface JmxJobMXBean {
@DisplayName("OPERATION: getNumRunningJobs")
public int getNumRunningJobs();
@DisplayName("OPERATION: getTotalNumExecutedJobs")
public int getTotalNumExecutedJobs();
@DisplayName("OPERATION: getTotalFailedJobs")
public int getTotalFailedJobs();
@DisplayName("OPERATION: getTotalSucceededJobs")
public int getTotalSucceededJobs();
@DisplayName("OPERATION: getTotalSucceededJobsByJobType")
public Map<String, Integer> getTotalSucceededJobsByJobType();
@DisplayName("OPERATION: getTotalFailedJobsByJobType")
public Map<String, Integer> getTotalFailedJobsByJobType();
}
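// --- Hedged client-side sketch (illustrative addition, not part of the original source) ---
// Assuming the implementation is registered under the hypothetical ObjectName below, a JMX
// client can read these attributes through an MXBean proxy.
class JmxJobMXBeanClientSketch {
static void dump(final javax.management.MBeanServerConnection connection) throws Exception {
final JmxJobMXBean bean = javax.management.JMX.newMXBeanProxy(
connection,
new javax.management.ObjectName("azkaban.execapp:type=JmxJobMBeanManager"),
JmxJobMXBean.class);
System.out.println("running jobs: " + bean.getNumRunningJobs());
System.out.println("failed jobs by type: " + bean.getTotalFailedJobsByJobType());
}
}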
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/metric/NumFailedFlowMetric.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.metric;
import azkaban.event.Event;
import azkaban.event.EventListener;
import azkaban.execapp.FlowRunner;
import azkaban.executor.Status;
import azkaban.metric.MetricException;
import azkaban.metric.MetricReportManager;
import azkaban.metric.TimeBasedReportingMetric;
import azkaban.spi.EventType;
/**
* Metric to keep track of number of failed flows in between the tracking events
*/
public class NumFailedFlowMetric extends TimeBasedReportingMetric<Integer> implements
EventListener {
public static final String NUM_FAILED_FLOW_METRIC_NAME = "NumFailedFlowMetric";
private static final String NUM_FAILED_FLOW_METRIC_TYPE = "uint16";
public NumFailedFlowMetric(final MetricReportManager manager, final long interval)
throws MetricException {
super(NUM_FAILED_FLOW_METRIC_NAME, NUM_FAILED_FLOW_METRIC_TYPE, 0, manager, interval);
logger.debug("Instantiated NumFailedJobMetric");
}
/**
* Listen for events to maintain correct value of number of failed flows {@inheritDoc}
*
* @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
*/
@Override
public synchronized void handleEvent(final Event event) {
if (event.getType() == EventType.FLOW_FINISHED) {
final FlowRunner runner = (FlowRunner) event.getRunner();
if (runner != null && runner.getExecutableFlow().getStatus().equals(Status.FAILED)) {
this.value = this.value + 1;
}
}
}
@Override
protected void preTrackingEventMethod() {
// Nothing to finalize before tracking event
}
@Override
protected synchronized void postTrackingEventMethod() {
this.value = 0;
}
}
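// --- Hedged wiring sketch (illustrative addition, not part of the original source) ---
// A minimal sketch, assuming a MetricReportManager is available. The metric is created with a
// reporting interval; how it gets registered as an EventListener on flow runners is not shown
// in this file, so that step is left as a comment.
class NumFailedFlowMetricWiringSketch {
static NumFailedFlowMetric create(final MetricReportManager manager) throws MetricException {
// Report every 60 seconds; postTrackingEventMethod() resets the counter after each report.
final NumFailedFlowMetric metric = new NumFailedFlowMetric(manager, 60_000L);
// The metric must also be subscribed to FLOW_FINISHED events (registration API assumed)
// so that handleEvent(...) can count failed flows.
return metric;
}
}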
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/metric/NumFailedJobMetric.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.metric;
import azkaban.event.Event;
import azkaban.event.EventListener;
import azkaban.executor.Status;
import azkaban.metric.MetricException;
import azkaban.metric.MetricReportManager;
import azkaban.metric.TimeBasedReportingMetric;
import azkaban.spi.EventType;
/**
* Metric to keep track of number of failed jobs in between the tracking events
*/
public class NumFailedJobMetric extends TimeBasedReportingMetric<Integer> implements EventListener {
public static final String NUM_FAILED_JOB_METRIC_NAME = "NumFailedJobMetric";
private static final String NUM_FAILED_JOB_METRIC_TYPE = "uint16";
public NumFailedJobMetric(final MetricReportManager manager, final long interval)
throws MetricException {
super(NUM_FAILED_JOB_METRIC_NAME, NUM_FAILED_JOB_METRIC_TYPE, 0, manager, interval);
logger.debug("Instantiated NumFailedJobMetric");
}
/**
* Listen for events to maintain correct value of number of failed jobs {@inheritDoc}
*
* @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
*/
@Override
public synchronized void handleEvent(final Event event) {
if (event.getType() == EventType.JOB_FINISHED && Status.FAILED
.equals(event.getData().getStatus())) {
this.value = this.value + 1;
}
}
@Override
protected void preTrackingEventMethod() {
// Nothing to finalize before tracking event
}
@Override
protected synchronized void postTrackingEventMethod() {
this.value = 0;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/metric/NumQueuedFlowMetric.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.metric;
import azkaban.execapp.FlowRunnerManager;
import azkaban.metric.MetricException;
import azkaban.metric.MetricReportManager;
import azkaban.metric.TimeBasedReportingMetric;
/**
* Metric to keep track of number of queued flows in Azkaban exec server
*/
public class NumQueuedFlowMetric extends TimeBasedReportingMetric<Integer> {
public static final String NUM_QUEUED_FLOW_METRIC_NAME = "NumQueuedFlowMetric";
private static final String NUM_QUEUED_FLOW_METRIC_TYPE = "uint16";
private final FlowRunnerManager flowManager;
/**
* @param flowRunnerManager Flow runner manager
* @param manager metric report manager
* @param interval reporting interval
*/
public NumQueuedFlowMetric(final FlowRunnerManager flowRunnerManager,
final MetricReportManager manager,
final long interval) throws MetricException {
super(NUM_QUEUED_FLOW_METRIC_NAME, NUM_QUEUED_FLOW_METRIC_TYPE, 0, manager, interval);
logger.debug("Instantiated NumQueuedFlowMetric");
this.flowManager = flowRunnerManager;
}
/**
* Update value using flow manager {@inheritDoc}
*
* @see azkaban.metric.TimeBasedReportingMetric#preTrackingEventMethod()
*/
@Override
protected synchronized void preTrackingEventMethod() {
this.value = this.flowManager.getNumQueuedFlows();
}
@Override
protected void postTrackingEventMethod() {
// nothing to post process
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/metric/NumRunningFlowMetric.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.metric;
import azkaban.execapp.FlowRunnerManager;
import azkaban.metric.MetricException;
import azkaban.metric.MetricReportManager;
import azkaban.metric.TimeBasedReportingMetric;
/**
* Metric to keep track of number of running flows in Azkaban exec server
*/
public class NumRunningFlowMetric extends TimeBasedReportingMetric<Integer> {
public static final String NUM_RUNNING_FLOW_METRIC_NAME = "NumRunningFlowMetric";
private static final String NUM_RUNNING_FLOW_METRIC_TYPE = "uint16";
private final FlowRunnerManager flowManager;
/**
* @param flowRunnerManager Flow runner manager
* @param manager metric report manager
* @param interval reporting interval
*/
public NumRunningFlowMetric(final FlowRunnerManager flowRunnerManager,
final MetricReportManager manager,
final long interval) throws MetricException {
super(NUM_RUNNING_FLOW_METRIC_NAME, NUM_RUNNING_FLOW_METRIC_TYPE, 0, manager, interval);
logger.debug("Instantiated NumRunningFlowMetric");
this.flowManager = flowRunnerManager;
}
/**
* Update value using flow manager {@inheritDoc}
*
* @see azkaban.metric.TimeBasedReportingMetric#preTrackingEventMethod()
*/
@Override
protected synchronized void preTrackingEventMethod() {
this.value = this.flowManager.getNumRunningFlows();
}
@Override
protected void postTrackingEventMethod() {
// nothing to post process
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/metric/NumRunningJobMetric.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.metric;
import azkaban.event.Event;
import azkaban.event.EventListener;
import azkaban.metric.MetricException;
import azkaban.metric.MetricReportManager;
import azkaban.metric.TimeBasedReportingMetric;
import azkaban.spi.EventType;
/**
* Metric to keep track of number of running jobs in Azkaban exec server
*/
public class NumRunningJobMetric extends TimeBasedReportingMetric<Integer> implements
EventListener {
public static final String NUM_RUNNING_JOB_METRIC_NAME = "NumRunningJobMetric";
private static final String NUM_RUNNING_JOB_METRIC_TYPE = "uint16";
/**
* @param manager metric manager
* @param interval reporting interval
*/
public NumRunningJobMetric(final MetricReportManager manager, final long interval)
throws MetricException {
super(NUM_RUNNING_JOB_METRIC_NAME, NUM_RUNNING_JOB_METRIC_TYPE, 0, manager, interval);
logger.debug("Instantiated NumRunningJobMetric");
}
/**
* Listen for events to maintain correct value of number of running jobs {@inheritDoc}
*
* @see azkaban.event.EventListener#handleEvent(azkaban.event.Event)
*/
@Override
public synchronized void handleEvent(final Event event) {
if (event.getType() == EventType.JOB_STARTED) {
this.value = this.value + 1;
} else if (event.getType() == EventType.JOB_FINISHED) {
this.value = this.value - 1;
}
}
@Override
protected void preTrackingEventMethod() {
// nothing to finalize value is already updated
}
@Override
protected void postTrackingEventMethod() {
// nothing to post process
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp
|
java-sources/ai/databand/azkaban/azkaban-exec-server/3.90.0/azkaban/execapp/metric/ProjectCacheHitRatio.java
|
/*
* Copyright 2019 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.execapp.metric;
import com.codahale.metrics.RatioGauge;
import com.codahale.metrics.SlidingWindowReservoir;
import java.util.Arrays;
/**
* Project cache hit ratio of last 100 cache accesses.
*
 * <p>The advantage of sampling the last 100 cache accesses over time-based sampling (such as
 * the last hour's cache accesses) is that the former is more deterministic. If there are only a
 * few executions in the last hour, the hit ratio may not be truly informative and would not
 * necessarily reflect the performance of the cache.</p>
*/
public class ProjectCacheHitRatio extends RatioGauge {
private final SlidingWindowReservoir hits;
public static final int WINDOW_SIZE = 100;
public ProjectCacheHitRatio() {
this.hits = new SlidingWindowReservoir(WINDOW_SIZE);
}
public synchronized void markHit() {
this.hits.update(1);
}
public synchronized void markMiss() {
this.hits.update(0);
}
@Override
public synchronized Ratio getRatio() {
final long hitCount = Arrays.stream(this.hits.getSnapshot().getValues()).sum();
return Ratio.of(hitCount, this.hits.getSnapshot().size());
}
}
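// --- Hedged usage sketch (illustrative addition, not part of the original source) ---
// Only the last WINDOW_SIZE (100) accesses influence the ratio. The registry and metric name
// below are hypothetical.
class ProjectCacheHitRatioUsageSketch {
public static void main(final String[] args) {
final com.codahale.metrics.MetricRegistry registry = new com.codahale.metrics.MetricRegistry();
final ProjectCacheHitRatio ratio = new ProjectCacheHitRatio();
registry.register("project-cache-hit-ratio", ratio);
for (int i = 0; i < 70; i++) { ratio.markHit(); }  // 70 cache hits
for (int i = 0; i < 30; i++) { ratio.markMiss(); } // 30 cache misses
System.out.println(ratio.getValue()); // prints 0.7: 70 hits out of the last 100 accesses
}
}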
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/AzkabanEventReporter.java
|
package azkaban.spi;
import java.util.Map;
/**
* Implement this interface to report flow and job events. Event reporter
* can be turned on by setting the property {@code AZKABAN_EVENT_REPORTING_ENABLED} to true.
*
 * By default, a KafkaAvroEventReporter is provided. Alternate implementations
 * can be provided by setting the property {@code AZKABAN_EVENT_REPORTING_CLASS_PARAM}.
* <br><br>
* The constructor will be called with a {@code azkaban.utils.Props} object passed as
* the only parameter. If such a constructor doesn't exist, then the AzkabanEventReporter
* instantiation will fail.
*/
public interface AzkabanEventReporter {
boolean report(EventType eventType, Map<String, String> metadata);
}
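// --- Hedged implementation sketch (illustrative addition, not part of the original source) ---
// A minimal reporter that just prints events, showing the Props-only constructor the javadoc
// above requires for reflective instantiation. The real KafkaAvroEventReporter is a separate
// implementation.
class LoggingEventReporterSketch implements AzkabanEventReporter {
private final azkaban.utils.Props props;
public LoggingEventReporterSketch(final azkaban.utils.Props props) {
this.props = props; // this exact constructor signature is required by the loader
}
@Override
public boolean report(final EventType eventType, final Map<String, String> metadata) {
System.out.println("event " + eventType + " metadata " + metadata);
return true; // the report was accepted
}
}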
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/AzkabanException.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.spi;
public class AzkabanException extends RuntimeException {
public AzkabanException(final String message) {
this(message, null);
}
public AzkabanException(final Throwable throwable) {
this(null, throwable);
}
public AzkabanException(final String message, final Throwable cause) {
super(message, cause);
}
public AzkabanException(final String message, final Throwable cause,
final boolean enableSuppression, final boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
@Override
public String getMessage() {
String message = super.getMessage();
if (message == null && getCause() != null) {
message = getCause().getMessage();
}
return message;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/Dependency.java
|
/*
* Copyright 2019 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.spi;
import azkaban.utils.HashUtils;
import azkaban.utils.InvalidHashException;
import java.io.File;
import java.util.Map;
import java.util.Objects;
import org.codehaus.jackson.annotate.JsonProperty;
/**
 * Representation of a startup dependency. Maps 1:1 to an entry in startup-dependencies.json for
 * thin archives. The SHA1 checksum is validated automatically upon instantiation to prevent SQL
 * injection when the checksum is used for DB queries, and to mitigate other issues down the road.
*/
public class Dependency {
private final String fileName;
private final String destination;
private final String type;
private final String ivyCoordinates;
private final String sha1;
public Dependency(final String fileName, final String destination, final String type, final String ivyCoordinates,
final String sha1) throws InvalidHashException {
this.fileName = fileName;
this.destination = destination;
this.type = type;
this.ivyCoordinates = ivyCoordinates;
this.sha1 = HashUtils.SHA1.sanitizeHashStr(sha1);
}
public Dependency(final Map<String, String> fieldMap) throws InvalidHashException {
this(fieldMap.get("file"), fieldMap.get("destination"), fieldMap.get("type"), fieldMap.get("ivyCoordinates"),
fieldMap.get("sha1"));
}
/**
* Make a copy of this dependency
*
* @return a copy of this dependency
*/
public Dependency copy() {
try {
return new Dependency(getFileName(), getDestination(), getType(), getIvyCoordinates(), getSHA1());
} catch (InvalidHashException e) {
// This should never happen because we already validated the hash when creating this dependency
throw new RuntimeException("InvalidHashException when copying dependency.", e);
}
}
/**
* Make a new DependencyFile with the same details as this dependency
*
* @param file for DependencyFile
* @return the new DependencyFile
*/
public DependencyFile makeDependencyFile(final File file) {
try {
return new DependencyFile(file, getFileName(), getDestination(), getType(), getIvyCoordinates(), getSHA1());
} catch (InvalidHashException e) {
// This should never happen because we already validated the hash when creating this dependency
throw new RuntimeException("InvalidHashException when creating DependencyFile.", e);
}
}
// it makes much more sense for the getter to be getFileName vs getFile, but in the startup-dependencies.json
// spec we expect the property to be "file" not "fileName" so we have to annotate this to tell the JSON serializer
// to insert it with "file", instead of assuming the name based on the name of the getter like it usually does.
@JsonProperty("file")
public String getFileName() { return this.fileName; }
public String getDestination() { return this.destination; }
public String getType() { return this.type; }
public String getIvyCoordinates() { return this.ivyCoordinates; }
public String getSHA1() { return this.sha1; }
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
Dependency that = (Dependency) o;
return this.fileName.equals(that.fileName) && this.type.equals(that.type) && this.ivyCoordinates.equals(that.ivyCoordinates)
&& this.sha1.equals(that.sha1);
}
@Override
public int hashCode() {
return Objects.hash(this.sha1);
}
}
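// --- Hedged usage sketch (illustrative addition, not part of the original source) ---
// Building a Dependency from a map shaped like a startup-dependencies.json entry. The SHA1 is a
// syntactically plausible 40-char hex placeholder; a malformed hash makes the constructor throw
// InvalidHashException (assuming sanitizeHashStr accepts any well-formed hex string).
class DependencySketch {
static Dependency fromJsonEntry() throws InvalidHashException {
final Map<String, String> entry = new java.util.HashMap<>();
entry.put("file", "guava-24.1-jre.jar");                       // hypothetical artifact
entry.put("destination", "lib");
entry.put("type", "jar");
entry.put("ivyCoordinates", "com.google.guava:guava:24.1-jre");
entry.put("sha1", "0123456789abcdef0123456789abcdef01234567"); // placeholder hash
return new Dependency(entry);
}
}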
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/DependencyFile.java
|
/*
* Copyright 2019 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.spi;
import azkaban.utils.InvalidHashException;
import java.io.File;
import java.util.Objects;
/**
* Representation of startup dependency with an associated local file. Usually a DependencyFile will never be
* directly instantiated (except maybe in tests), but rather will be generated from an instance of a Dependency
* using Dependency::makeDependencyFile(File f)
*/
public class DependencyFile extends Dependency {
private final File file;
// NOTE: This should NEVER throw InvalidHashException because the input dependency
// must have already had its hash validated upon instantiation.
public DependencyFile(final File f, final String fileName, final String destination, final String type,
final String ivyCoordinates, final String sha1) throws InvalidHashException {
super(fileName, destination, type, ivyCoordinates, sha1);
this.file = f;
}
public File getFile() { return this.file; }
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
if (!super.equals(o)) {
return false;
}
DependencyFile that = (DependencyFile) o;
return Objects.equals(file, that.file);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/EventType.java
|
package azkaban.spi;
/**
* Enum class defining the list of supported event types.
*/
public enum EventType {
FLOW_STARTED,
FLOW_FINISHED,
JOB_STARTED,
JOB_FINISHED,
JOB_STATUS_CHANGED,
EXTERNAL_FLOW_UPDATED,
EXTERNAL_JOB_UPDATED
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/FileValidationStatus.java
|
/*
* Copyright 2019 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.spi;
import java.util.HashMap;
import java.util.Map;
// REMOVED = validator removed this file, it is blacklisted
// VALID = validator gave this file the green light - no modifications made, it's fine as is.
// NEW = not yet processed by the validator
public enum FileValidationStatus {
REMOVED(0), VALID(1), NEW(2);
private final int value;
private static final Map<Integer, FileValidationStatus> map = new HashMap<>();
FileValidationStatus(final int newValue) {
value = newValue;
}
static {
for (FileValidationStatus v : FileValidationStatus.values()) {
map.put(v.value, v);
}
}
public static FileValidationStatus valueOf(final int v) {
return map.get(v);
}
public int getValue() { return value; }
}
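// --- Hedged usage sketch (illustrative addition, not part of the original source) ---
// The int value is what gets persisted (e.g. in a DB column); valueOf(int) is the reverse
// lookup built from the static map above.
class FileValidationStatusSketch {
static void roundTrip() {
final int stored = FileValidationStatus.VALID.getValue(); // 1
final FileValidationStatus restored = FileValidationStatus.valueOf(stored);
assert restored == FileValidationStatus.VALID;
}
}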
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/ProjectStorageMetadata.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.spi;
import static java.util.Objects.requireNonNull;
import java.util.Objects;
public class ProjectStorageMetadata {
private final int projectId;
private final int version;
private final String uploader;
private final byte[] hash;
private final String uploaderIPAddr;
public ProjectStorageMetadata(final int projectId, final int version, final String uploader,
final byte[] hash, final String uploaderIPAddr) {
this.projectId = projectId;
this.version = version;
this.uploader = requireNonNull(uploader);
this.hash = hash;
this.uploaderIPAddr = uploaderIPAddr;
}
@Override
public String toString() {
return "StorageMetadata{" + "projectId='" + this.projectId + '\'' + ", version='" + this.version
+ '\''
+ '}';
}
public int getProjectId() {
return this.projectId;
}
public int getVersion() {
return this.version;
}
public String getUploader() {
return this.uploader;
}
public byte[] getHash() {
return this.hash;
}
public String getUploaderIPAddr() {
return this.uploaderIPAddr;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final ProjectStorageMetadata that = (ProjectStorageMetadata) o;
return Objects.equals(this.projectId, that.projectId) &&
Objects.equals(this.version, that.version) &&
Objects.equals(this.uploader, that.uploader);
}
@Override
public int hashCode() {
return Objects.hash(this.projectId, this.version, this.uploader);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/Storage.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.spi;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
/**
 * The Azkaban Storage interface facilitates getting and putting objects into a storage
 * mechanism of choice. By default, this is the MySQL database, but users may choose
 * between multiple storage types in the future.
*
* This is different from storing Azkaban state in MySQL which would typically be maintained in a
* different database.
*
* Note: This is a synchronous interface.
*/
public interface Storage {
/**
* Get an InputStream object for a project by providing a key.
*
* @param key The key is a string pointing to the blob in Storage.
* @return InputStream for fetching the blob. null if the key is not found.
*/
InputStream getProject(String key) throws IOException;
/**
* Put a project and return a key.
*
* @param metadata Metadata related to the input stream
* @param localFile Read data from a local file
* @return Key associated with the current object on successful put
*/
String putProject(ProjectStorageMetadata metadata, File localFile);
/**
* Get an InputStream object for a dependency.
*
* @param dep the dependency to fetch
* @return InputStream for fetching the blob.
*/
InputStream getDependency(Dependency dep) throws IOException;
/**
 * Get the root path of the dependency location (expected to be an http:// URL), or null if
 * thin archives are not enabled.
*/
String getDependencyRootPath();
/**
 * Whether dependency fetching is enabled (i.e. the required azkaban.properties were set).
*/
boolean dependencyFetchingEnabled();
/**
* Delete an object from Storage.
*
* @param key The key is a string pointing to the blob in Storage.
* @return true if delete was successful. false if there was nothing to delete.
*/
boolean deleteProject(String key);
}
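// --- Hedged implementation sketch (illustrative addition, not part of the original source) ---
// A minimal local-filesystem Storage mapping keys to files under a base directory. A sketch
// only: a real implementation would derive keys from the metadata hash, handle errors more
// carefully, and support thin archives. Dependency fetching is reported as disabled here, so
// getDependency is never expected to be called.
class LocalStorageSketch implements Storage {
private final File baseDir;
LocalStorageSketch(final File baseDir) {
this.baseDir = baseDir;
}
@Override
public InputStream getProject(final String key) throws IOException {
final File f = new File(this.baseDir, key);
return f.exists() ? new java.io.FileInputStream(f) : null; // null when the key is not found
}
@Override
public String putProject(final ProjectStorageMetadata metadata, final File localFile) {
final String key = metadata.getProjectId() + "-" + metadata.getVersion() + ".zip";
try {
java.nio.file.Files.copy(localFile.toPath(), new File(this.baseDir, key).toPath(),
java.nio.file.StandardCopyOption.REPLACE_EXISTING);
} catch (final IOException e) {
throw new StorageException("failed to store project " + key, e);
}
return key;
}
@Override
public InputStream getDependency(final Dependency dep) throws IOException {
throw new UnsupportedOperationException("dependency fetching is disabled in this sketch");
}
@Override
public String getDependencyRootPath() {
return null; // thin archives not enabled
}
@Override
public boolean dependencyFetchingEnabled() {
return false;
}
@Override
public boolean deleteProject(final String key) {
return new File(this.baseDir, key).delete(); // false if there was nothing to delete
}
}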
|
0
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-spi/3.90.0/azkaban/spi/StorageException.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.spi;
/**
* Super class to capture any exceptions related to {@link Storage}
*/
public class StorageException extends AzkabanException {
public StorageException(final String message) {
this(message, null);
}
public StorageException(final Throwable throwable) {
this(null, throwable);
}
public StorageException(final String message, final Throwable cause) {
super(message, cause);
}
public StorageException(final String message, final Throwable cause,
final boolean enableSuppression, final boolean writableStackTrace) {
super(message, cause, enableSuppression, writableStackTrace);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/CancellationCause.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
public enum CancellationCause {
NONE, //no cancellation occurred
TIMEOUT, // cancellation is issued due to exceeding max wait time
MANUAL, // cancellation is issued by user
FAILURE, // cancellation is caused by dependency instance failure (e.g. invalid input)
CASCADING // cancellation is caused by cascading failure (e.g. one dependency instance failure
// leads to other dependency instances being cancelled)
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyException.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import azkaban.spi.AzkabanException;
public class DependencyException extends AzkabanException {
private static final long serialVersionUID = 1L;
public DependencyException(final String message) {
super(message);
}
public DependencyException(final String message, final Throwable cause) {
super(message, cause);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyInstance.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
public class DependencyInstance {
private final long startTime;
private final String depName;
private TriggerInstance triggerInstance;
private DependencyInstanceContext context;
private volatile long endTime;
private volatile Status status;
private volatile CancellationCause cause;
//todo chengren311: convert it to builder
public DependencyInstance(final String depName, final long startTime,
final long endTime, final DependencyInstanceContext context, final Status status,
final CancellationCause cause) {
this.status = status;
this.depName = depName;
this.startTime = startTime;
this.endTime = endTime;
this.context = context;
this.cause = cause;
}
@Override
public String toString() {
return "DependencyInstance{" +
"startTime=" + this.startTime +
", depName='" + this.depName + '\'' +
", context=" + this.context +
", endTime=" + this.endTime +
", status=" + this.status +
", cause=" + this.cause +
'}';
}
public CancellationCause getCancellationCause() {
return this.cause;
}
public void setCancellationCause(final CancellationCause cancellationCause) {
this.cause = cancellationCause;
}
public TriggerInstance getTriggerInstance() {
return this.triggerInstance;
}
public void setTriggerInstance(final TriggerInstance triggerInstance) {
this.triggerInstance = triggerInstance;
}
public void setDependencyInstanceContext(final DependencyInstanceContext context) {
this.context = context;
}
public long getStartTime() {
return this.startTime;
}
public long getEndTime() {
return this.endTime;
}
public void setEndTime(final long endTime) {
this.endTime = endTime;
}
public String getDepName() {
return this.depName;
}
public DependencyInstanceContext getContext() {
return this.context;
}
public Status getStatus() {
return this.status;
}
public void setStatus(final Status status) {
this.status = status;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyInstanceCallbackImpl.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import com.google.common.base.Preconditions;
public class DependencyInstanceCallbackImpl implements DependencyInstanceCallback {
private final FlowTriggerService service;
public DependencyInstanceCallbackImpl(final FlowTriggerService service) {
Preconditions.checkNotNull(service);
this.service = service;
}
@Override
public void onSuccess(final DependencyInstanceContext depContext) {
this.service.markDependencySuccess(depContext);
}
@Override
public void onCancel(final DependencyInstanceContext depContext) {
this.service.markDependencyCancelled(depContext);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyInstanceConfigImpl.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
public class DependencyInstanceConfigImpl implements DependencyInstanceConfig {
private final Map<String, String> props;
public DependencyInstanceConfigImpl(final Map<String, String> props) {
this.props = ImmutableMap.copyOf(props);
}
@Override
public String toString() {
return "DependencyInstanceConfigImpl{" +
"props=" + this.props +
'}';
}
@Override
public String get(final String key) {
return this.props.get(key);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyInstanceProcessor.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import azkaban.flowtrigger.database.FlowTriggerInstanceLoader;
import javax.inject.Inject;
import javax.inject.Singleton;
@Singleton
public class DependencyInstanceProcessor {
private final FlowTriggerInstanceLoader flowTriggerInstanceLoader;
@Inject
public DependencyInstanceProcessor(final FlowTriggerInstanceLoader depLoader) {
this.flowTriggerInstanceLoader = depLoader;
}
/**
* Process status update of dependency instance
*/
public void processStatusUpdate(final DependencyInstance depInst) {
// this is a blocking call; might offload it to another thread if necessary.
this.flowTriggerInstanceLoader.updateDependencyExecutionStatus(depInst);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyInstanceRuntimePropsImpl.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
public class DependencyInstanceRuntimePropsImpl implements DependencyInstanceRuntimeProps {
private final Map<String, String> props;
public DependencyInstanceRuntimePropsImpl(final Map<String, String> props) {
this.props = ImmutableMap.copyOf(props);
}
@Override
public String toString() {
return "DependencyInstanceRuntimePropsImpl{" +
"props=" + this.props +
'}';
}
@Override
public String get(final String key) {
return this.props.get(key);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/DependencyPluginConfigImpl.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
public class DependencyPluginConfigImpl implements DependencyPluginConfig {
private final Map<String, String> props;
public DependencyPluginConfigImpl(final Map<String, String> props) {
this.props = ImmutableMap.copyOf(props);
}
@Override
public String toString() {
return "DependencyPluginConfigImpl{" +
"props=" + this.props +
'}';
}
@Override
public String get(final String key) {
return this.props.get(key);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/FlowTriggerExecutionCleaner.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import azkaban.flowtrigger.database.FlowTriggerInstanceLoader;
import java.time.Duration;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
/**
 * Purges old flow trigger execution records from the DB table. Otherwise the table would keep
 * growing indefinitely as triggers are executed, leading to excessive query time on the table.
 * The cleanup policy removes trigger instances that finished more than 30 days ago.
*/
@SuppressWarnings("FutureReturnValueIgnored")
public class FlowTriggerExecutionCleaner {
private static final Duration CLEAN_INTERVAL = Duration.ofMinutes(10);
private static final Duration RETENTION_PERIOD = Duration.ofDays(30);
private final ScheduledExecutorService scheduler;
private final FlowTriggerInstanceLoader flowTriggerInstanceLoader;
@Inject
public FlowTriggerExecutionCleaner(final FlowTriggerInstanceLoader loader) {
this.flowTriggerInstanceLoader = loader;
this.scheduler = Executors.newSingleThreadScheduledExecutor();
}
public void start() {
this.scheduler.scheduleAtFixedRate(() -> {
FlowTriggerExecutionCleaner.this.flowTriggerInstanceLoader
.deleteTriggerExecutionsFinishingOlderThan(System
.currentTimeMillis() - RETENTION_PERIOD.toMillis());
}, 0, CLEAN_INTERVAL.getSeconds(), TimeUnit.SECONDS);
}
public void shutdown() {
this.scheduler.shutdown();
this.scheduler.shutdownNow();
}
}
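// --- Hedged usage sketch (illustrative addition, not part of the original source) ---
// In production the cleaner is injected; standalone it only needs a loader. start() kicks off
// the purge loop immediately and repeats every CLEAN_INTERVAL; shutdown() stops the scheduler.
class FlowTriggerExecutionCleanerSketch {
static void run(final FlowTriggerInstanceLoader loader) throws InterruptedException {
final FlowTriggerExecutionCleaner cleaner = new FlowTriggerExecutionCleaner(loader);
cleaner.start();       // purges executions that finished more than RETENTION_PERIOD ago
Thread.sleep(60_000L); // ... the application runs ...
cleaner.shutdown();    // stop the background purge
}
}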
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/FlowTriggerService.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import azkaban.Constants;
import azkaban.Constants.FlowTriggerProps;
import azkaban.flowtrigger.database.FlowTriggerInstanceLoader;
import azkaban.flowtrigger.plugin.FlowTriggerDependencyPluginException;
import azkaban.flowtrigger.plugin.FlowTriggerDependencyPluginManager;
import azkaban.project.FlowTrigger;
import azkaban.project.FlowTriggerDependency;
import azkaban.project.Project;
import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * FlowTriggerService is a singleton class in the AZ web server that
 * processes all trigger-related operations. Externally it provides the following
 * operations -
 * 1. Create a trigger instance based on a trigger definition.
 * 2. Cancel a trigger instance.
 * 3. Query running and historic trigger instances.
 * 4. Recover incomplete trigger instances.
 *
 * Internally, it
 * 1. maintains the list of running trigger instances in memory.
 * 2. updates the status and start/end time of trigger instances.
 * 3. persists trigger instances to the DB.
 *
 * FlowTriggerService will be leveraged by the Quartz scheduler (our new AZ scheduler) to
 * schedule triggers.
*
* After construction, call {@link #start()} to start the service.
*/
@SuppressWarnings("FutureReturnValueIgnored")
@Singleton
public class FlowTriggerService {
private static final Logger logger = LoggerFactory.getLogger(FlowTriggerService.class);
private static final Duration CANCELLING_GRACE_PERIOD_AFTER_RESTART = Duration.ofMinutes(1);
private static final int RECENTLY_FINISHED_TRIGGER_LIMIT = 50;
private static final int CANCEL_EXECUTOR_POOL_SIZE = 32;
private static final int TIMEOUT_EXECUTOR_POOL_SIZE = 8;
private final ExecutorService flowTriggerExecutorService;
private final ExecutorService cancelExecutorService;
private final ScheduledExecutorService timeoutService;
private final List<TriggerInstance> runningTriggers;
private final FlowTriggerDependencyPluginManager triggerPluginManager;
private final TriggerInstanceProcessor triggerProcessor;
private final FlowTriggerInstanceLoader flowTriggerInstanceLoader;
private final DependencyInstanceProcessor dependencyProcessor;
private final FlowTriggerExecutionCleaner cleaner;
@Inject
public FlowTriggerService(final FlowTriggerDependencyPluginManager pluginManager,
final TriggerInstanceProcessor triggerProcessor, final DependencyInstanceProcessor
dependencyProcessor, final FlowTriggerInstanceLoader flowTriggerInstanceLoader,
final FlowTriggerExecutionCleaner cleaner) {
// Give the thread a name to make debugging easier.
ThreadFactory namedThreadFactory = new ThreadFactoryBuilder()
.setNameFormat("FlowTrigger-service").build();
this.flowTriggerExecutorService = Executors.newSingleThreadExecutor(namedThreadFactory);
namedThreadFactory = new ThreadFactoryBuilder()
.setNameFormat("FlowTrigger-cancel").build();
this.cancelExecutorService = Executors
.newFixedThreadPool(CANCEL_EXECUTOR_POOL_SIZE, namedThreadFactory);
this.timeoutService = Executors.newScheduledThreadPool(TIMEOUT_EXECUTOR_POOL_SIZE);
this.runningTriggers = new ArrayList<>();
this.triggerPluginManager = pluginManager;
this.triggerProcessor = triggerProcessor;
this.dependencyProcessor = dependencyProcessor;
this.flowTriggerInstanceLoader = flowTriggerInstanceLoader;
this.cleaner = cleaner;
}
public void start() throws FlowTriggerDependencyPluginException {
this.triggerPluginManager.loadAllPlugins();
this.recoverIncompleteTriggerInstances();
this.cleaner.start();
}
private DependencyInstanceContext createDepContext(final FlowTriggerDependency dep, final long
startTimeInMills, final String triggerInstId) throws Exception {
final DependencyCheck dependencyCheck = this.triggerPluginManager
.getDependencyCheck(dep.getType());
final DependencyInstanceCallback callback = new DependencyInstanceCallbackImpl(this);
final Map<String, String> depInstConfig = new HashMap<>();
depInstConfig.putAll(dep.getProps());
depInstConfig.put(FlowTriggerProps.DEP_NAME, dep.getName());
final DependencyInstanceConfigImpl config = new DependencyInstanceConfigImpl(depInstConfig);
final DependencyInstanceRuntimeProps runtimeProps = new DependencyInstanceRuntimePropsImpl
(ImmutableMap
.of(FlowTriggerProps.START_TIME, String.valueOf(startTimeInMills), FlowTriggerProps
.TRIGGER_INSTANCE_ID, triggerInstId));
return dependencyCheck.run(config, runtimeProps, callback);
}
private TriggerInstance createTriggerInstance(final FlowTrigger flowTrigger, final String flowId,
final int flowVersion, final String submitUser, final Project project) {
final String triggerInstId = generateId();
final long startTime = System.currentTimeMillis();
// create a list of dependency instances
final List<DependencyInstance> depInstList = new ArrayList<>();
for (final FlowTriggerDependency dep : flowTrigger.getDependencies()) {
final String depName = dep.getName();
DependencyInstanceContext context = null;
try {
context = createDepContext(dep, startTime, triggerInstId);
} catch (final Exception ex) {
logger.error("unable to create dependency context for trigger instance[id = {}]",
triggerInstId, ex);
}
// if dependency instance context fails to be created, then its status is cancelled and
// cause is failure
final Status status = context == null ? Status.CANCELLED : Status.RUNNING;
final CancellationCause cause =
context == null ? CancellationCause.FAILURE : CancellationCause.NONE;
final long endTime = context == null ? System.currentTimeMillis() : 0;
final DependencyInstance depInst = new DependencyInstance(depName, startTime, endTime,
context, status, cause);
depInstList.add(depInst);
}
final TriggerInstance triggerInstance = new TriggerInstance(triggerInstId, flowTrigger,
flowId, flowVersion, submitUser, depInstList, Constants.UNASSIGNED_EXEC_ID, project);
return triggerInstance;
}
private String generateId() {
return UUID.randomUUID().toString();
}
private void scheduleKill(final TriggerInstance triggerInst, final Duration duration, final
CancellationCause cause) {
logger
.debug("cancel trigger instance {} in {} secs", triggerInst.getId(), duration
.getSeconds());
this.timeoutService.schedule(() -> {
cancelTriggerInstance(triggerInst, cause);
}, duration.toMillis(), TimeUnit.MILLISECONDS);
}
/**
* @return the list of running trigger instances
*/
public Collection<TriggerInstance> getRunningTriggers() {
return this.flowTriggerInstanceLoader.getRunning();
}
/**
 * @return the list of recently finished trigger instances
*/
public Collection<TriggerInstance> getRecentlyFinished() {
return this.flowTriggerInstanceLoader.getRecentlyFinished(RECENTLY_FINISHED_TRIGGER_LIMIT);
}
public TriggerInstance findTriggerInstanceById(final String triggerInstanceId) {
return this.flowTriggerInstanceLoader.getTriggerInstanceById(triggerInstanceId);
}
public TriggerInstance findTriggerInstanceByExecId(final int flowExecId) {
return this.flowTriggerInstanceLoader.getTriggerInstanceByFlowExecId(flowExecId);
}
private boolean isDoneButFlowNotExecuted(final TriggerInstance triggerInstance) {
return triggerInstance.getStatus() == Status.SUCCEEDED && triggerInstance.getFlowExecId() ==
Constants.UNASSIGNED_EXEC_ID;
}
private void recoverRunningOrCancelling(final TriggerInstance triggerInstance) {
final FlowTrigger flowTrigger = triggerInstance.getFlowTrigger();
for (final DependencyInstance depInst : triggerInstance.getDepInstances()) {
if (depInst.getStatus() == Status.RUNNING || depInst.getStatus() == Status.CANCELLING) {
final FlowTriggerDependency dependency = flowTrigger
.getDependencyByName(depInst.getDepName());
DependencyInstanceContext context = null;
try {
//recreate dependency instance context
context = createDepContext(dependency, depInst.getStartTime(), depInst
.getTriggerInstance().getId());
} catch (final Exception ex) {
logger
.error(
"unable to create dependency context for trigger instance[id ="
+ " {}]", triggerInstance.getId(), ex);
}
depInst.setDependencyInstanceContext(context);
if (context == null) {
depInst.setStatus(Status.CANCELLED);
depInst.setCancellationCause(CancellationCause.FAILURE);
}
}
}
if (triggerInstance.getStatus() == Status.CANCELLING) {
addToRunningListAndCancel(triggerInstance);
} else if (triggerInstance.getStatus() == Status.RUNNING) {
final long remainingTime = remainingTimeBeforeTimeout(triggerInstance);
addToRunningListAndScheduleKill(triggerInstance, Duration.ofMillis(remainingTime).plus
(CANCELLING_GRACE_PERIOD_AFTER_RESTART), CancellationCause.TIMEOUT);
}
}
private void recoverTriggerInstance(final TriggerInstance triggerInstance) {
this.flowTriggerExecutorService.submit(() -> recover(triggerInstance));
}
private void recover(final TriggerInstance triggerInstance) {
logger.info("recovering pending trigger instance {}", triggerInstance.getId());
if (isDoneButFlowNotExecuted(triggerInstance)) {
// if trigger instance succeeds but the associated flow hasn't been started yet, then start
// the flow
this.triggerProcessor.processSucceed(triggerInstance);
} else {
recoverRunningOrCancelling(triggerInstance);
}
}
/**
* Resume executions of all incomplete trigger instances by recovering the state from db.
*/
public void recoverIncompleteTriggerInstances() {
final Collection<TriggerInstance> unfinishedTriggerInstances = this.flowTriggerInstanceLoader
.getIncompleteTriggerInstances();
for (final TriggerInstance triggerInstance : unfinishedTriggerInstances) {
if (triggerInstance.getFlowTrigger() != null) {
recoverTriggerInstance(triggerInstance);
} else {
logger.error("cannot recover the trigger instance {}, flow trigger is null,"
+ " cancelling it ", triggerInstance.getId());
//finalize unrecoverable trigger instances
// the following situation would make trigger instances unrecoverable:
// 1. project A with flow A associated with flow trigger A is uploaded
// 2. flow trigger A starts to run
// 3. project A with flow B without any flow trigger is uploaded
// 4. web server restarts
// in this case, the trigger instance of flow trigger A will be loaded with the latest
// project version, and the flow trigger cannot be found since the new project version
// doesn't contain a flow trigger at all
if (isDoneButFlowNotExecuted(triggerInstance)) {
triggerInstance.setFlowExecId(Constants.FAILED_EXEC_ID);
this.flowTriggerInstanceLoader.updateAssociatedFlowExecId(triggerInstance);
} else {
for (final DependencyInstance depInst : triggerInstance.getDepInstances()) {
if (!Status.isDone(depInst.getStatus())) {
processStatusAndCancelCauseUpdate(depInst, Status.CANCELLED,
CancellationCause.FAILURE);
this.triggerProcessor.processTermination(depInst.getTriggerInstance());
}
}
}
}
}
}
private void addToRunningListAndScheduleKill(final TriggerInstance triggerInst, final
Duration durationBeforeKill, final CancellationCause cause) {
// skip if the trigger instance is already done
if (!Status.isDone(triggerInst.getStatus())) {
this.runningTriggers.add(triggerInst);
scheduleKill(triggerInst, durationBeforeKill, cause);
}
}
private CancellationCause getCancellationCause(final TriggerInstance triggerInst) {
final Set<CancellationCause> causes = triggerInst.getDepInstances().stream()
.map(DependencyInstance::getCancellationCause).collect(Collectors.toSet());
if (causes.contains(CancellationCause.FAILURE) || causes
.contains(CancellationCause.CASCADING)) {
return CancellationCause.CASCADING;
} else if (causes.contains(CancellationCause.TIMEOUT)) {
return CancellationCause.TIMEOUT;
} else if (causes.contains(CancellationCause.MANUAL)) {
return CancellationCause.MANUAL;
} else {
return CancellationCause.NONE;
}
}
private void cancelTriggerInstance(final TriggerInstance triggerInst) {
logger.debug("cancelling trigger instance of exec id" + triggerInst.getId());
final CancellationCause cause = getCancelleationCause(triggerInst);
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
if (depInst.getStatus() == Status.CANCELLING) {
cancelContextAsync(depInst.getContext());
} else if (depInst.getStatus() == Status.RUNNING) {
// sometimes dependency instances of a trigger instance in cancelling status can still be
// running, e.g. dep inst1: failure, dep inst2: running -> the trigger instance is cancelling
this.processStatusAndCancelCauseUpdate(depInst, Status.CANCELLING, cause);
cancelContextAsync(depInst.getContext());
}
}
}
private void addToRunningListAndCancel(final TriggerInstance triggerInst) {
this.runningTriggers.add(triggerInst);
cancelTriggerInstance(triggerInst);
}
private void updateDepInstStatus(final DependencyInstance depInst, final Status newStatus) {
depInst.setStatus(newStatus);
if (Status.isDone(depInst.getStatus())) {
depInst.setEndTime(System.currentTimeMillis());
}
}
private void processStatusUpdate(final DependencyInstance depInst, final Status newStatus) {
logger.debug("process status update for " + depInst);
updateDepInstStatus(depInst, newStatus);
this.dependencyProcessor.processStatusUpdate(depInst);
}
private void processStatusAndCancelCauseUpdate(final DependencyInstance depInst, final Status
newStatus, final CancellationCause cause) {
depInst.setCancellationCause(cause);
updateDepInstStatus(depInst, newStatus);
this.dependencyProcessor.processStatusUpdate(depInst);
}
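  // Computes how long until the trigger instance times out: e.g. with a max wait of
  // 10 minutes and a start time 7 minutes ago this returns 180000 ms; once the
  // deadline has passed it returns 0.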
private long remainingTimeBeforeTimeout(final TriggerInstance triggerInst) {
final long now = System.currentTimeMillis();
return Math.max(0,
triggerInst.getFlowTrigger().getMaxWaitDuration().get().toMillis() - (now - triggerInst
.getStartTime()));
}
/**
   * Start the trigger. This method is scheduled for invocation by the Azkaban scheduler.
*/
public void startTrigger(final FlowTrigger flowTrigger, final String flowId,
final int flowVersion, final String submitUser, final Project project) {
final TriggerInstance triggerInst = createTriggerInstance(flowTrigger, flowId, flowVersion,
submitUser, project);
this.flowTriggerExecutorService.submit(() -> {
logger.info("Starting the flow trigger [trigger instance id: {}] by {}",
triggerInst.getId(), submitUser);
start(triggerInst);
});
}
private void start(final TriggerInstance triggerInst) {
this.triggerProcessor.processNewInstance(triggerInst);
if (triggerInst.getStatus() == Status.CANCELLED) {
// all dependency instances failed
logger.info(
"Trigger instance[id: {}] is cancelled since all dependency instances fail to be created",
triggerInst.getId());
this.triggerProcessor.processTermination(triggerInst);
} else if (triggerInst.getStatus() == Status.CANCELLING) {
// some of the dependency instances failed
logger.info(
"Trigger instance[id: {}] is being cancelled since some dependency instances fail to be created",
triggerInst.getId());
addToRunningListAndCancel(triggerInst);
} else if (triggerInst.getStatus() == Status.SUCCEEDED) {
this.triggerProcessor.processSucceed(triggerInst);
} else {
      // todo chengren311: it's possible the web server restarts before the db update;
      // the new instance will then not be recoverable from db.
addToRunningListAndScheduleKill(triggerInst, triggerInst.getFlowTrigger()
.getMaxWaitDuration().get(), CancellationCause.TIMEOUT);
}
}
public TriggerInstance findRunningTriggerInstById(final String triggerInstId) {
final Future<TriggerInstance> future = this.flowTriggerExecutorService.submit(
() -> getTriggerInstanceById(triggerInstId)
);
try {
return future.get();
} catch (final Exception e) {
logger.error("exception when finding trigger instance by id" + triggerInstId, e);
return null;
}
}
private TriggerInstance getTriggerInstanceById(final String triggerInstId) {
return this.runningTriggers.stream()
.filter(triggerInst -> triggerInst.getId().equals(triggerInstId)).findFirst()
.orElse(null);
}
private void cancelContextAsync(final DependencyInstanceContext context) {
this.cancelExecutorService.submit(() -> context.cancel());
}
/**
* Cancel a trigger instance
*
* @param triggerInst trigger instance to be cancelled
* @param cause cause of cancelling
*/
public void cancelTriggerInstance(final TriggerInstance triggerInst,
final CancellationCause cause) {
if (triggerInst.getStatus() == Status.RUNNING) {
this.flowTriggerExecutorService.submit(() -> cancel(triggerInst, cause));
}
}
private void cancel(final TriggerInstance triggerInst, final CancellationCause cause) {
logger.info("cancelling trigger instance with id {}", triggerInst.getId());
if (triggerInst != null) {
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
// cancel running dependencies only, no need to cancel a killed/successful dependency
// instance
if (depInst.getStatus() == Status.RUNNING) {
this.processStatusAndCancelCauseUpdate(depInst, Status.CANCELLING, cause);
cancelContextAsync(depInst.getContext());
}
}
} else {
logger.debug("unable to cancel a trigger instance in non-running state with id {}",
triggerInst.getId());
}
}
private DependencyInstance findDependencyInstanceByContext(
final DependencyInstanceContext context) {
return this.runningTriggers.stream()
.flatMap(triggerInst -> triggerInst.getDepInstances().stream()).filter(
depInst -> depInst.getContext() != null && depInst.getContext() == context)
.findFirst().orElse(null);
}
/**
* Mark the dependency instance context as success
*/
public void markDependencySuccess(final DependencyInstanceContext context) {
this.flowTriggerExecutorService.submit(() -> markSuccess(context));
}
private void markSuccess(final DependencyInstanceContext context) {
final DependencyInstance depInst = findDependencyInstanceByContext(context);
if (depInst != null) {
if (Status.isDone(depInst.getStatus())) {
logger.warn("OnSuccess of dependency instance[id: {}, name: {}] is ignored",
depInst.getTriggerInstance().getId(), depInst.getDepName());
return;
}
// if the status transits from cancelling to succeeded, then cancellation cause was set,
// we need to unset cancellation cause.
this.processStatusAndCancelCauseUpdate(depInst, Status.SUCCEEDED, CancellationCause.NONE);
// if associated trigger instance becomes success, then remove it from running list
if (depInst.getTriggerInstance().getStatus() == Status.SUCCEEDED) {
logger.info("trigger instance[id: {}] succeeded", depInst.getTriggerInstance().getId());
this.triggerProcessor.processSucceed(depInst.getTriggerInstance());
this.runningTriggers.remove(depInst.getTriggerInstance());
}
} else {
logger.debug("unable to find trigger instance with context {} when marking it success",
context);
}
}
private boolean cancelledByAzkaban(final DependencyInstance depInst) {
return depInst.getStatus() == Status.CANCELLING;
}
private boolean cancelledByDependencyPlugin(final DependencyInstance depInst) {
// When onKill is called by the dependency plugin not through flowTriggerService, we treat it
// as cancelled by dependency due to failure on dependency side. In this case, cancel cause
// remains unset.
return depInst.getStatus() == Status.RUNNING;
}
public void markDependencyCancelled(final DependencyInstanceContext context) {
this.flowTriggerExecutorService.submit(() -> {
markCancelled(context);
});
}
private void markCancelled(final DependencyInstanceContext context) {
final DependencyInstance depInst = findDependencyInstanceByContext(context);
if (depInst != null) {
if (cancelledByDependencyPlugin(depInst)) {
processStatusAndCancelCauseUpdate(depInst, Status.CANCELLED, CancellationCause.FAILURE);
cancelTriggerInstance(depInst.getTriggerInstance());
} else if (cancelledByAzkaban(depInst)) {
processStatusUpdate(depInst, Status.CANCELLED);
} else {
logger.warn("OnCancel of dependency instance[id: {}, name: {}] is ignored",
depInst.getTriggerInstance().getId(), depInst.getDepName());
return;
}
if (depInst.getTriggerInstance().getStatus() == Status.CANCELLED) {
logger.info("trigger instance with execId {} is cancelled",
depInst.getTriggerInstance().getId());
this.triggerProcessor.processTermination(depInst.getTriggerInstance());
this.runningTriggers.remove(depInst.getTriggerInstance());
}
} else {
logger.warn("unable to find trigger instance with context {} when marking "
+ "it cancelled", context);
}
}
/**
   * Shuts down the service immediately, interrupting any in-flight tasks.
*/
public void shutdown() {
this.flowTriggerExecutorService.shutdown();
this.cancelExecutorService.shutdown();
this.timeoutService.shutdown();
this.flowTriggerExecutorService.shutdownNow();
this.cancelExecutorService.shutdownNow();
this.timeoutService.shutdownNow();
this.triggerProcessor.shutdown();
this.triggerPluginManager.shutdown();
this.cleaner.shutdown();
}
public Collection<TriggerInstance> getTriggerInstances(final int projectId, final String flowId,
final int from, final int length) {
return this.flowTriggerInstanceLoader.getTriggerInstances(projectId, flowId, from, length);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/Status.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import com.google.common.collect.ImmutableSet;
import java.util.Set;
/**
* Represents status for trigger/dependency
*/
public enum Status {
RUNNING, // dependency instance is running
SUCCEEDED, // dependency instance succeeds
CANCELLED, // dependency instance is cancelled
CANCELLING; // dependency instance is being cancelled
public static boolean isDone(final Status status) {
final Set<Status> terminalStatus = ImmutableSet.of(SUCCEEDED, CANCELLED);
return terminalStatus.contains(status);
}
}
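// A minimal usage sketch of the isDone helper (illustrative only; StatusDemo is a
// hypothetical class, not part of the Azkaban source):
class StatusDemo {
  public static void main(final String[] args) {
    // RUNNING and CANCELLING are in-flight; SUCCEEDED and CANCELLED are terminal.
    System.out.println(Status.isDone(Status.RUNNING));    // false
    System.out.println(Status.isDone(Status.CANCELLING)); // false
    System.out.println(Status.isDone(Status.SUCCEEDED));  // true
    System.out.println(Status.isDone(Status.CANCELLED));  // true
  }
}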
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/TriggerInstance.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import azkaban.project.FlowTrigger;
import azkaban.project.Project;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
public class TriggerInstance {
private final List<DependencyInstance> depInstances;
private final String id;
private final String submitUser;
private final Project project;
private final String flowId;
private final int flowVersion;
private FlowTrigger flowTrigger;
private volatile int flowExecId; // associated flow execution id
//todo chengren311: convert it to builder
public TriggerInstance(final String id, final FlowTrigger flowTrigger, final String flowId,
final int flowVersion, final String submitUser, final List<DependencyInstance>
depInstances, final int flowExecId, final Project project) {
    // keep a mutable copy so that addDependencyInstance can append to it
    this.depInstances = new ArrayList<>(depInstances);
this.id = id;
this.flowTrigger = flowTrigger;
this.submitUser = submitUser;
this.flowId = flowId;
this.flowVersion = flowVersion;
this.flowExecId = flowExecId;
this.project = project;
for (final DependencyInstance depInst : this.depInstances) {
depInst.setTriggerInstance(this);
}
}
@Override
public String toString() {
return "TriggerInstance{" +
"depInstances=" + this.depInstances +
", id='" + this.id + '\'' +
", submitUser='" + this.submitUser + '\'' +
", project=" + this.project +
", flowId='" + this.flowId + '\'' +
", flowVersion=" + this.flowVersion +
", flowTrigger=" + this.flowTrigger +
", flowExecId=" + this.flowExecId +
'}';
}
public Project getProject() {
return this.project;
}
public String getProjectName() {
return this.project.getName();
}
public List<String> getFailureEmails() {
return this.project.getFlow(this.getFlowId()).getFailureEmails();
}
public String getFlowId() {
return this.flowId;
}
public int getFlowVersion() {
return this.flowVersion;
}
public int getFlowExecId() {
return this.flowExecId;
}
public void setFlowExecId(final int flowExecId) {
this.flowExecId = flowExecId;
}
public final FlowTrigger getFlowTrigger() {
return this.flowTrigger;
}
public void setFlowTrigger(final FlowTrigger flowTrigger) {
this.flowTrigger = flowTrigger;
}
public String getSubmitUser() {
return this.submitUser;
}
  public void addDependencyInstance(final DependencyInstance depInst) {
    // set the back-reference, mirroring what the constructor does for initial dependencies
    depInst.setTriggerInstance(this);
    this.depInstances.add(depInst);
  }
public List<DependencyInstance> getDepInstances() {
return this.depInstances;
}
public String getId() {
return this.id;
}
private boolean isRunning(final Set<Status> statuses) {
if (statuses.contains(Status.RUNNING)) {
for (final Status status : statuses) {
if (!status.equals(Status.SUCCEEDED) && !status.equals(Status.RUNNING)) {
return false;
}
}
return true;
}
return false;
}
private boolean isSucceed(final Set<Status> statuses) {
return statuses.contains(Status.SUCCEEDED) && statuses.size() == 1;
}
private boolean isCancelled(final Set<Status> statuses) {
if (statuses.contains(Status.CANCELLED)) {
for (final Status status : statuses) {
if (!status.equals(Status.SUCCEEDED) && !status.equals(Status.CANCELLED)) {
return false;
}
}
return true;
}
return false;
}
public Status getStatus() {
    // a trigger with no dependencies is always considered successful
if (this.depInstances.isEmpty()) {
return Status.SUCCEEDED;
}
final Set<Status> statusSet = new HashSet<>();
for (final DependencyInstance depInst : this.depInstances) {
statusSet.add(depInst.getStatus());
}
if (isRunning(statusSet)) {
return Status.RUNNING;
} else if (isSucceed(statusSet)) {
return Status.SUCCEEDED;
} else if (isCancelled(statusSet)) {
return Status.CANCELLED;
} else {
return Status.CANCELLING;
}
}
public long getStartTime() {
final List<Long> startTimeList = this.depInstances.stream()
.map(DependencyInstance::getStartTime).collect(Collectors.toList());
return startTimeList.isEmpty() ? 0 : Collections.min(startTimeList);
}
public long getEndTime() {
if (Status.isDone(this.getStatus())) {
final List<Long> endTimeList = this.depInstances.stream()
.map(DependencyInstance::getEndTime).filter(endTime -> endTime != 0)
.collect(Collectors.toList());
return endTimeList.isEmpty() ? 0 : Collections.max(endTimeList);
} else {
return 0;
}
}
}
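// A hypothetical sketch (not part of the Azkaban source) illustrating the status
// aggregation above: a trigger instance is RUNNING while every dependency is either
// SUCCEEDED or RUNNING, and SUCCEEDED only once all dependencies have succeeded.
class TriggerInstanceStatusDemo {
  public static void main(final String[] args) {
    final DependencyInstance done =
        new DependencyInstance("d1", 0L, 0L, null, Status.SUCCEEDED, CancellationCause.NONE);
    final DependencyInstance running =
        new DependencyInstance("d2", 0L, 0L, null, Status.RUNNING, CancellationCause.NONE);
    // project is null and flowExecId is -1 (unassigned) for illustration only;
    // getStatus() consults neither.
    final TriggerInstance inst = new TriggerInstance("demo-id", null, "demo_flow", 1,
        "demo_user", java.util.Arrays.asList(done, running), -1, null);
    System.out.println(inst.getStatus()); // RUNNING
  }
}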
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/TriggerInstanceProcessor.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger;
import azkaban.Constants;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.flow.Flow;
import azkaban.flow.FlowUtils;
import azkaban.flowtrigger.database.FlowTriggerInstanceLoader;
import azkaban.project.Project;
import azkaban.utils.EmailMessage;
import azkaban.utils.Emailer;
import azkaban.utils.TimeUtils;
import com.google.common.base.Preconditions;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
@SuppressWarnings("FutureReturnValueIgnored")
public class TriggerInstanceProcessor {
private static final Logger logger = LoggerFactory.getLogger(TriggerInstanceProcessor.class);
private static final String FAILURE_EMAIL_SUBJECT = "flow trigger for flow '%s', project '%s' "
+ "has been cancelled on %s";
  private static final int THREAD_POOL_SIZE = 32;
private final ExecutorManagerAdapter executorManager;
private final FlowTriggerInstanceLoader flowTriggerInstanceLoader;
private final Emailer emailer;
private final ExecutorService executorService;
@Inject
public TriggerInstanceProcessor(final ExecutorManagerAdapter executorManager,
final FlowTriggerInstanceLoader flowTriggerInstanceLoader,
final Emailer emailer) {
Preconditions.checkNotNull(executorManager);
Preconditions.checkNotNull(flowTriggerInstanceLoader);
Preconditions.checkNotNull(emailer);
this.emailer = emailer;
this.executorManager = executorManager;
this.flowTriggerInstanceLoader = flowTriggerInstanceLoader;
this.executorService = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
}
private void executeFlowAndUpdateExecID(final TriggerInstance triggerInst) {
try {
final Project project = triggerInst.getProject();
final Flow flow = FlowUtils.getFlow(project, triggerInst.getFlowId());
final ExecutableFlow executableFlow = FlowUtils.createExecutableFlow(project, flow);
// execute the flow with default execution option(concurrency option being "ignore
// currently running")
this.executorManager.submitExecutableFlow(executableFlow, triggerInst.getSubmitUser());
triggerInst.setFlowExecId(executableFlow.getExecutionId());
} catch (final Exception ex) {
logger.error("exception when executing the associated flow and updating flow exec id for "
+ "trigger instance[id: {}]",
triggerInst.getId(), ex);
// if flow fails to be executed(e.g. running execution exceeds the allowed concurrent run
// limit), set associated flow exec id to Constants.FAILED_EXEC_ID. Upon web server
// restart, recovery process will skip those flows.
triggerInst.setFlowExecId(Constants.FAILED_EXEC_ID);
}
this.flowTriggerInstanceLoader.updateAssociatedFlowExecId(triggerInst);
}
private String generateFailureEmailSubject(final TriggerInstance triggerInstance) {
return String.format(FAILURE_EMAIL_SUBJECT, triggerInstance.getFlowId(), triggerInstance
.getProjectName(), this.emailer.getAzkabanName());
}
private EmailMessage createFlowTriggerFailureEmailMessage(final TriggerInstance triggerInst) {
final EmailMessage message = this.emailer.createEmailMessage(generateFailureEmailSubject
(triggerInst), "text/html", triggerInst.getFailureEmails());
final SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
message.addAllToAddress(triggerInst.getFailureEmails());
message.setMimeType("text/html");
message.println("<table>");
message.println("<tr><td>Start Time</td><td>");
message.println("<tr><td>" + sdf.format(new Date(triggerInst.getStartTime())) + "</td><td>");
message.println("<tr><td>End Time</td><td>");
message.println("<tr><td>" + sdf.format(new Date(triggerInst.getEndTime())) + "</td><td>");
message.println("<tr><td>Duration</td><td>"
+ TimeUtils.formatDuration(triggerInst.getStartTime(), triggerInst.getEndTime())
+ "</td></tr>");
message.println("<tr><td>Status</td><td>" + triggerInst.getStatus() + "</td></tr>");
message.println("</table>");
message.println("");
final String executionUrl = this.emailer.getAzkabanURL() + "/executor?triggerinstanceid="
+ triggerInst.getId();
message.println("<a href=\"" + executionUrl + "\">" + triggerInst.getFlowId()
+ " Flow Trigger Instance Link</a>");
message.println("");
message.println("<h3>Cancelled Dependencies</h3>");
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
if (depInst.getStatus() == Status.CANCELLED) {
message.println("<table>");
message.println("<tr><td>Dependency Name: " + depInst.getDepName() + "</td><td>");
message
.println("<tr><td>Cancellation Cause: " + depInst.getCancellationCause() + "</td><td>");
message.println("</table>");
}
}
return message;
}
private void sendFailureEmailIfConfigured(final TriggerInstance triggerInstance) {
final List<String> failureEmails = triggerInstance.getFailureEmails();
if (!failureEmails.isEmpty()) {
final EmailMessage message = this.createFlowTriggerFailureEmailMessage(triggerInstance);
this.emailer.sendEmail(message, true, "email message failure email for flow trigger "
+ triggerInstance.getId());
}
}
/**
* Process the case where status of trigger instance becomes success
*/
public void processSucceed(final TriggerInstance triggerInst) {
//todo chengren311: publishing Trigger events to Azkaban Project Events page
this.executorService.submit(() -> executeFlowAndUpdateExecID(triggerInst));
}
/**
* Process the case where status of trigger instance becomes cancelled
*/
public void processTermination(final TriggerInstance triggerInst) {
    // sendFailureEmailIfConfigured can take a few seconds, so run it asynchronously
this.executorService.submit(() -> sendFailureEmailIfConfigured(triggerInst));
}
/**
* Process the case where a new trigger instance is created
*/
public void processNewInstance(final TriggerInstance triggerInst) {
this.flowTriggerInstanceLoader.uploadTriggerInstance(triggerInst);
}
public void shutdown() {
this.executorService.shutdown();
this.executorService.shutdownNow();
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/database/FlowTriggerInstanceLoader.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.database;
import azkaban.flowtrigger.DependencyInstance;
import azkaban.flowtrigger.TriggerInstance;
import java.util.Collection;
public interface FlowTriggerInstanceLoader {
/**
* Upload a trigger instance into db
*/
void uploadTriggerInstance(TriggerInstance triggerInstance);
/**
* Update dependency status, cancellation cause and end time
*/
void updateDependencyExecutionStatus(DependencyInstance depInst);
/**
   * Retrieve trigger instances not in done state (cancelling, running, or succeeded but the
   * associated flow hasn't been triggered yet). This is used to recover unfinished
   * trigger instances when the web server restarts.
*/
Collection<TriggerInstance> getIncompleteTriggerInstances();
/**
* Update associated flow execution id for a trigger instance. This will be called when a trigger
* instance successfully starts a flow.
*/
void updateAssociatedFlowExecId(TriggerInstance triggerInst);
/**
* Retrieve recently finished trigger instances.
*
* @param limit number of trigger instances to retrieve
*/
Collection<TriggerInstance> getRecentlyFinished(int limit);
/**
* Retrieve running trigger instances.
*/
Collection<TriggerInstance> getRunning();
TriggerInstance getTriggerInstanceById(String triggerInstanceId);
TriggerInstance getTriggerInstanceByFlowExecId(int execId);
Collection<TriggerInstance> getTriggerInstances(int projectId, String flowId, int from, int
length);
/**
* Delete cancelled or succeeded trigger instances whose endtime is older than the timestamp
*
   * @return number of deleted rows (dependency instances)
*/
int deleteTriggerExecutionsFinishingOlderThan(long timestamp);
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/database/JdbcFlowTriggerInstanceLoaderImpl.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.database;
import azkaban.Constants;
import azkaban.db.DatabaseOperator;
import azkaban.db.SQLTransaction;
import azkaban.flowtrigger.CancellationCause;
import azkaban.flowtrigger.DependencyException;
import azkaban.flowtrigger.DependencyInstance;
import azkaban.flowtrigger.Status;
import azkaban.flowtrigger.TriggerInstance;
import azkaban.project.FlowLoaderUtils;
import azkaban.project.FlowTrigger;
import azkaban.project.Project;
import azkaban.project.ProjectLoader;
import azkaban.project.ProjectManager;
import com.google.common.io.Files;
import java.io.File;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.dbutils.ResultSetHandler;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class JdbcFlowTriggerInstanceLoaderImpl implements FlowTriggerInstanceLoader {
private static final Logger logger = LoggerFactory
.getLogger(JdbcFlowTriggerInstanceLoaderImpl.class);
private static final String[] DEPENDENCY_EXECUTIONS_COLUMNS = {"trigger_instance_id", "dep_name",
"starttime", "endtime", "dep_status", "cancelleation_cause", "project_id", "project_version",
"flow_id", "flow_version", "flow_exec_id"};
private static final String DEPENDENCY_EXECUTION_TABLE = "execution_dependencies";
private static final String INSERT_DEPENDENCY = String.format("INSERT INTO %s(%s) VALUES(%s);"
+ "", DEPENDENCY_EXECUTION_TABLE, StringUtils.join
(DEPENDENCY_EXECUTIONS_COLUMNS, ","), String.join(",", Collections.nCopies
(DEPENDENCY_EXECUTIONS_COLUMNS.length, "?")));
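  // With the column list above, INSERT_DEPENDENCY renders to:
  // INSERT INTO execution_dependencies(trigger_instance_id,dep_name,starttime,endtime,
  //   dep_status,cancelleation_cause,project_id,project_version,flow_id,flow_version,
  //   flow_exec_id) VALUES(?,?,?,?,?,?,?,?,?,?,?);
  // (the "cancelleation_cause" spelling follows the existing table schema)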
private static final String UPDATE_DEPENDENCY_STATUS_ENDTIME_AND_CANCELLEATION_CAUSE = String
.format
("UPDATE %s SET dep_status = ?, endtime = ?, cancelleation_cause = ? WHERE trigger_instance_id = "
+ "? AND dep_name = ? ;", DEPENDENCY_EXECUTION_TABLE);
private static final String SELECT_EXECUTIONS_BY_INSTANCE_ID =
String.format("SELECT %s FROM %s WHERE trigger_instance_id = ?",
StringUtils.join(DEPENDENCY_EXECUTIONS_COLUMNS, ","),
DEPENDENCY_EXECUTION_TABLE);
private static final String SELECT_EXECUTIONS_BY_EXEC_ID =
String.format("SELECT %s FROM %s WHERE flow_exec_id = ?",
StringUtils.join(DEPENDENCY_EXECUTIONS_COLUMNS, ","),
DEPENDENCY_EXECUTION_TABLE);
private static final String SELECT_ALL_PENDING_EXECUTIONS =
String.format(
"SELECT %s FROM %s WHERE trigger_instance_id in (SELECT trigger_instance_id FROM %s "
+ "WHERE "
+ "dep_status = %s or dep_status = %s or (dep_status = %s and "
+ "flow_exec_id = %s))",
StringUtils.join(DEPENDENCY_EXECUTIONS_COLUMNS, ","),
DEPENDENCY_EXECUTION_TABLE,
DEPENDENCY_EXECUTION_TABLE,
Status.RUNNING.ordinal(), Status.CANCELLING.ordinal(),
Status.SUCCEEDED.ordinal(),
Constants.UNASSIGNED_EXEC_ID);
private static final String SELECT_ALL_RUNNING_EXECUTIONS =
String.format(
"SELECT %s FROM %s WHERE trigger_instance_id in (SELECT trigger_instance_id FROM %s "
+ "WHERE dep_status = %s or dep_status = %s)",
StringUtils.join(DEPENDENCY_EXECUTIONS_COLUMNS, ","),
DEPENDENCY_EXECUTION_TABLE,
DEPENDENCY_EXECUTION_TABLE,
Status.RUNNING.ordinal(), Status.CANCELLING.ordinal());
private static final String SELECT_RECENTLY_FINISHED = String.format(
"SELECT execution_dependencies.trigger_instance_id,dep_name,starttime,endtime,dep_status,"
+ "cancelleation_cause,project_id,"
+ "project_version,flow_id,flow_version, flow_exec_id \n"
+ "FROM execution_dependencies JOIN (\n"
+ "SELECT trigger_instance_id FROM execution_dependencies where "
+ "trigger_instance_id not in (SELECT distinct(trigger_instance_id) FROM "
+ "execution_dependencies WHERE dep_status = %s or dep_status = %s)\n"
+ "GROUP BY trigger_instance_id ORDER BY max(endtime) DESC \n"
+ " limit %%s ) temp on execution_dependencies"
+ ".trigger_instance_id in (temp.trigger_instance_id);",
Status.RUNNING.ordinal(),
Status.CANCELLING.ordinal());
private static final String SELECT_RECENT_WITH_START_AND_LENGTH = String.format("SELECT %s FROM"
+ " %s WHERE trigger_instance_id IN (\n"
+ "SELECT trigger_instance_id FROM (\n"
+ "SELECT trigger_instance_id, min(starttime) AS trigger_start_time FROM %s"
+ " WHERE project_id = ? AND flow_id = ? GROUP BY "
+ "trigger_instance_id ORDER BY trigger_start_time DESC\n"
+ "LIMIT ? OFFSET ?) AS tmp);", StringUtils.join(DEPENDENCY_EXECUTIONS_COLUMNS, ","),
DEPENDENCY_EXECUTION_TABLE, DEPENDENCY_EXECUTION_TABLE);
private static final String SELECT_EXECUTION_OLDER_THAN =
String.format(
"SELECT %s FROM %s WHERE trigger_instance_id IN (SELECT "
+ "DISTINCT(trigger_instance_id) FROM %s WHERE endtime <= ? AND endtime != 0);",
StringUtils.join(DEPENDENCY_EXECUTIONS_COLUMNS, ","), DEPENDENCY_EXECUTION_TABLE,
DEPENDENCY_EXECUTION_TABLE);
private static final String DELETE_EXECUTIONS =
String.format("DELETE FROM %s WHERE trigger_instance_id IN (?);", DEPENDENCY_EXECUTION_TABLE);
private static final String UPDATE_DEPENDENCY_FLOW_EXEC_ID = String.format("UPDATE %s SET "
+ "flow_exec_id "
+ "= ? WHERE trigger_instance_id = ? AND dep_name = ? ;", DEPENDENCY_EXECUTION_TABLE);
private final ProjectLoader projectLoader;
private final DatabaseOperator dbOperator;
private final ProjectManager projectManager;
@Inject
public JdbcFlowTriggerInstanceLoaderImpl(final DatabaseOperator databaseOperator,
final ProjectLoader projectLoader, final ProjectManager projectManager) {
this.dbOperator = databaseOperator;
this.projectLoader = projectLoader;
this.projectManager = projectManager;
}
@Override
public Collection<TriggerInstance> getIncompleteTriggerInstances() {
final Collection<TriggerInstance> unfinished = new ArrayList<>();
try {
final Collection<TriggerInstance> triggerInsts = this.dbOperator
.query(SELECT_ALL_PENDING_EXECUTIONS,
new TriggerInstanceHandler(SORT_MODE.SORT_ON_START_TIME_ASC));
// select incomplete trigger instances
for (final TriggerInstance triggerInst : triggerInsts) {
if (!Status.isDone(triggerInst.getStatus()) || (triggerInst.getStatus() == Status.SUCCEEDED
&& triggerInst.getFlowExecId() == Constants.UNASSIGNED_EXEC_ID)) {
unfinished.add(triggerInst);
}
}
      // backfill flow triggers for unfinished trigger instances;
      // dedup flow config ids with a set to avoid downloading/parsing the same flow file
      // multiple times
final Set<FlowConfigID> flowConfigIDSet = unfinished.stream()
.map(triggerInstance -> new FlowConfigID(triggerInstance.getProject().getId(),
triggerInstance.getProject().getVersion(), triggerInstance.getFlowId(),
triggerInstance.getFlowVersion())).collect(Collectors.toSet());
final Map<FlowConfigID, FlowTrigger> flowTriggers = new HashMap<>();
for (final FlowConfigID flowConfigID : flowConfigIDSet) {
final File tempDir = Files.createTempDir();
try {
final File flowFile = this.projectLoader
.getUploadedFlowFile(flowConfigID.getProjectId(), flowConfigID.getProjectVersion(),
flowConfigID.getFlowId() + ".flow", flowConfigID.getFlowVersion(), tempDir);
if (flowFile != null) {
final FlowTrigger flowTrigger = FlowLoaderUtils.getFlowTriggerFromYamlFile(flowFile);
if (flowTrigger != null) {
flowTriggers.put(flowConfigID, flowTrigger);
}
} else {
logger.error("Unable to find flow file for " + flowConfigID);
}
} catch (final Exception ex) {
logger.error("error in getting flow file", ex);
} finally {
FlowLoaderUtils.cleanUpDir(tempDir);
}
}
for (final TriggerInstance triggerInst : unfinished) {
triggerInst.setFlowTrigger(flowTriggers.get(new FlowConfigID(triggerInst.getProject()
.getId(), triggerInst.getProject().getVersion(), triggerInst.getFlowId(),
triggerInst.getFlowVersion())));
}
} catch (final SQLException ex) {
handleSQLException(ex);
}
return unfinished;
}
private void handleSQLException(final SQLException ex)
throws DependencyException {
final String error = "exception when accessing db!";
logger.error(error, ex);
throw new DependencyException(error, ex);
}
@Override
public void updateAssociatedFlowExecId(final TriggerInstance triggerInst) {
    final SQLTransaction<Integer> updateFlowExecId = transOperator -> {
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
transOperator
.update(UPDATE_DEPENDENCY_FLOW_EXEC_ID, triggerInst.getFlowExecId(),
triggerInst.getId(), depInst.getDepName());
}
return null;
};
    executeTransaction(updateFlowExecId);
}
private void executeUpdate(final String query, final Object... params) {
try {
this.dbOperator.update(query, params);
} catch (final SQLException ex) {
handleSQLException(ex);
}
}
private void executeTransaction(final SQLTransaction<Integer> tran) {
try {
this.dbOperator.transaction(tran);
} catch (final SQLException ex) {
handleSQLException(ex);
}
}
@Override
public void uploadTriggerInstance(final TriggerInstance triggerInst) {
final SQLTransaction<Integer> insertTrigger = transOperator -> {
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
transOperator
.update(INSERT_DEPENDENCY, triggerInst.getId(), depInst.getDepName(),
depInst.getStartTime(),
depInst.getEndTime(),
depInst.getStatus().ordinal(),
depInst.getCancellationCause().ordinal(),
triggerInst.getProject().getId(),
triggerInst.getProject().getVersion(),
triggerInst.getFlowId(),
triggerInst.getFlowVersion(),
triggerInst.getFlowExecId());
}
return null;
};
executeTransaction(insertTrigger);
}
@Override
public void updateDependencyExecutionStatus(final DependencyInstance depInst) {
executeUpdate(UPDATE_DEPENDENCY_STATUS_ENDTIME_AND_CANCELLEATION_CAUSE,
depInst.getStatus().ordinal(),
depInst.getEndTime(),
depInst.getCancellationCause().ordinal(),
depInst.getTriggerInstance().getId(),
depInst.getDepName());
}
/**
* Retrieve recently finished trigger instances, but flow trigger properties are not populated
   * into the returned trigger instances for efficiency. Flow trigger properties are
   * retrieved only at request time.
*/
@Override
public Collection<TriggerInstance> getRecentlyFinished(final int limit) {
final String query = String.format(SELECT_RECENTLY_FINISHED, limit);
try {
return this.dbOperator
.query(query, new TriggerInstanceHandler(SORT_MODE.SORT_ON_START_TIME_ASC));
} catch (final SQLException ex) {
handleSQLException(ex);
}
return Collections.emptyList();
}
@Override
public Collection<TriggerInstance> getRunning() {
try {
//todo chengren311:
// 1. add index for the execution_dependencies table to accelerate selection.
return this.dbOperator.query(SELECT_ALL_RUNNING_EXECUTIONS, new TriggerInstanceHandler
(SORT_MODE.SORT_ON_START_TIME_ASC));
} catch (final SQLException ex) {
handleSQLException(ex);
}
return Collections.emptyList();
}
private void populateFlowTriggerProperties(final TriggerInstance triggerInstance) {
if (triggerInstance != null) {
final int projectId = triggerInstance.getProject().getId();
final int projectVersion = triggerInstance.getProject().getVersion();
final String flowFileName = triggerInstance.getFlowId() + ".flow";
final int flowVersion = triggerInstance.getFlowVersion();
final File tempDir = Files.createTempDir();
try {
final File flowFile = this.projectLoader
.getUploadedFlowFile(projectId, projectVersion, flowFileName, flowVersion, tempDir);
if (flowFile != null) {
final FlowTrigger flowTrigger = FlowLoaderUtils.getFlowTriggerFromYamlFile(flowFile);
if (flowTrigger != null) {
triggerInstance.setFlowTrigger(flowTrigger);
}
} else {
logger.error("Unable to find flow file for " + triggerInstance);
}
} catch (final Exception ex) {
logger.error("error in getting flow file", ex);
} finally {
FlowLoaderUtils.cleanUpDir(tempDir);
}
}
}
/**
* Retrieve a trigger instance given a flow execution id. Flow trigger properties will
   * also be populated into the returned trigger instance. If the flow exec id is
   * Constants.FAILED_EXEC_ID or Constants.UNASSIGNED_EXEC_ID, null is returned.
*/
@Override
public TriggerInstance getTriggerInstanceByFlowExecId(final int flowExecId) {
if (flowExecId == Constants.FAILED_EXEC_ID || flowExecId == Constants.UNASSIGNED_EXEC_ID) {
return null;
}
TriggerInstance triggerInstance = null;
try {
final Collection<TriggerInstance> res = this.dbOperator
.query(SELECT_EXECUTIONS_BY_EXEC_ID,
new TriggerInstanceHandler(SORT_MODE.SORT_ON_START_TIME_ASC), flowExecId);
triggerInstance = !res.isEmpty() ? res.iterator().next() : null;
} catch (final SQLException ex) {
handleSQLException(ex);
}
populateFlowTriggerProperties(triggerInstance);
return triggerInstance;
}
  /**
   * Retrieve trigger instances sorted on start time in descending order,
   * given projectId, flowId, start position and length.
   *
   * @param projectId project id
   * @param flowId flow id
   * @param from starting position of the range of trigger instances to retrieve
   * @param length number of consecutive trigger instances to retrieve
   */
  @Override
public Collection<TriggerInstance> getTriggerInstances(
final int projectId, final String flowId, final int from,
final int length) {
try {
final Collection<TriggerInstance> res = this.dbOperator
.query(SELECT_RECENT_WITH_START_AND_LENGTH, new TriggerInstanceHandler(SORT_MODE
.SORT_ON_START_TIME_DESC), projectId,
flowId, length, from);
return res;
} catch (final SQLException ex) {
handleSQLException(ex);
}
return Collections.emptyList();
}
@Override
public int deleteTriggerExecutionsFinishingOlderThan(final long timestamp) {
try {
final Collection<TriggerInstance> res = this.dbOperator
.query(SELECT_EXECUTION_OLDER_THAN,
new TriggerInstanceHandler(SORT_MODE.SORT_ON_START_TIME_DESC), timestamp);
final Set<String> toBeDeleted = new HashSet<>();
for (final TriggerInstance inst : res) {
        if ((inst.getStatus() == Status.CANCELLED || (inst.getStatus() == Status.SUCCEEDED && inst
            .getFlowExecId() != Constants.UNASSIGNED_EXEC_ID)) && inst.getEndTime() <= timestamp) {
toBeDeleted.add(inst.getId());
}
}
int numDeleted = 0;
if (!toBeDeleted.isEmpty()) {
final String ids = toBeDeleted.stream().map(s -> "'" + s + "'")
.collect(Collectors.joining(", "));
numDeleted = this.dbOperator.update(DELETE_EXECUTIONS.replace("?", ids));
}
logger.info("{} dependency instance record(s) deleted", numDeleted);
return numDeleted;
} catch (final SQLException ex) {
handleSQLException(ex);
return 0;
}
}
/**
* Retrieve a trigger instance given an instance id. Flow trigger properties will also be
* populated into the returned trigger instance.
*/
@Override
public TriggerInstance getTriggerInstanceById(final String triggerInstanceId) {
TriggerInstance triggerInstance = null;
try {
final Collection<TriggerInstance> res = this.dbOperator
.query(SELECT_EXECUTIONS_BY_INSTANCE_ID,
new TriggerInstanceHandler(SORT_MODE.SORT_ON_START_TIME_ASC),
triggerInstanceId);
triggerInstance = !res.isEmpty() ? res.iterator().next() : null;
} catch (final SQLException ex) {
handleSQLException(ex);
}
populateFlowTriggerProperties(triggerInstance);
return triggerInstance;
}
private enum SORT_MODE {
SORT_ON_START_TIME_DESC,
SORT_ON_START_TIME_ASC
}
public static class FlowConfigID {
private final int projectId;
    private final int projectVersion;
private final String flowId;
private final int flowVersion;
    public FlowConfigID(final int projectId, final int projectVersion, final String flowId,
        final int flowVersion) {
      this.projectId = projectId;
      this.projectVersion = projectVersion;
this.flowId = flowId;
this.flowVersion = flowVersion;
}
public int getProjectId() {
return this.projectId;
}
public int getProjectVersion() {
      return this.projectVersion;
}
public String getFlowId() {
return this.flowId;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final FlowConfigID that = (FlowConfigID) o;
return new EqualsBuilder()
.append(this.projectId, that.projectId)
          .append(this.projectVersion, that.projectVersion)
.append(this.flowVersion, that.flowVersion)
.append(this.flowId, that.flowId)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(this.projectId)
          .append(this.projectVersion)
.append(this.flowId)
.append(this.flowVersion)
.toHashCode();
}
public int getFlowVersion() {
return this.flowVersion;
}
}
private class TriggerInstanceHandler implements
ResultSetHandler<Collection<TriggerInstance>> {
private final SORT_MODE mode;
public TriggerInstanceHandler(final SORT_MODE mode) {
this.mode = mode;
}
@Override
public Collection<TriggerInstance> handle(final ResultSet rs) throws SQLException {
final Map<TriggerInstKey, List<DependencyInstance>> triggerInstMap = new HashMap<>();
while (rs.next()) {
final String triggerInstId = rs.getString(DEPENDENCY_EXECUTIONS_COLUMNS[0]);
final String depName = rs.getString(DEPENDENCY_EXECUTIONS_COLUMNS[1]);
final long startTime = rs.getLong(DEPENDENCY_EXECUTIONS_COLUMNS[2]);
final long endTime = rs.getLong(DEPENDENCY_EXECUTIONS_COLUMNS[3]);
final Status status = Status.values()[rs.getInt(DEPENDENCY_EXECUTIONS_COLUMNS[4])];
final CancellationCause cause = CancellationCause.values()[rs.getInt
(DEPENDENCY_EXECUTIONS_COLUMNS[5])];
final int projId = rs.getInt(DEPENDENCY_EXECUTIONS_COLUMNS[6]);
final int projVersion = rs.getInt(DEPENDENCY_EXECUTIONS_COLUMNS[7]);
final String flowId = rs.getString(DEPENDENCY_EXECUTIONS_COLUMNS[8]);
final int flowVersion = rs.getInt(DEPENDENCY_EXECUTIONS_COLUMNS[9]);
final Project project = JdbcFlowTriggerInstanceLoaderImpl.this.projectManager
.getProject(projId);
final int flowExecId = rs.getInt(DEPENDENCY_EXECUTIONS_COLUMNS[10]);
final TriggerInstKey key = new TriggerInstKey(triggerInstId, project.getLastModifiedUser(),
projId, projVersion, flowId, flowVersion, flowExecId, project);
List<DependencyInstance> dependencyInstanceList = triggerInstMap.get(key);
final DependencyInstance depInst = new DependencyInstance(depName, startTime, endTime,
null, status, cause);
if (dependencyInstanceList == null) {
dependencyInstanceList = new ArrayList<>();
triggerInstMap.put(key, dependencyInstanceList);
}
dependencyInstanceList.add(depInst);
}
final List<TriggerInstance> res = new ArrayList<>();
for (final Map.Entry<TriggerInstKey, List<DependencyInstance>> entry : triggerInstMap
.entrySet()) {
res.add(new TriggerInstance(entry.getKey().triggerInstId, null, entry.getKey()
.flowConfigID.flowId, entry.getKey().flowConfigID.flowVersion, entry.getKey()
.submitUser, entry.getValue(), entry.getKey().flowExecId, entry.getKey().project));
}
if (this.mode == SORT_MODE.SORT_ON_START_TIME_ASC) {
Collections.sort(res, Comparator.comparing(TriggerInstance::getStartTime));
} else if (this.mode == SORT_MODE.SORT_ON_START_TIME_DESC) {
Collections.sort(res, Comparator.comparing(TriggerInstance::getStartTime).reversed());
}
return res;
}
private class TriggerInstKey {
String triggerInstId;
FlowConfigID flowConfigID;
String submitUser;
int flowExecId;
Project project;
public TriggerInstKey(final String triggerInstId, final String submitUser, final int projId,
          final int projVersion, final String flowId, final int flowVersion, final int flowExecId,
          final Project project) {
        this.triggerInstId = triggerInstId;
        this.flowConfigID = new FlowConfigID(projId, projVersion, flowId, flowVersion);
this.submitUser = submitUser;
this.flowExecId = flowExecId;
this.project = project;
}
@Override
public boolean equals(final Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
final TriggerInstKey that = (TriggerInstKey) o;
return new EqualsBuilder()
.append(this.triggerInstId, that.triggerInstId)
.append(this.flowConfigID, that.flowConfigID)
.isEquals();
}
@Override
public int hashCode() {
return new HashCodeBuilder(17, 37)
.append(this.triggerInstId)
.append(this.flowConfigID)
.toHashCode();
}
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/plugin/FlowTriggerDependencyPluginException.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.plugin;
public class FlowTriggerDependencyPluginException extends Exception {
public FlowTriggerDependencyPluginException(final String message) {
super(message);
}
public FlowTriggerDependencyPluginException(final Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/plugin/FlowTriggerDependencyPluginManager.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.plugin;
import azkaban.flowtrigger.DependencyCheck;
import azkaban.flowtrigger.DependencyPluginConfig;
import azkaban.flowtrigger.DependencyPluginConfigImpl;
import azkaban.utils.Utils;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ConcurrentHashMap;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.io.filefilter.WildcardFileFilter;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class FlowTriggerDependencyPluginManager {
public static final String CONFIG_FILE = "dependency.properties";
public static final String PRIVATE_CONFIG_FILE = "private.properties";
public static final String DEPENDENCY_CLASS = "dependency.class";
public static final String CLASS_PATH = "dependency.classpath";
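  // An illustrative dependency.properties for a plugin (the values are hypothetical):
  //   dependency.class=com.example.KafkaDependencyCheck
  //   dependency.classpath=plugins/dependency/kafka/lib/*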
private static final Logger logger = LoggerFactory
.getLogger(FlowTriggerDependencyPluginManager.class);
private final String pluginDir;
private final Map<String, DependencyCheck> dependencyTypeMap;
private final ClassLoader prevClassLoader;
@Inject
public FlowTriggerDependencyPluginManager(final String pluginDir)
throws FlowTriggerDependencyPluginException {
this.dependencyTypeMap = new ConcurrentHashMap<>();
this.pluginDir = pluginDir;
this.prevClassLoader = Thread.currentThread().getContextClassLoader();
}
/**
   * Retrieve files with wildcard matching.
   * Only the "dir/*" pattern is supported; patterns like "dir/foo*" or "dir/*foo*" are not,
   * since users shouldn't upload jars they don't want to import.
   * Supporting "dir/*" provides another packaging option: a directory of all required jars
   * can be uploaded in addition to one fat jar. For example, "plugins/kafka/lib/*" matches
   * every file directly under lib/.
*/
private File[] getFilesMatchingPath(final String path) {
if (path.endsWith("*")) {
final File dir = new File(path.substring(0, path.lastIndexOf("/") + 1));
final FileFilter fileFilter = new WildcardFileFilter(path.substring(path.lastIndexOf("/")
+ 1));
final File[] files = dir.listFiles(fileFilter);
return files;
} else {
return new File[]{new File(path)};
}
}
private Map<String, String> readConfig(final File file) throws
FlowTriggerDependencyPluginException {
    final Properties props = new Properties();
    // try-with-resources guarantees the stream is closed, even if props.load throws
    try (final InputStream input = new BufferedInputStream(new FileInputStream(file))) {
      props.load(input);
    } catch (final Exception e) {
      logger.debug("unable to read the file " + file, e);
      throw new FlowTriggerDependencyPluginException(e);
    }
return Maps.fromProperties(props);
}
private void validatePluginConfig(final DependencyPluginConfig pluginConfig)
throws FlowTriggerDependencyPluginException {
for (final String requiredField : ImmutableSet
.of(DEPENDENCY_CLASS, CLASS_PATH)) {
if (StringUtils.isEmpty(pluginConfig.get(requiredField))) {
throw new FlowTriggerDependencyPluginException("missing " + requiredField + " in "
+ "dependency plugin properties");
}
}
}
private DependencyPluginConfig mergePluginConfig(final Map<String, String> publicProps,
final Map<String, String> privateProps) throws FlowTriggerDependencyPluginException {
final Map<String, String> combined = new HashMap<>();
combined.putAll(publicProps);
combined.putAll(privateProps);
if (combined.size() != publicProps.size() + privateProps.size()) {
throw new FlowTriggerDependencyPluginException("duplicate property found in both public and"
+ " private properties");
}
return new DependencyPluginConfigImpl(combined);
}
private DependencyCheck createDependencyCheck(final DependencyPluginConfig pluginConfig)
throws FlowTriggerDependencyPluginException {
final String classPath = pluginConfig.get(CLASS_PATH);
final String[] cpList = classPath.split(",");
final List<URL> resources = new ArrayList<>();
try {
for (final String cp : cpList) {
final File[] files = getFilesMatchingPath(cp);
if (files != null) {
for (final File file : files) {
final URL cpItem = file.toURI().toURL();
if (!resources.contains(cpItem)) {
logger.info("adding to classpath " + cpItem);
resources.add(cpItem);
}
}
}
}
} catch (final Exception ex) {
throw new FlowTriggerDependencyPluginException(ex);
}
final ClassLoader dependencyClassloader = new ParentLastURLClassLoader(
resources.toArray(new URL[resources.size()]), this.getClass().getClassLoader());
Thread.currentThread().setContextClassLoader(dependencyClassloader);
Class<? extends DependencyCheck> clazz = null;
try {
clazz = (Class<? extends DependencyCheck>) dependencyClassloader.loadClass(pluginConfig.get
(DEPENDENCY_CLASS));
return (DependencyCheck) Utils.callConstructor(clazz);
} catch (final Exception ex) {
throw new FlowTriggerDependencyPluginException(ex);
}
}
public void loadDependencyPlugin(final File pluginDir)
throws FlowTriggerDependencyPluginException {
if (pluginDir.isDirectory() && pluginDir.canRead()) {
try {
final DependencyPluginConfig pluginConfig = createPluginConfig(pluginDir);
final DependencyCheck depCheck = createDependencyCheck(pluginConfig);
final String pluginName = getPluginName(pluginDir);
depCheck.init(pluginConfig);
this.dependencyTypeMap.put(pluginName, depCheck);
} catch (final Exception ex) {
logger.error("failed to initializing plugin in " + pluginDir, ex);
throw new FlowTriggerDependencyPluginException(ex);
}
}
}
/**
* Initialize all dependency plugins.
* todo chengren311: Current design aborts loadAllPlugins if any of the plugin fails to be
* initialized.
* However, this might not be the optimal design. Suppose we have two dependency plugin types
* - MySQL and Kafka, if MySQL is down, then kafka dependency type will also be unavailable.
*/
public void loadAllPlugins() throws FlowTriggerDependencyPluginException {
    final File pluginDir = new File(this.pluginDir);
    // listFiles returns null if the plugin dir doesn't exist or isn't a directory
    final File[] pluginDirs = pluginDir.listFiles();
    if (pluginDirs != null) {
      for (final File dir : pluginDirs) {
        loadDependencyPlugin(dir);
      }
    }
//reset thread context loader so that other azkaban class will be loaded with the old
// classloader
Thread.currentThread().setContextClassLoader(this.prevClassLoader);
}
private String getPluginName(final File dependencyPluginDir) {
//the name of the dependency plugin dir is treated as the name of the plugin
return dependencyPluginDir.getName();
}
private Map<String, String> readPublicConfig(final File publicConfigFile)
throws FlowTriggerDependencyPluginException {
return readConfig(publicConfigFile);
}
/**
* read config from private property file, if the file is not present, then return empty.
*/
private Map<String, String> readPrivateConfig(final File privateConfigFile) {
try {
return readConfig(privateConfigFile);
} catch (final Exception ex) {
return new HashMap<>();
}
}
private DependencyPluginConfig createPluginConfig(final File dir) throws
FlowTriggerDependencyPluginException {
final File publicConfigFile = new File(dir.getAbsolutePath() + "/" + CONFIG_FILE);
final File privateConfigFile = new File(dir.getAbsolutePath() + "/" + PRIVATE_CONFIG_FILE);
try {
final DependencyPluginConfig pluginConfig = mergePluginConfig(
readPublicConfig(publicConfigFile),
readPrivateConfig(privateConfigFile));
validatePluginConfig(pluginConfig);
return pluginConfig;
} catch (final FlowTriggerDependencyPluginException exception) {
throw new FlowTriggerDependencyPluginException("exception when initializing plugin "
+ "config in " + dir.getAbsolutePath() + ": " + exception.getMessage());
}
}
/**
   * Return the dependency check registered for the given type.
   *
   * @return the dependency check if one has been loaded for the type, otherwise null.
*/
public DependencyCheck getDependencyCheck(final String type) {
return this.dependencyTypeMap.get(type);
}
public void shutdown() {
for (final DependencyCheck depCheck : this.dependencyTypeMap.values()) {
try {
depCheck.shutdown();
} catch (final Exception ex) {
logger.error("failed to shutdown dependency check " + depCheck, ex);
}
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/plugin/ParentLastURLClassLoader.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.plugin;
import java.net.URL;
import java.net.URLClassLoader;
/**
* A parent-last classloader that will try the child classloader first and then the parent.
* Adopted from https://stackoverflow.com/questions/5445511/how-do-i-create-a-parent-last-child-first-classloader-in-java-or-how-to-overr
*/
public class ParentLastURLClassLoader extends ClassLoader {
private final ChildURLClassLoader childClassLoader;
public ParentLastURLClassLoader(final URL[] urls, final ClassLoader parentCL) {
super(parentCL);
this.childClassLoader = new ChildURLClassLoader(urls,
new FindClassClassLoader(this.getParent()));
}
@Override
protected synchronized Class<?> loadClass(final String name, final boolean resolve)
throws ClassNotFoundException {
try {
// first we try to find a class inside the child classloader
return this.childClassLoader.findClass(name);
} catch (final ClassNotFoundException e) {
// didn't find it, try the parent
return super.loadClass(name, resolve);
}
}
/**
   * Exposes the protected findClass method of a classloader.
*/
private static class FindClassClassLoader extends ClassLoader {
public FindClassClassLoader(final ClassLoader parent) {
super(parent);
}
@Override
public Class<?> findClass(final String name) throws ClassNotFoundException {
return super.findClass(name);
}
}
  /**
   * This class resolves findClass child-first: it tries its own URLs, then the real parent.
   * We need this because findClass is protected in URLClassLoader.
   */
private static class ChildURLClassLoader extends URLClassLoader {
private final FindClassClassLoader realParent;
public ChildURLClassLoader(final URL[] urls, final FindClassClassLoader realParent) {
super(urls, null);
this.realParent = realParent;
}
@Override
public Class<?> findClass(final String name) throws ClassNotFoundException {
try {
final Class<?> loaded = super.findLoadedClass(name);
if (loaded != null) {
return loaded;
}
// first try to use the URLClassLoader findClass
return super.findClass(name);
} catch (final ClassNotFoundException e) {
// if that fails, we ask our real parent classloader to load the class (we give up)
return this.realParent.loadClass(name);
}
}
}
}
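// A minimal usage sketch (not part of the original source). The jar path and
// class name below are hypothetical.
class ParentLastURLClassLoaderExample {
  public static void main(final String[] args) throws Exception {
    final URL[] pluginJars = {
        new java.io.File("plugins/dependency/lib/plugin.jar").toURI().toURL()};
    // Classes are resolved from the plugin jars first; the application
    // classloader is only consulted as a fallback.
    final ClassLoader loader = new ParentLastURLClassLoader(
        pluginJars, Thread.currentThread().getContextClassLoader());
    final Class<?> checkerClass = loader.loadClass("com.example.MyDependencyCheck");
    System.out.println("Loaded by: " + checkerClass.getClassLoader());
  }
}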
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/quartz/FlowTriggerQuartzJob.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.quartz;
import azkaban.flowtrigger.FlowTriggerService;
import azkaban.project.FlowTrigger;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.scheduler.AbstractQuartzJob;
import javax.inject.Inject;
import org.quartz.JobDataMap;
import org.quartz.JobExecutionContext;
public class FlowTriggerQuartzJob extends AbstractQuartzJob {
public static final String SUBMIT_USER = "SUBMIT_USER";
public static final String PROJECT_ID = "PROJECT_ID";
public static final String FLOW_TRIGGER = "FLOW_TRIGGER";
public static final String FLOW_ID = "FLOW_ID";
public static final String FLOW_VERSION = "FLOW_VERSION";
public static final String JOB_NAME = "FLOW_TRIGGER";
private final FlowTriggerService triggerService;
private final ProjectManager projectManager;
@Inject
public FlowTriggerQuartzJob(final FlowTriggerService service,
final ProjectManager projectManager) {
this.triggerService = service;
this.projectManager = projectManager;
}
@Override
public void execute(final JobExecutionContext context) {
final JobDataMap data = context.getMergedJobDataMap();
final int projectId = data.getInt(PROJECT_ID);
final Project project = this.projectManager.getProject(projectId);
final String flowId = data.getString(FLOW_ID);
final int flowVersion = data.getInt(FLOW_VERSION);
final String submitUser = data.getString(SUBMIT_USER);
final FlowTrigger flowTrigger = (FlowTrigger) data.get(FLOW_TRIGGER);
this.triggerService.startTrigger(flowTrigger, flowId, flowVersion, submitUser, project);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/flowtrigger/quartz/FlowTriggerScheduler.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.flowtrigger.quartz;
import static java.util.Objects.requireNonNull;
import azkaban.flow.Flow;
import azkaban.project.FlowLoaderUtils;
import azkaban.project.FlowTrigger;
import azkaban.project.Project;
import azkaban.project.ProjectLoader;
import azkaban.project.ProjectManager;
import azkaban.project.ProjectManagerException;
import azkaban.scheduler.QuartzJobDescription;
import azkaban.scheduler.QuartzScheduler;
import com.google.common.collect.ImmutableMap;
import com.google.common.io.Files;
import com.google.gson.GsonBuilder;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.quartz.JobDataMap;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class FlowTriggerScheduler {
private static final Logger logger = LoggerFactory.getLogger(FlowTriggerScheduler.class);
private final ProjectLoader projectLoader;
private final QuartzScheduler scheduler;
private final ProjectManager projectManager;
@Inject
public FlowTriggerScheduler(final ProjectLoader projectLoader, final QuartzScheduler scheduler,
final ProjectManager projectManager) {
this.projectLoader = requireNonNull(projectLoader);
this.scheduler = requireNonNull(scheduler);
this.projectManager = requireNonNull(projectManager);
}
/**
* Schedule flows containing flow triggers for this project.
*/
public void schedule(final Project project, final String submitUser)
throws ProjectManagerException, IOException, SchedulerException {
for (final Flow flow : project.getFlows()) {
//todo chengren311: we should validate embedded flow shouldn't have flow trigger defined.
if (flow.isEmbeddedFlow()) {
        // skip scheduling embedded flows since they are not allowed to have flow triggers
continue;
}
final String flowFileName = flow.getId() + ".flow";
      final int latestFlowVersion = this.projectLoader
          .getLatestFlowVersion(flow.getProjectId(), flow.getVersion(), flowFileName);
if (latestFlowVersion > 0) {
final File tempDir = Files.createTempDir();
final File flowFile;
try {
flowFile = this.projectLoader
.getUploadedFlowFile(project.getId(), project.getVersion(),
flowFileName, latestFlowVersion, tempDir);
final FlowTrigger flowTrigger = FlowLoaderUtils.getFlowTriggerFromYamlFile(flowFile);
if (flowTrigger != null) {
final Map<String, Object> contextMap = ImmutableMap
.of(FlowTriggerQuartzJob.SUBMIT_USER, submitUser,
FlowTriggerQuartzJob.FLOW_TRIGGER, flowTrigger,
FlowTriggerQuartzJob.FLOW_ID, flow.getId(),
FlowTriggerQuartzJob.FLOW_VERSION, latestFlowVersion,
FlowTriggerQuartzJob.PROJECT_ID, project.getId());
final boolean scheduleSuccess = this.scheduler
.scheduleJobIfAbsent(flowTrigger.getSchedule().getCronExpression(),
new QuartzJobDescription
(FlowTriggerQuartzJob.class, FlowTriggerQuartzJob.JOB_NAME,
generateGroupName(flow), contextMap));
if (scheduleSuccess) {
logger.info("Successfully registered flow {}.{} to scheduler", project.getName(),
flow.getId());
} else {
logger.info("Fail to register a duplicate flow {}.{} to scheduler", project.getName(),
flow.getId());
}
}
} catch (final SchedulerException | IOException ex) {
logger.error("Error in registering flow {}.{}", project.getName(), flow.getId(), ex);
throw ex;
} finally {
FlowLoaderUtils.cleanUpDir(tempDir);
}
}
}
}
public boolean pauseFlowTriggerIfPresent(final int projectId, final String flowId)
throws SchedulerException {
return this.scheduler
.pauseJobIfPresent(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(projectId, flowId));
}
public boolean resumeFlowTriggerIfPresent(final int projectId, final String flowId) throws
SchedulerException {
return this.scheduler
.resumeJobIfPresent(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(projectId, flowId));
}
/**
* Retrieve the list of scheduled flow triggers from quartz database
*/
public List<ScheduledFlowTrigger> getScheduledFlowTriggerJobs() {
try {
final Scheduler quartzScheduler = this.scheduler.getScheduler();
final List<String> groupNames = quartzScheduler.getJobGroupNames();
final List<ScheduledFlowTrigger> flowTriggerJobDetails = new ArrayList<>();
for (final String groupName : groupNames) {
final JobKey jobKey = new JobKey(FlowTriggerQuartzJob.JOB_NAME, groupName);
ScheduledFlowTrigger scheduledFlowTrigger = null;
try {
final JobDetail job = quartzScheduler.getJobDetail(jobKey);
final JobDataMap jobDataMap = job.getJobDataMap();
final String flowId = jobDataMap.getString(FlowTriggerQuartzJob.FLOW_ID);
final int projectId = jobDataMap.getInt(FlowTriggerQuartzJob.PROJECT_ID);
final FlowTrigger flowTrigger = (FlowTrigger) jobDataMap
.get(FlowTriggerQuartzJob.FLOW_TRIGGER);
final String submitUser = jobDataMap.getString(FlowTriggerQuartzJob.SUBMIT_USER);
final List<? extends Trigger> quartzTriggers = quartzScheduler.getTriggersOfJob(jobKey);
          final boolean isPaused = this.scheduler
              .isJobPaused(FlowTriggerQuartzJob.JOB_NAME, groupName);
          final Project project = this.projectManager.getProject(projectId);
          final Flow flow = project.getFlow(flowId);
          scheduledFlowTrigger = new ScheduledFlowTrigger(projectId,
              project.getName(), flowId, flowTrigger, submitUser,
              quartzTriggers.isEmpty() ? null : quartzTriggers.get(0),
              isPaused, flow.isLocked());
} catch (final Exception ex) {
logger.error("Unable to get flow trigger by job key {}", jobKey, ex);
scheduledFlowTrigger = null;
}
flowTriggerJobDetails.add(scheduledFlowTrigger);
}
return flowTriggerJobDetails;
} catch (final Exception ex) {
logger.error("Unable to get scheduled flow triggers", ex);
return new ArrayList<>();
}
}
/**
 * Unschedule the flow triggers of all (non-embedded) flows in a project.
*/
public void unschedule(final Project project) throws SchedulerException {
for (final Flow flow : project.getFlows()) {
if (!flow.isEmbeddedFlow()) {
try {
if (this.scheduler
.unscheduleJob(FlowTriggerQuartzJob.JOB_NAME, generateGroupName(flow))) {
logger.info("Flow {}.{} unregistered from scheduler", project.getName(), flow.getId());
}
} catch (final SchedulerException e) {
logger.error("Fail to unregister flow from scheduler {}.{}", project.getName(),
flow.getId(), e);
throw e;
}
}
}
}
private String generateGroupName(final Flow flow) {
return generateGroupName(flow.getProjectId(), flow.getId());
}
private String generateGroupName(final int projectId, final String flowId) {
return String.valueOf(projectId) + "." + flowId;
}
public void start() throws SchedulerException {
this.scheduler.start();
}
public void shutdown() throws SchedulerException {
this.scheduler.shutdown();
}
public static class ScheduledFlowTrigger {
private final int projectId;
private final String projectName;
private final String flowId;
private final FlowTrigger flowTrigger;
private final Trigger quartzTrigger;
private final String submitUser;
private final boolean isPaused;
private final boolean isLocked;
public ScheduledFlowTrigger(final int projectId, final String projectName, final String flowId,
final FlowTrigger flowTrigger, final String submitUser,
final Trigger quartzTrigger, final boolean isPaused, final boolean isLocked) {
this.projectId = projectId;
this.projectName = projectName;
this.flowId = flowId;
this.flowTrigger = flowTrigger;
this.submitUser = submitUser;
this.quartzTrigger = quartzTrigger;
this.isPaused = isPaused;
this.isLocked = isLocked;
}
public boolean isPaused() {
return this.isPaused;
}
public int getProjectId() {
return this.projectId;
}
public String getProjectName() {
return this.projectName;
}
public String getFlowId() {
return this.flowId;
}
public FlowTrigger getFlowTrigger() {
return this.flowTrigger;
}
public String getDependencyListJson() {
return new GsonBuilder().setPrettyPrinting().create()
.toJson(this.flowTrigger.getDependencies());
}
public Trigger getQuartzTrigger() {
return this.quartzTrigger;
}
public String getSubmitUser() {
return this.submitUser;
}
    public boolean isLocked() {
      return this.isLocked;
    }
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/scheduler/AbstractQuartzJob.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.scheduler;
import java.io.Serializable;
import org.quartz.Job;
import org.quartz.JobExecutionContext;
public abstract class AbstractQuartzJob implements Job {
  /**
   * Cast the object back to its original type. The object must implement Serializable.
   */
protected static <T extends Serializable> T asT(final Object service, final Class<T> type) {
return type.cast(service);
}
@Override
public abstract void execute(JobExecutionContext context);
protected Object getKey(final JobExecutionContext context, final String key) {
return context.getMergedJobDataMap().get(key);
}
}
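// A minimal subclass sketch (not part of the original source); the data-map
// key "MY_PAYLOAD" is hypothetical.
class ExampleQuartzJob extends AbstractQuartzJob {
  @Override
  public void execute(final JobExecutionContext context) {
    // getKey pulls a value out of the merged job data map; asT casts it back
    // to the Serializable type it was stored as.
    final String payload = asT(getKey(context, "MY_PAYLOAD"), String.class);
    System.out.println("payload = " + payload);
  }
}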
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/scheduler/QuartzJobDescription.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.scheduler;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
/**
* Manage one quartz job's variables. Every AZ Quartz Job should come with a QuartzJobDescription.
*/
public class QuartzJobDescription<T extends AbstractQuartzJob> {
private final String groupName;
private final String jobName;
private final Class<T> jobClass;
private final Map<String, ? extends Serializable> contextMap;
public QuartzJobDescription(final Class<T> jobClass,
final String jobName, final String groupName,
final Map<String, ? extends Serializable> contextMap) {
/**
* This check is necessary for raw type. Please see test
* {@link QuartzJobDescriptionTest#testCreateQuartzJobDescription2}
*/
if (jobClass.getSuperclass() != AbstractQuartzJob.class) {
throw new ClassCastException("jobClass must extend AbstractQuartzJob class");
}
this.jobClass = jobClass;
this.jobName = jobName;
this.groupName = groupName;
this.contextMap = contextMap;
}
public QuartzJobDescription(final Class<T> jobClass,
final String jobName, final String groupName) {
this(jobClass, jobName, groupName, new HashMap<String, String>());
}
  public String getJobName() {
    return this.jobName;
  }
public Class<? extends AbstractQuartzJob> getJobClass() {
return this.jobClass;
}
public Map<String, ? extends Serializable> getContextMap() {
return this.contextMap;
}
@Override
public String toString() {
return "QuartzJobDescription{" +
"jobClass=" + this.jobClass +
", groupName='" + this.groupName + '\'' +
", contextMap=" + this.contextMap +
'}';
}
public String getGroupName() {
return this.groupName;
}
}
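// A minimal construction sketch (not part of the original source). The group
// name "1.exampleFlow" follows the "$projectId.$flowName" convention used for
// flow schedules; the flow id value is hypothetical.
class QuartzJobDescriptionExample {
  static QuartzJobDescription<azkaban.flowtrigger.quartz.FlowTriggerQuartzJob> describe() {
    final Map<String, String> contextMap = new HashMap<>();
    contextMap.put(azkaban.flowtrigger.quartz.FlowTriggerQuartzJob.FLOW_ID, "exampleFlow");
    return new QuartzJobDescription<>(
        azkaban.flowtrigger.quartz.FlowTriggerQuartzJob.class,
        azkaban.flowtrigger.quartz.FlowTriggerQuartzJob.JOB_NAME,
        "1.exampleFlow", contextMap);
  }
}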
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/scheduler/QuartzScheduler.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.scheduler;
import static azkaban.ServiceProvider.SERVICE_PROVIDER;
import static java.util.Objects.requireNonNull;
import azkaban.Constants.ConfigurationKeys;
import azkaban.utils.Props;
import com.google.common.annotations.VisibleForTesting;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.quartz.CronExpression;
import org.quartz.CronScheduleBuilder;
import org.quartz.JobBuilder;
import org.quartz.JobDetail;
import org.quartz.JobKey;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.Trigger;
import org.quartz.Trigger.TriggerState;
import org.quartz.TriggerBuilder;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Manages Quartz schedules. Azkaban regards QuartzJob and QuartzTrigger as a one-to-one
 * mapping.
 * Quartz job key naming standard:
 * A job key is composed of a job name and a group name. The job type denotes the job name;
 * project id + flow name denotes the group name,
 * e.g. FLOW_TRIGGER as job name, 1.flow1 as group name.
 */
@Singleton
public class QuartzScheduler {
private static final Logger logger = LoggerFactory.getLogger(QuartzScheduler.class);
private Scheduler scheduler = null;
@Inject
public QuartzScheduler(final Props azProps) throws SchedulerException {
if (!azProps.getBoolean(ConfigurationKeys.ENABLE_QUARTZ, false)) {
return;
}
    // TODO kunkun-tang: Many quartz properties should have defaults so that they don't
    // necessarily need to be checked into azkaban.properties. Also, we should only assemble
    // Quartz-related properties here, which should be done in the Azkaban WebServer Guice module.
final StdSchedulerFactory schedulerFactory =
new StdSchedulerFactory(azProps.toAllProperties());
this.scheduler = schedulerFactory.getScheduler();
    // Currently Quartz only supports internal job schedules. When we migrate to user production
    // flows, we will need to construct a Guice-free JobFactory.
this.scheduler.setJobFactory(SERVICE_PROVIDER.getInstance(SchedulerJobFactory.class));
}
public void start() throws SchedulerException {
this.scheduler.start();
logger.info("Quartz Scheduler started.");
}
@VisibleForTesting
void cleanup() throws SchedulerException {
this.scheduler.clear();
}
public void shutdown() throws SchedulerException {
this.scheduler.shutdown();
logger.info("Quartz Scheduler shut down.");
}
  /**
   * Pause a job if it's present.
   * @param jobName
   * @param groupName
   * @return true if the job has been paused, false if the job doesn't exist.
   * @throws SchedulerException
   */
public synchronized boolean pauseJobIfPresent(final String jobName, final String groupName)
throws SchedulerException {
if (ifJobExist(jobName, groupName)) {
this.scheduler.pauseJob(new JobKey(jobName, groupName));
return true;
} else {
return false;
}
}
/**
* Check if job is paused.
*
* @return true if job is paused, false otherwise.
*/
public synchronized boolean isJobPaused(final String jobName, final String groupName)
throws SchedulerException {
if (!ifJobExist(jobName, groupName)) {
      throw new SchedulerException(String.format("Job (job name %s, group name %s) doesn't "
          + "exist", jobName, groupName));
}
final JobKey jobKey = new JobKey(jobName, groupName);
final JobDetail jobDetail = this.scheduler.getJobDetail(jobKey);
final List<? extends Trigger> triggers = this.scheduler.getTriggersOfJob(jobDetail.getKey());
for (final Trigger trigger : triggers) {
final TriggerState triggerState = this.scheduler.getTriggerState(trigger.getKey());
if (TriggerState.PAUSED.equals(triggerState)) {
return true;
}
}
return false;
}
  /**
   * Resume a job.
   * @param jobName
   * @param groupName
   * @return true if the job has been resumed, false if the job doesn't exist.
   * @throws SchedulerException
   */
public synchronized boolean resumeJobIfPresent(final String jobName, final String groupName)
throws SchedulerException {
if (ifJobExist(jobName, groupName)) {
this.scheduler.resumeJob(new JobKey(jobName, groupName));
return true;
} else {
return false;
}
}
/**
* Unschedule a job.
* @param jobName
* @param groupName
* @return true if job is found and unscheduled.
* @throws SchedulerException
*/
public synchronized boolean unscheduleJob(final String jobName, final String groupName) throws
SchedulerException {
return this.scheduler.deleteJob(new JobKey(jobName, groupName));
}
  /**
   * Only cron-based schedule registration is supported. Since registration may happen
   * concurrently while uploading projects, this method is synchronized to ensure thread safety.
*
* @param cronExpression the cron schedule for this job
* @param jobDescription Regarding QuartzJobDescription#groupName, in order to guarantee no
* duplicate quartz schedules, we design the naming convention depending on use cases: <ul>
* <li>User flow schedule: we use {@link JobKey#JobKey} to represent the identity of a
* flow's schedule. The format follows "$projectID.$flowName" to guarantee no duplicates.
 * <li>Quartz schedule for AZ internal use: the groupName should start with a letter, rather
 * than a number as in the first case.</ul>
*
* @return true if job has been scheduled, false if the same job exists already.
*/
public synchronized boolean scheduleJobIfAbsent(final String cronExpression, final QuartzJobDescription
jobDescription) throws SchedulerException {
requireNonNull(jobDescription, "jobDescription is null");
if (ifJobExist(jobDescription.getJobName(), jobDescription.getGroupName())) {
logger.warn(String.format("can not register existing job with job name: "
+ "%s and group name: %s", jobDescription.getJobName(), jobDescription.getGroupName()));
return false;
}
if (!CronExpression.isValidExpression(cronExpression)) {
throw new SchedulerException(
"The cron expression string <" + cronExpression + "> is not valid.");
}
// TODO kunkun-tang: we will modify this when we start supporting multi schedules per flow.
final JobDetail job = JobBuilder.newJob(jobDescription.getJobClass())
.withIdentity(jobDescription.getJobName(), jobDescription.getGroupName()).build();
// Add external dependencies to Job Data Map.
job.getJobDataMap().putAll(jobDescription.getContextMap());
// TODO kunkun-tang: Need management code to deal with different misfire policy
final Trigger trigger = TriggerBuilder
.newTrigger()
.withSchedule(
CronScheduleBuilder.cronSchedule(cronExpression)
.withMisfireHandlingInstructionFireAndProceed()
)
.build();
this.scheduler.scheduleJob(job, trigger);
logger.info("Quartz Schedule with jobDetail " + job.getDescription() + " is registered.");
return true;
}
@VisibleForTesting
boolean ifJobExist(final String jobName, final String groupName)
throws SchedulerException {
return this.scheduler.getJobDetail(new JobKey(jobName, groupName)) != null;
}
public Scheduler getScheduler() {
return this.scheduler;
}
}
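// A minimal scheduling sketch (not part of the original source). The cron
// expression is hypothetical; the job description is built the same way
// FlowTriggerScheduler does it, and the pause/unschedule calls simply
// illustrate the job lifecycle.
class QuartzSchedulerExample {
  static void scheduleExample(final QuartzScheduler quartzScheduler,
      final QuartzJobDescription<?> description) throws SchedulerException {
    quartzScheduler.start();
    // Fires at 05:00 every day; returns false if the same job already exists.
    final boolean scheduled =
        quartzScheduler.scheduleJobIfAbsent("0 0 5 * * ?", description);
    if (scheduled) {
      quartzScheduler.pauseJobIfPresent(description.getJobName(),
          description.getGroupName());
      quartzScheduler.unscheduleJob(description.getJobName(),
          description.getGroupName());
    }
  }
}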
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/scheduler/SchedulerJobFactory.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.scheduler;
import com.google.inject.Injector;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.quartz.Job;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.spi.JobFactory;
import org.quartz.spi.TriggerFiredBundle;
/**
 * Produce Guice-able Jobs in this custom-defined Job Factory.
 *
 * In order to allow Quartz jobs to easily inject dependencies, we create this factory. Every
 * Quartz job is constructed by the newJob method.
 */
@Singleton
public class SchedulerJobFactory implements JobFactory {
private final Injector injector;
@Inject
public SchedulerJobFactory(final Injector injector) {
this.injector = injector;
}
@Override
public Job newJob(final TriggerFiredBundle bundle, final Scheduler scheduler)
throws SchedulerException {
return (Job) this.injector.getInstance(bundle.getJobDetail()
.getJobClass());
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/AzkabanWebServer.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp;
import static azkaban.ServiceProvider.SERVICE_PROVIDER;
import static java.util.Objects.requireNonNull;
import azkaban.AzkabanCommonModule;
import azkaban.Constants;
import azkaban.Constants.ConfigurationKeys;
import azkaban.database.AzkabanDatabaseSetup;
import azkaban.executor.ExecutionController;
import azkaban.executor.ExecutorManager;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.flowtrigger.FlowTriggerService;
import azkaban.flowtrigger.quartz.FlowTriggerScheduler;
import azkaban.jmx.JmxExecutionController;
import azkaban.jmx.JmxExecutorManager;
import azkaban.jmx.JmxJettyServer;
import azkaban.jmx.JmxTriggerManager;
import azkaban.metrics.MetricsManager;
import azkaban.project.ProjectManager;
import azkaban.scheduler.ScheduleManager;
import azkaban.server.AzkabanServer;
import azkaban.server.IMBeanRegistrable;
import azkaban.server.MBeanRegistrationManager;
import azkaban.server.session.SessionCache;
import azkaban.trigger.TriggerManager;
import azkaban.trigger.TriggerManagerException;
import azkaban.trigger.builtin.BasicTimeChecker;
import azkaban.trigger.builtin.CreateTriggerAction;
import azkaban.trigger.builtin.ExecuteFlowAction;
import azkaban.trigger.builtin.ExecutionChecker;
import azkaban.trigger.builtin.KillExecutionAction;
import azkaban.trigger.builtin.SlaAlertAction;
import azkaban.trigger.builtin.SlaChecker;
import azkaban.user.UserManager;
import azkaban.utils.FileIOUtils;
import azkaban.utils.PluginUtils;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.StdOutErrRedirect;
import azkaban.utils.Utils;
import azkaban.webapp.plugin.PluginRegistry;
import azkaban.webapp.plugin.TriggerPlugin;
import azkaban.webapp.plugin.ViewerPlugin;
import azkaban.webapp.servlet.AbstractAzkabanServlet;
import azkaban.webapp.servlet.ExecutorServlet;
import azkaban.webapp.servlet.FlowTriggerInstanceServlet;
import azkaban.webapp.servlet.FlowTriggerServlet;
import azkaban.webapp.servlet.HistoryServlet;
import azkaban.webapp.servlet.IndexRedirectServlet;
import azkaban.webapp.servlet.JMXHttpServlet;
import azkaban.webapp.servlet.NoteServlet;
import azkaban.webapp.servlet.ProjectManagerServlet;
import azkaban.webapp.servlet.ProjectServlet;
import azkaban.webapp.servlet.ScheduleServlet;
import azkaban.webapp.servlet.StatsServlet;
import azkaban.webapp.servlet.StatusServlet;
import azkaban.webapp.servlet.TriggerManagerServlet;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.linkedin.restli.server.RestliServlet;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import javax.inject.Inject;
import javax.inject.Singleton;
import javax.management.ObjectName;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.log4j.jmx.HierarchyDynamicMBean;
import org.apache.velocity.app.VelocityEngine;
import org.joda.time.DateTimeZone;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.servlet.Context;
import org.mortbay.jetty.servlet.DefaultServlet;
import org.mortbay.jetty.servlet.ServletHolder;
import org.mortbay.thread.QueuedThreadPool;
/**
* The Azkaban Jetty server class
*
 * Global azkaban properties for setup. All of them are optional unless otherwise marked:
 * azkaban.name - The displayed name of this instance.
 * azkaban.label - Short descriptor of this Azkaban instance.
 * azkaban.color - Theme color.
 * azkaban.temp.dir - Temp dir used by Azkaban for various file uses.
 * web.resource.dir - The directory that contains the static web files.
 * default.timezone.id - The timezone id, e.g. America/Los_Angeles.
 *
 * user.manager.class - The UserManager class used for the user manager. Default is XmlUserManager.
 * project.manager.class - The ProjectManager used to load projects.
 * project.global.properties - The base properties inherited by all projects and jobs.
 *
 * jetty.maxThreads - Number of threads for Jetty.
 * jetty.ssl.port - The ssl port used for sessionizing.
 * jetty.keystore - Jetty keystore.
 * jetty.keypassword - Jetty keystore password.
 * jetty.truststore - Jetty truststore.
 * jetty.trustpassword - Jetty truststore password.
*/
@Singleton
public class AzkabanWebServer extends AzkabanServer implements IMBeanRegistrable {
public static final String DEFAULT_CONF_PATH = "conf";
private static final String AZKABAN_ACCESS_LOGGER_NAME =
"azkaban.webapp.servlet.LoginAbstractAzkabanServlet";
private static final Logger logger = Logger.getLogger(AzkabanWebServer.class);
private static final int MAX_FORM_CONTENT_SIZE = 10 * 1024 * 1024;
private static final String DEFAULT_TIMEZONE_ID = "default.timezone.id";
private static final String DEFAULT_STATIC_DIR = "";
@Deprecated
private static AzkabanWebServer app;
private final MBeanRegistrationManager mbeanRegistrationManager = new MBeanRegistrationManager();
private final VelocityEngine velocityEngine;
private final StatusService statusService;
private final Server server;
private final UserManager userManager;
private final ProjectManager projectManager;
private final ExecutorManagerAdapter executorManagerAdapter;
private final ScheduleManager scheduleManager;
private final TriggerManager triggerManager;
private final MetricsManager metricsManager;
private final Props props;
private final SessionCache sessionCache;
private final FlowTriggerScheduler scheduler;
private final FlowTriggerService flowTriggerService;
private Map<String, TriggerPlugin> triggerPlugins;
private final ExecutionLogsCleaner executionLogsCleaner;
@Inject
public AzkabanWebServer(final Props props,
final Server server,
final ExecutorManagerAdapter executorManagerAdapter,
final ProjectManager projectManager,
final TriggerManager triggerManager,
final MetricsManager metricsManager,
final SessionCache sessionCache,
final UserManager userManager,
final ScheduleManager scheduleManager,
final VelocityEngine velocityEngine,
final FlowTriggerScheduler scheduler,
final FlowTriggerService flowTriggerService,
final StatusService statusService,
final ExecutionLogsCleaner executionLogsCleaner) {
this.props = requireNonNull(props, "props is null.");
this.server = requireNonNull(server, "server is null.");
this.executorManagerAdapter = requireNonNull(executorManagerAdapter,
"executorManagerAdapter is null.");
this.projectManager = requireNonNull(projectManager, "projectManager is null.");
this.triggerManager = requireNonNull(triggerManager, "triggerManager is null.");
this.metricsManager = requireNonNull(metricsManager, "metricsManager is null.");
this.sessionCache = requireNonNull(sessionCache, "sessionCache is null.");
this.userManager = requireNonNull(userManager, "userManager is null.");
this.scheduleManager = requireNonNull(scheduleManager, "scheduleManager is null.");
this.velocityEngine = requireNonNull(velocityEngine, "velocityEngine is null.");
this.statusService = statusService;
this.scheduler = requireNonNull(scheduler, "scheduler is null.");
this.flowTriggerService = requireNonNull(flowTriggerService, "flow trigger service is null");
this.executionLogsCleaner = requireNonNull(executionLogsCleaner, "executionlogcleaner is null");
loadBuiltinCheckersAndActions();
// load all trigger agents here
final String triggerPluginDir =
props.getString("trigger.plugin.dir", "plugins/triggers");
new PluginCheckerAndActionsLoader().load(triggerPluginDir);
// Setup time zone
if (props.containsKey(DEFAULT_TIMEZONE_ID)) {
final String timezoneId = props.getString(DEFAULT_TIMEZONE_ID);
System.setProperty("user.timezone", timezoneId);
TimeZone timeZone = TimeZone.getTimeZone(timezoneId);
TimeZone.setDefault(timeZone);
DateTimeZone.setDefault(DateTimeZone.forTimeZone(timeZone));
logger.info("Setting timezone to " + timezoneId);
}
configureMBeanServer();
}
@Deprecated
public static AzkabanWebServer getInstance() {
return app;
}
public static void main(final String[] args) throws Exception {
// Redirect all std out and err messages into log4j
StdOutErrRedirect.redirectOutAndErrToLog();
logger.info("Starting Jetty Azkaban Web Server...");
final Props props = AzkabanServer.loadProps(args);
if (props == null) {
logger.error("Azkaban Properties not loaded. Exiting..");
System.exit(1);
}
/* Initialize Guice Injector */
final Injector injector = Guice.createInjector(
new AzkabanCommonModule(props),
new AzkabanWebServerModule(props)
);
SERVICE_PROVIDER.setInjector(injector);
launch(injector.getInstance(AzkabanWebServer.class));
}
public static void launch(final AzkabanWebServer webServer) throws Exception {
/* This creates the Web Server instance */
app = webServer;
webServer.executorManagerAdapter.start();
webServer.executionLogsCleaner.start();
// TODO refactor code into ServerProvider
webServer.prepareAndStartServer();
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
try {
if (webServer.props.getBoolean(ConfigurationKeys.ENABLE_QUARTZ, false)) {
AzkabanWebServer.logger.info("Shutting down flow trigger scheduler...");
webServer.scheduler.shutdown();
}
} catch (final Exception e) {
AzkabanWebServer.logger.error("Exception while shutting down flow trigger service.", e);
}
try {
if (webServer.props.getBoolean(ConfigurationKeys.ENABLE_QUARTZ, false)) {
AzkabanWebServer.logger.info("Shutting down flow trigger service...");
webServer.flowTriggerService.shutdown();
}
} catch (final Exception e) {
AzkabanWebServer.logger.error("Exception while shutting down flow trigger service.", e);
}
try {
AzkabanWebServer.logger.info("Logging top memory consumers...");
logTopMemoryConsumers();
AzkabanWebServer.logger.info("Shutting down http server...");
webServer.close();
} catch (final Exception e) {
AzkabanWebServer.logger.error("Exception while shutting down web server.", e);
}
AzkabanWebServer.logger.info("kk thx bye.");
}
public void logTopMemoryConsumers() throws Exception {
if (new File("/bin/bash").exists() && new File("/bin/ps").exists()
&& new File("/usr/bin/head").exists()) {
AzkabanWebServer.logger.info("logging top memory consumer");
final java.lang.ProcessBuilder processBuilder =
new java.lang.ProcessBuilder("/bin/bash", "-c",
"/bin/ps aux --sort -rss | /usr/bin/head");
final Process p = processBuilder.start();
p.waitFor();
final InputStream is = p.getInputStream();
final java.io.BufferedReader reader =
new java.io.BufferedReader(new InputStreamReader(is, StandardCharsets.UTF_8));
String line = null;
while ((line = reader.readLine()) != null) {
AzkabanWebServer.logger.info(line);
}
is.close();
}
}
});
}
private static void loadViewerPlugins(final Context root, final String pluginPath,
final VelocityEngine ve) {
final File viewerPluginPath = new File(pluginPath);
if (!viewerPluginPath.exists()) {
return;
}
final ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
final File[] pluginDirs = viewerPluginPath.listFiles();
final ArrayList<String> jarPaths = new ArrayList<>();
for (final File pluginDir : pluginDirs) {
// load plugin properties
final Props pluginProps = PropsUtils.loadPluginProps(pluginDir);
if (pluginProps == null) {
continue;
}
final String pluginName = pluginProps.getString("viewer.name");
final String pluginWebPath = pluginProps.getString("viewer.path");
final String pluginJobTypes = pluginProps.getString("viewer.jobtypes", null);
final int pluginOrder = pluginProps.getInt("viewer.order", 0);
final boolean pluginHidden = pluginProps.getBoolean("viewer.hidden", false);
final List<String> extLibClassPaths =
pluginProps.getStringList("viewer.external.classpaths",
(List<String>) null);
final String pluginClass = pluginProps.getString("viewer.servlet.class");
if (pluginClass == null) {
logger.error("Viewer class is not set.");
continue;
} else {
logger.info("Plugin class " + pluginClass);
}
Class<?> viewerClass =
PluginUtils.getPluginClass(pluginClass, pluginDir, extLibClassPaths, parentLoader);
if (viewerClass == null) {
continue;
}
final String source = FileIOUtils.getSourcePathFromClass(viewerClass);
logger.info("Source jar " + source);
jarPaths.add("jar:file:" + source);
Constructor<?> constructor = null;
try {
constructor = viewerClass.getConstructor(Props.class);
} catch (final NoSuchMethodException e) {
logger.error("Constructor not found in " + pluginClass);
continue;
}
      Object obj = null;
      try {
        obj = constructor.newInstance(pluginProps);
      } catch (final Exception e) {
        logger.error("Failed to instantiate viewer plugin " + pluginClass, e);
      }
if (!(obj instanceof AbstractAzkabanServlet)) {
logger.error("The object is not an AbstractAzkabanServlet");
continue;
}
final AbstractAzkabanServlet avServlet = (AbstractAzkabanServlet) obj;
root.addServlet(new ServletHolder(avServlet), "/" + pluginWebPath + "/*");
PluginRegistry.getRegistry().register(
new ViewerPlugin(pluginName, pluginWebPath, pluginOrder,
pluginHidden, pluginJobTypes));
}
// Velocity needs the jar resource paths to be set.
final String jarResourcePath = StringUtils.join(jarPaths, ", ");
logger.info("Setting jar resource path " + jarResourcePath);
ve.addProperty("jar.resource.loader.path", jarResourcePath);
}
public FlowTriggerService getFlowTriggerService() {
return this.flowTriggerService;
}
public FlowTriggerScheduler getScheduler() {
return this.scheduler;
}
private void validateDatabaseVersion()
throws IOException, SQLException {
final boolean checkDB = this.props
.getBoolean(AzkabanDatabaseSetup.DATABASE_CHECK_VERSION, false);
if (checkDB) {
final AzkabanDatabaseSetup setup = new AzkabanDatabaseSetup(this.props);
setup.loadTableInfo();
if (setup.needsUpdating()) {
logger.error("Database is out of date.");
setup.printUpgradePlan();
logger.error("Exiting with error.");
System.exit(-1);
}
}
}
private void configureRoutes() throws TriggerManagerException {
final String staticDir =
this.props.getString("web.resource.dir", DEFAULT_STATIC_DIR);
logger.info("Setting up web resource dir " + staticDir);
final Context root = new Context(this.server, "/", Context.SESSIONS);
root.setMaxFormContentSize(MAX_FORM_CONTENT_SIZE);
final String defaultServletPath =
this.props.getString("azkaban.default.servlet.path", "/index");
root.setResourceBase(staticDir);
final ServletHolder indexRedirect =
new ServletHolder(new IndexRedirectServlet(defaultServletPath));
root.addServlet(indexRedirect, "/");
final ServletHolder index = new ServletHolder(new ProjectServlet());
root.addServlet(index, "/index");
final ServletHolder staticServlet = new ServletHolder(new DefaultServlet());
root.addServlet(staticServlet, "/css/*");
root.addServlet(staticServlet, "/js/*");
root.addServlet(staticServlet, "/images/*");
root.addServlet(staticServlet, "/fonts/*");
root.addServlet(staticServlet, "/favicon.ico");
root.addServlet(new ServletHolder(new ProjectManagerServlet()), "/manager");
root.addServlet(new ServletHolder(new ExecutorServlet()), "/executor");
root.addServlet(new ServletHolder(new HistoryServlet()), "/history");
root.addServlet(new ServletHolder(new ScheduleServlet()), "/schedule");
root.addServlet(new ServletHolder(new JMXHttpServlet()), "/jmx");
root.addServlet(new ServletHolder(new TriggerManagerServlet()), "/triggers");
root.addServlet(new ServletHolder(new StatsServlet()), "/stats");
root.addServlet(new ServletHolder(new StatusServlet(this.statusService)), "/status");
root.addServlet(new ServletHolder(new NoteServlet()), "/notes");
root.addServlet(new ServletHolder(new FlowTriggerInstanceServlet()), "/flowtriggerinstance");
root.addServlet(new ServletHolder(new FlowTriggerServlet()), "/flowtrigger");
final ServletHolder restliHolder = new ServletHolder(new RestliServlet());
restliHolder.setInitParameter("resourcePackages", "azkaban.restli");
root.addServlet(restliHolder, "/restli/*");
final String viewerPluginDir =
this.props.getString("viewer.plugin.dir", "plugins/viewer");
loadViewerPlugins(root, viewerPluginDir, getVelocityEngine());
// Trigger Plugin Loader
final TriggerPluginLoader triggerPluginLoader = new TriggerPluginLoader(this.props);
final Map<String, TriggerPlugin> triggerPlugins = triggerPluginLoader.loadTriggerPlugins(root);
setTriggerPlugins(triggerPlugins);
// always have basic time trigger
// TODO: find something else to do the job
getTriggerManager().start();
root.setAttribute(Constants.AZKABAN_SERVLET_CONTEXT_KEY, this);
}
private void prepareAndStartServer()
throws Exception {
validateDatabaseVersion();
createThreadPool();
configureRoutes();
if (this.props.getBoolean(Constants.ConfigurationKeys.IS_METRICS_ENABLED, false)) {
startWebMetrics();
}
if (this.props.getBoolean(ConfigurationKeys.ENABLE_QUARTZ, false)) {
      // flowTriggerService needs to start before the scheduler begins scheduling
      // existing flow triggers
logger.info("starting flow trigger service");
this.flowTriggerService.start();
logger.info("starting flow trigger scheduler");
this.scheduler.start();
}
try {
this.server.start();
logger.info("Server started");
} catch (final Exception e) {
logger.warn(e);
Utils.croak(e.getMessage(), 1);
}
}
private void createThreadPool() {
final int maxThreads = this.props
.getInt("jetty.maxThreads", Constants.DEFAULT_JETTY_MAX_THREAD_COUNT);
final QueuedThreadPool httpThreadPool = new QueuedThreadPool(maxThreads);
this.server.setThreadPool(httpThreadPool);
addThreadPoolGauges(httpThreadPool);
}
private void addThreadPoolGauges(final QueuedThreadPool threadPool) {
// The number of idle threads in Jetty thread pool
this.metricsManager.addGauge("JETTY-NumIdleThreads", threadPool::getIdleThreads);
// The number of threads in Jetty thread pool. The formula is:
// threads = idleThreads + busyThreads
this.metricsManager.addGauge("JETTY-NumTotalThreads", threadPool::getThreads);
// The number of requests queued in the Jetty thread pool.
this.metricsManager.addGauge("JETTY-NumQueueSize", threadPool::getQueueSize);
}
private void startWebMetrics() throws Exception {
this.metricsManager
.addGauge("WEB-NumQueuedFlows", this.executorManagerAdapter::getQueuedFlowSize);
/*
* TODO: Currently {@link ExecutorManager#getRunningFlows()} includes both running and non-dispatched flows.
* Originally we would like to do a subtraction between getRunningFlows and {@link ExecutorManager#getQueuedFlowSize()},
* in order to have the correct runnable flows.
* However, both getRunningFlows and getQueuedFlowSize are not synchronized, such that we can not make
* a thread safe subtraction. We need to fix this in the future.
*/
this.metricsManager.addGauge("WEB-NumRunningFlows",
() -> (this.executorManagerAdapter.getRunningFlows().size()));
this.metricsManager.addGauge("session-count", this.sessionCache::getSessionCount);
logger.info("starting reporting Web Server Metrics");
this.metricsManager.startReporting("AZ-WEB", this.props);
}
private void loadBuiltinCheckersAndActions() {
logger.info("Loading built-in checker and action types");
ExecuteFlowAction.setExecutorManager(this.executorManagerAdapter);
ExecuteFlowAction.setProjectManager(this.projectManager);
ExecuteFlowAction.setTriggerManager(this.triggerManager);
KillExecutionAction.setExecutorManager(this.executorManagerAdapter);
CreateTriggerAction.setTriggerManager(this.triggerManager);
ExecutionChecker.setExecutorManager(this.executorManagerAdapter);
this.triggerManager.registerCheckerType(BasicTimeChecker.type, BasicTimeChecker.class);
this.triggerManager.registerCheckerType(SlaChecker.type, SlaChecker.class);
this.triggerManager.registerCheckerType(ExecutionChecker.type, ExecutionChecker.class);
this.triggerManager.registerActionType(ExecuteFlowAction.type, ExecuteFlowAction.class);
this.triggerManager.registerActionType(KillExecutionAction.type, KillExecutionAction.class);
this.triggerManager.registerActionType(SlaAlertAction.type, SlaAlertAction.class);
this.triggerManager.registerActionType(CreateTriggerAction.type, CreateTriggerAction.class);
}
/**
* Returns the web session cache.
*/
@Override
public SessionCache getSessionCache() {
return this.sessionCache;
}
/**
* Returns the velocity engine for pages to use.
*/
@Override
public VelocityEngine getVelocityEngine() {
return this.velocityEngine;
}
@Override
public UserManager getUserManager() {
return this.userManager;
}
public ProjectManager getProjectManager() {
return this.projectManager;
}
public ExecutorManagerAdapter getExecutorManager() {
return this.executorManagerAdapter;
}
public ScheduleManager getScheduleManager() {
return this.scheduleManager;
}
public TriggerManager getTriggerManager() {
return this.triggerManager;
}
/**
* Returns the global azkaban properties
*/
@Override
public Props getServerProps() {
return this.props;
}
public Map<String, TriggerPlugin> getTriggerPlugins() {
return this.triggerPlugins;
}
private void setTriggerPlugins(final Map<String, TriggerPlugin> triggerPlugins) {
this.triggerPlugins = triggerPlugins;
}
@Override
public MBeanRegistrationManager getMBeanRegistrationManager() {
return this.mbeanRegistrationManager;
}
@Override
public void configureMBeanServer() {
logger.info("Registering MBeans...");
this.mbeanRegistrationManager.registerMBean("jetty", new JmxJettyServer(this.server));
this.mbeanRegistrationManager.registerMBean("triggerManager", new JmxTriggerManager(this.triggerManager));
if (this.executorManagerAdapter instanceof ExecutorManager) {
this.mbeanRegistrationManager.registerMBean("executorManager",
new JmxExecutorManager((ExecutorManager) this.executorManagerAdapter));
} else if (this.executorManagerAdapter instanceof ExecutionController) {
this.mbeanRegistrationManager.registerMBean("executionController",
new JmxExecutionController((ExecutionController) this.executorManagerAdapter));
}
// Register Log4J loggers as JMX beans so the log level can be
// updated via JConsole or Java VisualVM
final HierarchyDynamicMBean log4jMBean = new HierarchyDynamicMBean();
this.mbeanRegistrationManager.registerMBean("log4jmxbean", log4jMBean);
final ObjectName accessLogLoggerObjName =
log4jMBean.addLoggerMBean(AZKABAN_ACCESS_LOGGER_NAME);
if (accessLogLoggerObjName == null) {
      logger.info(
          "************* accessLogLoggerObjName is null, make sure there is a logger with name "
              + AZKABAN_ACCESS_LOGGER_NAME);
    } else {
      logger.info("******** accessLogLoggerObjName: "
          + accessLogLoggerObjName.getCanonicalName());
}
}
public void close() {
this.mbeanRegistrationManager.closeMBeans();
this.scheduleManager.shutdown();
this.executorManagerAdapter.shutdown();
try {
this.server.stop();
} catch (final Exception e) {
// Catch all while closing server
logger.error(e);
}
this.server.destroy();
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/AzkabanWebServerModule.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp;
import azkaban.Constants.ConfigurationKeys;
import azkaban.executor.ExecutionController;
import azkaban.executor.ExecutorManager;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.flowtrigger.database.FlowTriggerInstanceLoader;
import azkaban.flowtrigger.database.JdbcFlowTriggerInstanceLoaderImpl;
import azkaban.flowtrigger.plugin.FlowTriggerDependencyPluginException;
import azkaban.flowtrigger.plugin.FlowTriggerDependencyPluginManager;
import azkaban.scheduler.ScheduleLoader;
import azkaban.scheduler.TriggerBasedScheduleLoader;
import azkaban.user.UserManager;
import azkaban.user.XmlUserManager;
import azkaban.utils.Props;
import com.google.inject.AbstractModule;
import com.google.inject.Provides;
import java.lang.reflect.Constructor;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.log4j.Logger;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.runtime.log.Log4JLogChute;
import org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader;
import org.apache.velocity.runtime.resource.loader.JarResourceLoader;
import org.mortbay.jetty.Server;
/**
 * This Guice module is currently a single container for all bindings in the web server module.
 * This is intended to help during the migration to Guice. Once this class starts growing we can
 * move towards a more modular structuring of Guice components.
*/
public class AzkabanWebServerModule extends AbstractModule {
private static final Logger log = Logger.getLogger(AzkabanWebServerModule.class);
private static final String USER_MANAGER_CLASS_PARAM = "user.manager.class";
private static final String VELOCITY_DEV_MODE_PARAM = "velocity.dev.mode";
private final Props props;
public AzkabanWebServerModule(final Props props) {
this.props = props;
}
@Provides
@Singleton
public FlowTriggerDependencyPluginManager getDependencyPluginManager(final Props props)
throws FlowTriggerDependencyPluginException {
//todo chengren311: disable requireNonNull for now in beta since dependency plugin dir is not
// required. Add it back when flow trigger feature is enabled in production
String dependencyPluginDir;
try {
dependencyPluginDir = props.getString(ConfigurationKeys.DEPENDENCY_PLUGIN_DIR);
} catch (final Exception ex) {
dependencyPluginDir = null;
}
return new FlowTriggerDependencyPluginManager(dependencyPluginDir);
}
@Override
protected void configure() {
bind(Server.class).toProvider(WebServerProvider.class);
bind(ScheduleLoader.class).to(TriggerBasedScheduleLoader.class);
bind(FlowTriggerInstanceLoader.class).to(JdbcFlowTriggerInstanceLoaderImpl.class);
bind(ExecutorManagerAdapter.class).to(resolveExecutorManagerAdaptorClassType());
}
private Class<? extends ExecutorManagerAdapter> resolveExecutorManagerAdaptorClassType() {
return this.props.getBoolean(ConfigurationKeys.AZKABAN_POLL_MODEL, false)
? ExecutionController.class : ExecutorManager.class;
}
@Inject
@Singleton
@Provides
public UserManager createUserManager(final Props props) {
final Class<?> userManagerClass = props.getClass(USER_MANAGER_CLASS_PARAM, null);
final UserManager manager;
if (userManagerClass != null && userManagerClass.getConstructors().length > 0) {
log.info("Loading user manager class " + userManagerClass.getName());
try {
final Constructor<?> userManagerConstructor = userManagerClass.getConstructor(Props.class);
manager = (UserManager) userManagerConstructor.newInstance(props);
} catch (final Exception e) {
log.error("Could not instantiate UserManager " + userManagerClass.getName());
throw new RuntimeException(e);
}
} else {
manager = new XmlUserManager(props);
}
return manager;
}
@Inject
@Singleton
@Provides
public VelocityEngine createVelocityEngine(final Props props) {
final boolean devMode = props.getBoolean(VELOCITY_DEV_MODE_PARAM, false);
final VelocityEngine engine = new VelocityEngine();
engine.setProperty("resource.loader", "classpath, jar");
engine.setProperty("classpath.resource.loader.class",
ClasspathResourceLoader.class.getName());
engine.setProperty("classpath.resource.loader.cache", !devMode);
engine.setProperty("classpath.resource.loader.modificationCheckInterval",
5L);
engine.setProperty("jar.resource.loader.class",
JarResourceLoader.class.getName());
engine.setProperty("jar.resource.loader.cache", !devMode);
engine.setProperty("resource.manager.logwhenfound", false);
engine.setProperty("input.encoding", "UTF-8");
engine.setProperty("output.encoding", "UTF-8");
engine.setProperty("directive.set.null.allowed", true);
engine.setProperty("resource.manager.logwhenfound", false);
engine.setProperty("velocimacro.permissions.allow.inline", true);
engine.setProperty("velocimacro.library.autoreload", devMode);
engine.setProperty("velocimacro.library",
"/azkaban/webapp/servlet/velocity/macros.vm");
engine.setProperty(
"velocimacro.permissions.allow.inline.to.replace.global", true);
engine.setProperty("velocimacro.arguments.strict", true);
engine.setProperty("runtime.log.invalid.references", devMode);
engine.setProperty("runtime.log.logsystem.class", Log4JLogChute.class);
engine.setProperty("runtime.log.logsystem.log4j.logger",
Logger.getLogger("org.apache.velocity.Logger"));
engine.setProperty("parser.pool.size", 3);
return engine;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/ExecutionLogsCleaner.java
|
package azkaban.webapp;
import azkaban.Constants.ConfigurationKeys;
import azkaban.executor.ExecutorLoader;
import azkaban.utils.Props;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.joda.time.DateTime;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
@SuppressWarnings("FutureReturnValueIgnored")
public class ExecutionLogsCleaner {
private static final Logger logger = LoggerFactory.getLogger(ExecutionLogsCleaner.class);
private final ScheduledExecutorService scheduler;
private final ExecutorLoader executorLoader;
private final Props azkProps;
private long executionLogsRetentionMs;
// 12 weeks
private static final long DEFAULT_EXECUTION_LOGS_RETENTION_MS = 3 * 4 * 7
* 24 * 60 * 60 * 1000L;
// 1 hour
private static final long DEFAULT_LOG_CLEANUP_INTERVAL_SECONDS = 60 * 60;
private long cleanupIntervalInSeconds;
private static final int DEFAULT_LOG_CLEANUP_RECORD_LIMIT = 1000;
private int executionLogCleanupRecordLimit;
@Inject
public ExecutionLogsCleaner(final Props azkProps, final ExecutorLoader executorLoader) {
this.azkProps = azkProps;
this.executorLoader = executorLoader;
this.scheduler = Executors.newSingleThreadScheduledExecutor();
this.executionLogsRetentionMs = this.azkProps.getLong(
ConfigurationKeys.EXECUTION_LOGS_RETENTION_MS,
DEFAULT_EXECUTION_LOGS_RETENTION_MS);
this.cleanupIntervalInSeconds = this.azkProps.getLong(
ConfigurationKeys.EXECUTION_LOGS_CLEANUP_INTERVAL_SECONDS,
DEFAULT_LOG_CLEANUP_INTERVAL_SECONDS);
this.executionLogCleanupRecordLimit =
this.azkProps.getInt(ConfigurationKeys.EXECUTION_LOGS_CLEANUP_RECORD_LIMIT,
DEFAULT_LOG_CLEANUP_RECORD_LIMIT);
}
public void start() {
logger.info("Starting execution logs clean up thread");
this.scheduler.scheduleAtFixedRate(() -> cleanExecutionLogs(), 0L, cleanupIntervalInSeconds,
TimeUnit.SECONDS);
}
private void cleanExecutionLogs() {
logger.info("Cleaning old logs from execution_logs");
final long cutoff = System.currentTimeMillis() - this.executionLogsRetentionMs;
logger.info("Cleaning old log files before "
+ new DateTime(cutoff).toString());
cleanOldExecutionLogs(cutoff);
}
private void cleanOldExecutionLogs(final long millis) {
final long beforeDeleteLogsTimestamp = System.currentTimeMillis();
try {
final int count = this.executorLoader.removeExecutionLogsByTime(millis, this.executionLogCleanupRecordLimit);
logger.info("Cleaned up " + count + " log entries.");
    } catch (final Exception e) {
      logger.error("Log cleanup failed.", e);
    }
    logger.info(
        "Log cleanup time: " + (System.currentTimeMillis() - beforeDeleteLogsTimestamp)
            + " ms.");
}
}
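A hedged wiring sketch; in Azkaban the cleaner is injected, so the Props and ExecutorLoader arguments below are assumed to be supplied by the caller:

import azkaban.executor.ExecutorLoader;
import azkaban.utils.Props;

public class ExecutionLogsCleanerSketch {
  // `props` and `executorLoader` are assumed to come from the surrounding application.
  public static void startCleaner(final Props props, final ExecutorLoader executorLoader) {
    final ExecutionLogsCleaner cleaner = new ExecutionLogsCleaner(props, executorLoader);
    cleaner.start(); // first pass runs immediately, then at the configured fixed rate
  }
}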
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/PluginCheckerAndActionsLoader.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp;
import azkaban.utils.FileIOUtils;
import azkaban.utils.PluginUtils;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Utils;
import java.io.File;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
public class PluginCheckerAndActionsLoader {
private static final Logger log = Logger.getLogger(PluginCheckerAndActionsLoader.class);
public void load(final String pluginPath) {
log.info("Loading plug-in checker and action types");
final File triggerPluginPath = new File(pluginPath);
if (!triggerPluginPath.exists()) {
log.error("plugin path " + pluginPath + " doesn't exist!");
return;
}
final ClassLoader parentLoader = getClass().getClassLoader();
    final File[] pluginDirs = triggerPluginPath.listFiles();
    if (pluginDirs == null) {
      log.error("plugin path " + pluginPath + " is not a readable directory!");
      return;
    }
final ArrayList<String> jarPaths = new ArrayList<>();
for (final File pluginDir : pluginDirs) {
// load plugin properties
final Props pluginProps = PropsUtils.loadPluginProps(pluginDir);
if (pluginProps == null) {
continue;
}
final List<String> extLibClassPaths =
pluginProps.getStringList("trigger.external.classpaths",
(List<String>) null);
final String pluginClass = pluginProps.getString("trigger.class");
if (pluginClass == null) {
log.error("Trigger class is not set.");
continue;
} else {
log.info("Plugin class " + pluginClass);
}
URLClassLoader urlClassLoader = PluginUtils
.getURLClassLoader(pluginDir, extLibClassPaths, parentLoader);
if (urlClassLoader == null) {
continue;
}
Class<?> triggerClass =
PluginUtils.getPluginClass(pluginClass, urlClassLoader);
if (triggerClass == null) {
continue;
}
final String source = FileIOUtils.getSourcePathFromClass(triggerClass);
log.info("Source jar " + source);
jarPaths.add("jar:file:" + source);
try {
Utils.invokeStaticMethod(urlClassLoader, pluginClass,
"initiateCheckerTypes", pluginProps, this);
} catch (final Exception e) {
log.error("Unable to initiate checker types for " + pluginClass);
continue;
}
try {
Utils.invokeStaticMethod(urlClassLoader, pluginClass,
"initiateActionTypes", pluginProps, this);
} catch (final Exception e) {
log.error("Unable to initiate action types for " + pluginClass);
continue;
}
}
}
}
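A hypothetical plugin class for the reflective contract above; the class name and static method signatures are assumptions inferred from the arguments passed to Utils.invokeStaticMethod:

import azkaban.utils.Props;

public class ExampleCheckerActionPlugin {
  // Called reflectively by PluginCheckerAndActionsLoader (signature assumed).
  public static void initiateCheckerTypes(final Props pluginProps, final Object loader) {
    // register custom ConditionChecker types here
  }

  // Called reflectively by PluginCheckerAndActionsLoader (signature assumed).
  public static void initiateActionTypes(final Props pluginProps, final Object loader) {
    // register custom TriggerAction types here
  }
}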
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/Status.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp;
import azkaban.executor.Executor;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
/**
* This POJO is used by GSON library to create a status JSON object. Certain warnings do not apply
* here.
*/
@SuppressWarnings({"FieldCanBeLocal", "unused"})
public class Status {
private final String version;
private final String pid;
private final String installationPath;
private final long usedMemory, xmx;
private final boolean isDatabaseUp;
private final Map<Integer, Executor> executorStatusMap;
Status(final String version,
final String pid,
final String installationPath,
final long usedMemory,
final long xmx,
final boolean isDatabaseUp,
final Map<Integer, Executor> executorStatusMap) {
this.version = version;
this.pid = pid;
this.installationPath = installationPath;
this.usedMemory = usedMemory;
this.xmx = xmx;
this.isDatabaseUp = isDatabaseUp;
this.executorStatusMap = ImmutableMap.copyOf(executorStatusMap);
}
}
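A minimal sketch, per the Javadoc, of serializing this POJO with GSON; the helper class is hypothetical:

import com.google.gson.Gson;

final class StatusJsonSketch {
  // GSON serializes the private fields, including the immutable executor map.
  static String toJson(final Status status) {
    return new Gson().toJson(status);
  }
}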
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/StatusService.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp;
import static azkaban.webapp.servlet.AbstractAzkabanServlet.jarVersion;
import azkaban.Constants.ConfigurationKeys;
import azkaban.db.DatabaseOperator;
import azkaban.executor.Executor;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.ExecutorManagerException;
import azkaban.utils.Props;
import com.google.common.io.Files;
import javax.inject.Inject;
import javax.inject.Singleton;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class StatusService {
private static final Logger log = LoggerFactory.getLogger(StatusService.class);
private static final File PACKAGE_JAR = new File(
StatusService.class.getProtectionDomain().getCodeSource().getLocation().getPath());
private final ExecutorLoader executorLoader;
private final DatabaseOperator dbOperator;
private final String pidFilename;
@Inject
public StatusService(final Props props, final ExecutorLoader executorLoader,
final DatabaseOperator dbOperator) {
this.executorLoader = executorLoader;
this.dbOperator = dbOperator;
this.pidFilename = props.getString(ConfigurationKeys.AZKABAN_PID_FILENAME, "currentpid");
}
private static String getInstallationPath() {
try {
return PACKAGE_JAR.getCanonicalPath();
} catch (final IOException e) {
log.error("Unable to obtain canonical path. Reporting absolute path instead", e);
return PACKAGE_JAR.getAbsolutePath();
}
}
public Status getStatus() {
final String version = jarVersion == null ? "unknown" : jarVersion;
final Runtime runtime = Runtime.getRuntime();
final long usedMemory = runtime.totalMemory() - runtime.freeMemory();
// Build the status object
return new Status(version,
getPid(),
getInstallationPath(),
usedMemory,
runtime.maxMemory(),
getDbStatus(),
getActiveExecutors());
}
private String getPid() {
final File libDir = PACKAGE_JAR.getParentFile();
final File installDir = libDir.getParentFile();
final File pidFile = new File(installDir, this.pidFilename);
try {
return Files.readFirstLine(pidFile, StandardCharsets.UTF_8).trim();
} catch (final IOException e) {
log.error("Unable to obtain PID", e);
return "unknown";
}
}
private Map<Integer, Executor> getActiveExecutors() {
final Map<Integer, Executor> executorMap = new HashMap<>();
try {
final List<Executor> executors = this.executorLoader.fetchActiveExecutors();
for (final Executor executor : executors) {
executorMap.put(executor.getId(), executor);
}
} catch (final ExecutorManagerException e) {
log.error("Fetching executors failed!", e);
}
return executorMap;
}
private boolean getDbStatus() {
try {
return this.dbOperator.query("SELECT 1", rs -> true);
} catch (final SQLException e) {
log.error("DB Error", e);
}
return false;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/TriggerPluginLoader.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp;
import static azkaban.ServiceProvider.SERVICE_PROVIDER;
import azkaban.utils.FileIOUtils;
import azkaban.utils.PluginUtils;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.webapp.plugin.TriggerPlugin;
import java.io.File;
import java.lang.reflect.Constructor;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.log4j.Logger;
import org.apache.velocity.app.VelocityEngine;
import org.mortbay.jetty.servlet.Context;
public class TriggerPluginLoader {
private static final Logger log = Logger.getLogger(TriggerPluginLoader.class);
private final String pluginPath;
public TriggerPluginLoader(final Props props) {
this.pluginPath = props.getString("trigger.plugin.dir", "plugins/triggers");
}
public Map<String, TriggerPlugin> loadTriggerPlugins(final Context root) {
/*
* TODO spyne: TriggerPluginLoader should not have any dependency on Azkaban Web Server
**/
final AzkabanWebServer azkabanWebServer = SERVICE_PROVIDER.getInstance(AzkabanWebServer.class);
final File triggerPluginPath = new File(this.pluginPath);
if (!triggerPluginPath.exists()) {
return new HashMap<>();
}
final Map<String, TriggerPlugin> installedTriggerPlugins = new HashMap<>();
final ClassLoader parentLoader = AzkabanWebServer.class.getClassLoader();
    final File[] pluginDirs = triggerPluginPath.listFiles();
    if (pluginDirs == null) {
      log.error("plugin path " + this.pluginPath + " is not a readable directory!");
      return installedTriggerPlugins;
    }
final ArrayList<String> jarPaths = new ArrayList<>();
for (final File pluginDir : pluginDirs) {
// load plugin properties
final Props pluginProps = PropsUtils.loadPluginProps(pluginDir);
if (pluginProps == null) {
continue;
}
final String pluginName = pluginProps.getString("trigger.name");
final List<String> extLibClassPaths =
pluginProps.getStringList("trigger.external.classpaths",
(List<String>) null);
final String pluginClass = pluginProps.getString("trigger.class");
if (pluginClass == null) {
log.error("Trigger class is not set.");
continue;
} else {
log.info("Plugin class " + pluginClass);
}
Class<?> triggerClass =
PluginUtils.getPluginClass(pluginClass, pluginDir, extLibClassPaths, parentLoader);
if (triggerClass == null) {
continue;
}
final String source = FileIOUtils.getSourcePathFromClass(triggerClass);
log.info("Source jar " + source);
jarPaths.add("jar:file:" + source);
Constructor<?> constructor = null;
try {
constructor = triggerClass
.getConstructor(String.class, Props.class, Context.class, AzkabanWebServer.class);
} catch (final NoSuchMethodException e) {
log.error("Constructor not found in " + pluginClass);
continue;
}
Object obj = null;
try {
obj = constructor.newInstance(pluginName, pluginProps, root, azkabanWebServer);
} catch (final Exception e) {
        log.error("Failed to instantiate trigger plugin " + pluginClass, e);
}
if (!(obj instanceof TriggerPlugin)) {
log.error("The object is not an TriggerPlugin");
continue;
}
final TriggerPlugin plugin = (TriggerPlugin) obj;
installedTriggerPlugins.put(pluginName, plugin);
}
// Velocity needs the jar resource paths to be set.
final String jarResourcePath = StringUtils.join(jarPaths, ", ");
log.info("Setting jar resource path " + jarResourcePath);
final VelocityEngine ve = azkabanWebServer.getVelocityEngine();
ve.addProperty("jar.resource.loader.path", jarResourcePath);
return installedTriggerPlugins;
}
}
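A hypothetical plugin sketch satisfying the constructor signature reflected on above, (String, Props, Context, AzkabanWebServer); the class name is illustrative and the method bodies are stubs:

import azkaban.trigger.TriggerAgent;
import azkaban.utils.Props;
import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.plugin.TriggerPlugin;
import azkaban.webapp.servlet.AbstractAzkabanServlet;
import org.mortbay.jetty.servlet.Context;

public class ExampleTriggerPlugin implements TriggerPlugin {
  private final String pluginName;
  private boolean hidden;

  // Matches the constructor looked up reflectively by TriggerPluginLoader.
  public ExampleTriggerPlugin(final String pluginName, final Props props,
      final Context root, final AzkabanWebServer server) {
    this.pluginName = pluginName; // register servlets on `root` here if needed
  }

  @Override public AbstractAzkabanServlet getServlet() { return null; }
  @Override public TriggerAgent getAgent() { return null; }
  @Override public void load() { }
  @Override public String getPluginName() { return this.pluginName; }
  @Override public String getPluginPath() { return "plugins/triggers/example"; }
  @Override public int getOrder() { return 0; }
  @Override public boolean isHidden() { return this.hidden; }
  @Override public void setHidden(final boolean hidden) { this.hidden = hidden; }
  @Override public String getInputPanelVM() { return null; }
}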
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/WebMetrics.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp;
import azkaban.metrics.MetricsManager;
import com.codahale.metrics.Meter;
import java.util.concurrent.atomic.AtomicLong;
import javax.inject.Inject;
import javax.inject.Singleton;
/**
 * This singleton class WebMetrics is in charge of collecting a variety of metrics from
 * the azkaban-web-server modules.
*/
@Singleton
public class WebMetrics {
private final Meter webGetCall;
private final Meter webPostCall;
  // How long the user log fetch takes when a user calls the fetch-log API.
private final AtomicLong logFetchLatency = new AtomicLong(0L);
@Inject
WebMetrics(final MetricsManager metricsManager) {
this.webGetCall = metricsManager.addMeter("Web-Get-Call-Meter");
this.webPostCall = metricsManager.addMeter("Web-Post-Call-Meter");
metricsManager.addGauge("fetchLogLatency", this.logFetchLatency::get);
}
/**
   * Marks the occurrence of a GET call.
   *
   * This method should be thread safe. Two reasons we don't make this call synchronized:
   * 1) Dropwizard metrics handles concurrency internally; 2) mark is essentially an addition
   * operation, which should not cause a race condition.
*/
public void markWebGetCall() {
this.webGetCall.mark();
}
public void markWebPostCall() {
this.webPostCall.mark();
}
public void setFetchLogLatency(final long milliseconds) {
this.logFetchLatency.set(milliseconds);
}
}
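A hedged caller sketch (the handler class and injection point are illustrative) showing how a request path might report into these metrics:

import javax.inject.Inject;

class MetricsReportingHandler {
  @Inject
  private WebMetrics webMetrics;

  // Meters a GET call and records the fetch-log latency gauge.
  void handleLogFetch() {
    final long start = System.currentTimeMillis();
    this.webMetrics.markWebGetCall();
    // ... perform the log fetch ...
    this.webMetrics.setFetchLogLatency(System.currentTimeMillis() - start);
  }
}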
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/WebServerProvider.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp;
import static java.util.Objects.requireNonNull;
import azkaban.Constants;
import azkaban.utils.Props;
import javax.inject.Inject;
import com.google.inject.Provider;
import java.util.List;
import org.apache.log4j.Logger;
import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;
import org.mortbay.jetty.bio.SocketConnector;
import org.mortbay.jetty.security.SslSocketConnector;
public class WebServerProvider implements Provider<Server> {
private static final Logger logger = Logger.getLogger(WebServerProvider.class);
private static final int MAX_HEADER_BUFFER_SIZE = 10 * 1024 * 1024;
@Inject
private Props props;
@Override
public Server get() {
requireNonNull(this.props);
final int maxThreads = this.props
.getInt("jetty.maxThreads", Constants.DEFAULT_JETTY_MAX_THREAD_COUNT);
final boolean useSsl = this.props.getBoolean("jetty.use.ssl", true);
final int port;
final Server server = new Server();
if (useSsl) {
final int sslPortNumber = this.props
.getInt("jetty.ssl.port", Constants.DEFAULT_SSL_PORT_NUMBER);
port = sslPortNumber;
server.addConnector(getSslSocketConnector(sslPortNumber));
} else {
port = this.props.getInt("jetty.port", Constants.DEFAULT_PORT_NUMBER);
server.addConnector(getSocketConnector(port));
}
// setting stats configuration for connectors
setStatsOnConnectors(server);
logger.info(String.format(
"Starting %sserver on port: %d # Max threads: %d", useSsl ? "SSL " : "", port, maxThreads));
return server;
}
private void setStatsOnConnectors(final Server server) {
final boolean isStatsOn = this.props.getBoolean("jetty.connector.stats", true);
logger.info("Setting up connector with stats on: " + isStatsOn);
for (final Connector connector : server.getConnectors()) {
connector.setStatsOn(isStatsOn);
}
}
private SocketConnector getSocketConnector(final int port) {
final SocketConnector connector = new SocketConnector();
connector.setPort(port);
connector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE);
return connector;
}
private SslSocketConnector getSslSocketConnector(final int sslPortNumber) {
final SslSocketConnector secureConnector = new SslSocketConnector();
secureConnector.setPort(sslPortNumber);
secureConnector.setKeystore(this.props.getString("jetty.keystore"));
secureConnector.setPassword(this.props.getString("jetty.password"));
secureConnector.setKeyPassword(this.props.getString("jetty.keypassword"));
secureConnector.setTruststore(this.props.getString("jetty.truststore"));
secureConnector.setTrustPassword(this.props.getString("jetty.trustpassword"));
secureConnector.setHeaderBufferSize(MAX_HEADER_BUFFER_SIZE);
// set up vulnerable cipher suites to exclude
final List<String> cipherSuitesToExclude = this.props
.getStringList("jetty.excludeCipherSuites");
logger.info("Excluded Cipher Suites: " + String.valueOf(cipherSuitesToExclude));
if (cipherSuitesToExclude != null && !cipherSuitesToExclude.isEmpty()) {
secureConnector.setExcludeCipherSuites(cipherSuitesToExclude.toArray(new String[0]));
}
return secureConnector;
}
}
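A hedged sketch of obtaining a Server through this provider with Guice; the property values are illustrative (not production settings) and assume azkaban.utils.Props exposes put(String, String):

import azkaban.utils.Props;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import org.mortbay.jetty.Server;

public class WebServerProviderSketch {
  public static void main(final String[] args) throws Exception {
    final Props props = new Props();
    props.put("jetty.use.ssl", "false"); // skip the SSL connector branch
    props.put("jetty.port", "8081");     // illustrative port
    final Server server = Guice.createInjector(new AbstractModule() {
      @Override
      protected void configure() {
        bind(Props.class).toInstance(props);
        bind(Server.class).toProvider(WebServerProvider.class);
      }
    }).getInstance(Server.class);
    server.start();
  }
}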
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/plugin/PluginRegistry.java
|
/*
* Copyright 2014 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.plugin;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeSet;
public class PluginRegistry {
private static PluginRegistry registry;
public TreeSet<ViewerPlugin> viewerPlugins;
public Map<String, TreeSet<ViewerPlugin>> jobTypeViewerPlugins;
private PluginRegistry() {
this.viewerPlugins = new TreeSet<>(ViewerPlugin.COMPARATOR);
this.jobTypeViewerPlugins = new HashMap<>();
}
public static PluginRegistry getRegistry() {
if (registry == null) {
registry = new PluginRegistry();
}
return registry;
}
public void register(final ViewerPlugin plugin) {
this.viewerPlugins.add(plugin);
final List<String> jobTypes = plugin.getJobTypes();
if (jobTypes == null) {
return;
}
for (final String jobType : jobTypes) {
TreeSet<ViewerPlugin> plugins = null;
if (!this.jobTypeViewerPlugins.containsKey(jobType)) {
plugins = new TreeSet<>(ViewerPlugin.COMPARATOR);
plugins.add(plugin);
this.jobTypeViewerPlugins.put(jobType, plugins);
} else {
plugins = this.jobTypeViewerPlugins.get(jobType);
plugins.add(plugin);
}
}
}
public List<ViewerPlugin> getViewerPlugins() {
return new ArrayList<>(this.viewerPlugins);
}
public List<ViewerPlugin> getViewerPluginsForJobType(final String jobType) {
final TreeSet<ViewerPlugin> plugins = this.jobTypeViewerPlugins.get(jobType);
if (plugins == null) {
return null;
}
return new ArrayList<>(plugins);
}
}
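A short usage sketch (the plugin name, path, and job types are placeholders) of registering a viewer plugin and resolving viewers for a job type:

import java.util.List;

class PluginRegistrySketch {
  static List<ViewerPlugin> demo() {
    final ViewerPlugin jobSummary = new ViewerPlugin(
        "jobsummary", "plugins/viewer/jobsummary", 1, false, "hadoopJava,java");
    PluginRegistry.getRegistry().register(jobSummary);
    // Returned list is ordered by ViewerPlugin.COMPARATOR.
    return PluginRegistry.getRegistry().getViewerPluginsForJobType("java");
  }
}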
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/plugin/TriggerPlugin.java
|
/*
* Copyright 2014 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.plugin;
import azkaban.trigger.TriggerAgent;
import azkaban.webapp.servlet.AbstractAzkabanServlet;
public interface TriggerPlugin {
// public TriggerPlugin(String pluginName, Props props, AzkabanWebServer
// azkabanWebApp) {
// this.pluginName = pluginName;
// this.pluginPath = props.getString("trigger.path");
// this.order = props.getInt("trigger.order", 0);
// this.hidden = props.getBoolean("trigger.hidden", false);
//
// }
public AbstractAzkabanServlet getServlet();
public TriggerAgent getAgent();
public void load();
public String getPluginName();
public String getPluginPath();
public int getOrder();
public boolean isHidden();
public void setHidden(boolean hidden);
public String getInputPanelVM();
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/plugin/ViewerPlugin.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.plugin;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
public class ViewerPlugin {
public static final Comparator<ViewerPlugin> COMPARATOR =
new Comparator<ViewerPlugin>() {
@Override
public int compare(final ViewerPlugin o1, final ViewerPlugin o2) {
if (o1.getOrder() != o2.getOrder()) {
return o1.getOrder() - o2.getOrder();
}
return o1.getPluginName().compareTo(o2.getPluginName());
}
};
private final String pluginName;
private final String pluginPath;
private final int order;
private final List<String> jobTypes;
private boolean hidden;
public ViewerPlugin(final String pluginName, final String pluginPath, final int order,
final boolean hidden, final String jobTypes) {
this.pluginName = pluginName;
this.pluginPath = pluginPath;
this.order = order;
this.setHidden(hidden);
this.jobTypes = parseJobTypes(jobTypes);
}
public String getPluginName() {
return this.pluginName;
}
public String getPluginPath() {
return this.pluginPath;
}
public int getOrder() {
return this.order;
}
public boolean isHidden() {
return this.hidden;
}
public void setHidden(final boolean hidden) {
this.hidden = hidden;
}
protected List<String> parseJobTypes(final String jobTypesStr) {
if (jobTypesStr == null) {
return null;
}
final String[] parts = jobTypesStr.split(",");
final List<String> jobTypes = new ArrayList<>();
    for (final String part : parts) {
      jobTypes.add(part.trim());
    }
return jobTypes;
}
public List<String> getJobTypes() {
return this.jobTypes;
}
}
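A small sketch demonstrating the COMPARATOR semantics above (class and plugin names are illustrative): plugins sort by order first, then by plugin name.

import java.util.Arrays;
import java.util.List;

class ViewerPluginOrderSketch {
  static List<ViewerPlugin> demo() {
    final ViewerPlugin alpha = new ViewerPlugin("alpha", "/p/a", 1, false, null);
    final ViewerPlugin beta = new ViewerPlugin("beta", "/p/b", 1, false, null);
    final ViewerPlugin gamma = new ViewerPlugin("gamma", "/p/c", 0, false, null);
    final List<ViewerPlugin> plugins = Arrays.asList(alpha, beta, gamma);
    plugins.sort(ViewerPlugin.COMPARATOR); // -> gamma (order 0), alpha, beta (tie by name)
    return plugins;
  }
}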
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/AbstractAzkabanServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import static azkaban.ServiceProvider.SERVICE_PROVIDER;
import azkaban.Constants.ConfigurationKeys;
import azkaban.server.AzkabanServer;
import azkaban.server.HttpRequestUtils;
import azkaban.server.session.Session;
import azkaban.utils.JSONUtils;
import azkaban.utils.Props;
import azkaban.utils.TimeUtils;
import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.plugin.PluginRegistry;
import azkaban.webapp.plugin.TriggerPlugin;
import azkaban.webapp.plugin.ViewerPlugin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TimeZone;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.joda.time.DateTime;
/**
* Base Servlet for pages
*/
public abstract class AbstractAzkabanServlet extends HttpServlet {
public static final String JSON_MIME_TYPE = "application/json";
public static final String jarVersion = AbstractAzkabanServlet.class.getPackage()
.getImplementationVersion();
private static final String AZKABAN_SUCCESS_MESSAGE =
"azkaban.success.message";
private static final String AZKABAN_WARN_MESSAGE =
"azkaban.warn.message";
private static final String AZKABAN_FAILURE_MESSAGE =
"azkaban.failure.message";
private static final long serialVersionUID = -1;
protected String passwordPlaceholder;
private AzkabanServer application;
private String name;
private String label;
private String color;
private List<ViewerPlugin> viewerPlugins;
private List<TriggerPlugin> triggerPlugins;
private int displayExecutionPageSize;
public static String createJsonResponse(final String status, final String message,
final String action, final Map<String, Object> params) {
final HashMap<String, Object> response = new HashMap<>();
response.put("status", status);
if (message != null) {
response.put("message", message);
}
if (action != null) {
response.put("action", action);
}
if (params != null) {
response.putAll(params);
}
return JSONUtils.toJSON(response);
}
/**
   * Retrieves the application for the servlet.
*/
public AzkabanServer getApplication() {
return this.application;
}
@Override
public void init(final ServletConfig config) throws ServletException {
this.application = SERVICE_PROVIDER.getInstance(AzkabanWebServer.class);
if (this.application == null) {
throw new IllegalStateException(
"No batch application is defined in the servlet context!");
}
final Props props = this.application.getServerProps();
this.name = props.getString("azkaban.name", "");
this.label = props.getString("azkaban.label", "");
this.color = props.getString("azkaban.color", "#FF0000");
this.passwordPlaceholder = props.getString("azkaban.password.placeholder", "Password");
this.displayExecutionPageSize = props.getInt(ConfigurationKeys.DISPLAY_EXECUTION_PAGE_SIZE, 16);
if (this.application instanceof AzkabanWebServer) {
final AzkabanWebServer server = (AzkabanWebServer) this.application;
this.viewerPlugins = PluginRegistry.getRegistry().getViewerPlugins();
this.triggerPlugins =
new ArrayList<>(server.getTriggerPlugins().values());
}
}
/**
   * Checks for the existence of the parameter in the request
*/
public boolean hasParam(final HttpServletRequest request, final String param) {
return HttpRequestUtils.hasParam(request, param);
}
/**
* Retrieves the param from the http servlet request. Will throw an exception if not found
*/
public String getParam(final HttpServletRequest request, final String name)
throws ServletException {
return HttpRequestUtils.getParam(request, name);
}
/**
* Retrieves the param from the http servlet request.
*/
public String getParam(final HttpServletRequest request, final String name,
final String defaultVal) {
return HttpRequestUtils.getParam(request, name, defaultVal);
}
/**
* Returns the param and parses it into an int. Will throw an exception if not found, or a parse
* error if the type is incorrect.
*/
public int getIntParam(final HttpServletRequest request, final String name)
throws ServletException {
return HttpRequestUtils.getIntParam(request, name);
}
public int getIntParam(final HttpServletRequest request, final String name,
final int defaultVal) {
return HttpRequestUtils.getIntParam(request, name, defaultVal);
}
public long getLongParam(final HttpServletRequest request, final String name)
throws ServletException {
return HttpRequestUtils.getLongParam(request, name);
}
public long getLongParam(final HttpServletRequest request, final String name,
final long defaultVal) {
return HttpRequestUtils.getLongParam(request, name, defaultVal);
}
public Map<String, String> getParamGroup(final HttpServletRequest request,
final String groupName) throws ServletException {
return HttpRequestUtils.getParamGroup(request, groupName);
}
/**
   * Sets a session value on the request.
*/
protected void setSessionValue(final HttpServletRequest request, final String key,
final Object value) {
request.getSession(true).setAttribute(key, value);
}
/**
* Adds a session value to the request
*/
protected void addSessionValue(final HttpServletRequest request, final String key,
final Object value) {
    List<Object> l = (List<Object>) request.getSession(true).getAttribute(key);
    if (l == null) {
      l = new ArrayList<>();
    }
l.add(value);
request.getSession(true).setAttribute(key, l);
}
/**
* Sets an error message in azkaban.failure.message in the cookie. This will be used by the web
* client javascript to somehow display the message
*/
protected void setErrorMessageInCookie(final HttpServletResponse response,
final String errorMsg) {
final Cookie cookie = new Cookie(AZKABAN_FAILURE_MESSAGE, errorMsg);
cookie.setPath("/");
response.addCookie(cookie);
}
/**
* Sets a warning message in azkaban.warn.message in the cookie. This will be used by the web
* client javascript to somehow display the message
*/
protected void setWarnMessageInCookie(final HttpServletResponse response,
final String errorMsg) {
final Cookie cookie = new Cookie(AZKABAN_WARN_MESSAGE, errorMsg);
cookie.setPath("/");
response.addCookie(cookie);
}
/**
* Sets a message in azkaban.success.message in the cookie. This will be used by the web client
* javascript to somehow display the message
*/
protected void setSuccessMessageInCookie(final HttpServletResponse response,
final String message) {
final Cookie cookie = new Cookie(AZKABAN_SUCCESS_MESSAGE, message);
cookie.setPath("/");
response.addCookie(cookie);
}
/**
* Retrieves a success message from a cookie. azkaban.success.message
*/
protected String getSuccessMessageFromCookie(final HttpServletRequest request) {
final Cookie cookie = getCookieByName(request, AZKABAN_SUCCESS_MESSAGE);
if (cookie == null) {
return null;
}
return cookie.getValue();
}
/**
* Retrieves a warn message from a cookie. azkaban.warn.message
*/
protected String getWarnMessageFromCookie(final HttpServletRequest request) {
final Cookie cookie = getCookieByName(request, AZKABAN_WARN_MESSAGE);
if (cookie == null) {
return null;
}
return cookie.getValue();
}
/**
   * Retrieves an error message from a cookie. azkaban.failure.message
*/
protected String getErrorMessageFromCookie(final HttpServletRequest request) {
final Cookie cookie = getCookieByName(request, AZKABAN_FAILURE_MESSAGE);
if (cookie == null) {
return null;
}
return cookie.getValue();
}
/**
   * Retrieves a cookie by name. Potentially a performance issue if a lot of cookie variables
   * are used.
*/
protected Cookie getCookieByName(final HttpServletRequest request, final String name) {
final Cookie[] cookies = request.getCookies();
if (cookies != null) {
for (final Cookie cookie : cookies) {
if (name.equals(cookie.getName())) {
return cookie;
}
}
}
return null;
}
/**
* Creates a new velocity page to use. With session.
*/
protected Page newPage(final HttpServletRequest req, final HttpServletResponse resp,
final Session session, final String template) {
final Page page = new Page(req, resp, getApplication().getVelocityEngine(), template);
page.add("version", jarVersion);
page.add("azkaban_name", this.name);
page.add("azkaban_label", this.label);
page.add("azkaban_color", this.color);
page.add("note_type", NoteServlet.type);
page.add("note_message", NoteServlet.message);
page.add("note_url", NoteServlet.url);
page.add("timezone", TimeZone.getDefault().getID());
page.add("currentTime", (new DateTime()).getMillis());
page.add("size", getDisplayExecutionPageSize());
page.add("System", System.class);
page.add("TimeUtils", TimeUtils.class);
page.add("WebUtils", WebUtils.class);
if (session != null && session.getUser() != null) {
page.add("user_id", session.getUser().getUserId());
}
final String errorMsg = getErrorMessageFromCookie(req);
page.add("error_message", errorMsg == null || errorMsg.isEmpty() ? "null"
: errorMsg);
setErrorMessageInCookie(resp, null);
final String warnMsg = getWarnMessageFromCookie(req);
page.add("warn_message", warnMsg == null || warnMsg.isEmpty() ? "null"
: warnMsg);
setWarnMessageInCookie(resp, null);
final String successMsg = getSuccessMessageFromCookie(req);
page.add("success_message",
successMsg == null || successMsg.isEmpty() ? "null" : successMsg);
setSuccessMessageInCookie(resp, null);
// @TODO, allow more than one type of viewer. For time sake, I only install
// the first one
if (this.viewerPlugins != null && !this.viewerPlugins.isEmpty()) {
page.add("viewers", this.viewerPlugins);
}
if (this.triggerPlugins != null && !this.triggerPlugins.isEmpty()) {
page.add("triggerPlugins", this.triggerPlugins);
}
return page;
}
/**
* Creates a new velocity page to use.
*/
protected Page newPage(final HttpServletRequest req, final HttpServletResponse resp,
final String template) {
final Page page = new Page(req, resp, getApplication().getVelocityEngine(), template);
page.add("version", jarVersion);
page.add("azkaban_name", this.name);
page.add("azkaban_label", this.label);
page.add("azkaban_color", this.color);
page.add("note_type", NoteServlet.type);
page.add("note_message", NoteServlet.message);
page.add("note_url", NoteServlet.url);
page.add("timezone", TimeZone.getDefault().getID());
page.add("currentTime", (new DateTime()).getMillis());
page.add("size", getDisplayExecutionPageSize());
// @TODO, allow more than one type of viewer. For time sake, I only install
// the first one
if (this.viewerPlugins != null && !this.viewerPlugins.isEmpty()) {
page.add("viewers", this.viewerPlugins);
final ViewerPlugin plugin = this.viewerPlugins.get(0);
page.add("viewerName", plugin.getPluginName());
page.add("viewerPath", plugin.getPluginPath());
}
if (this.triggerPlugins != null && !this.triggerPlugins.isEmpty()) {
page.add("triggers", this.triggerPlugins);
}
return page;
}
/**
* Writes json out to the stream.
*/
protected void writeJSON(final HttpServletResponse resp, final Object obj)
throws IOException {
writeJSON(resp, obj, false);
}
protected void writeJSON(final HttpServletResponse resp, final Object obj, final boolean pretty)
throws IOException {
resp.setContentType(JSON_MIME_TYPE);
    JSONUtils.toJSON(obj, resp.getOutputStream(), pretty);
}
protected int getDisplayExecutionPageSize() {
return this.displayExecutionPageSize;
}
}
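A hypothetical subclass sketch showing the intended pattern: JSON for ajax requests via writeJSON, a Velocity page otherwise via newPage. The servlet name and template path are illustrative.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class PingServlet extends AbstractAzkabanServlet {
  @Override
  protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
      throws ServletException, IOException {
    if (hasParam(req, "ajax")) {
      final Map<String, Object> ret = new HashMap<>();
      ret.put("status", "success");
      writeJSON(resp, ret); // sets the JSON content type and streams the map
    } else {
      // Template path is a placeholder; renders with the shared page context.
      newPage(req, resp, "azkaban/webapp/servlet/velocity/pingpage.vm").render();
    }
  }
}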
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/ExecutorServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import static azkaban.ServiceProvider.SERVICE_PROVIDER;
import azkaban.Constants;
import azkaban.executor.ConnectorParams;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableFlowBase;
import azkaban.executor.ExecutableNode;
import azkaban.executor.ExecutionOptions;
import azkaban.executor.ExecutionOptions.FailureAction;
import azkaban.executor.Executor;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.flow.Flow;
import azkaban.flow.FlowUtils;
import azkaban.flowtrigger.FlowTriggerService;
import azkaban.flowtrigger.TriggerInstance;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.scheduler.Schedule;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.ScheduleManagerException;
import azkaban.server.HttpRequestUtils;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.utils.ExternalLinkUtils;
import azkaban.utils.FileIOUtils.LogData;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.webapp.AzkabanWebServer;
import azkaban.webapp.WebMetrics;
import azkaban.webapp.plugin.PluginRegistry;
import azkaban.webapp.plugin.ViewerPlugin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringEscapeUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ExecutorServlet extends LoginAbstractAzkabanServlet {
private static final Logger logger = LoggerFactory.getLogger(ExecutorServlet.class.getName());
private static final long serialVersionUID = 1L;
private WebMetrics webMetrics;
private ProjectManager projectManager;
private FlowTriggerService flowTriggerService;
private ExecutorManagerAdapter executorManagerAdapter;
private ScheduleManager scheduleManager;
private UserManager userManager;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.userManager = server.getUserManager();
this.projectManager = server.getProjectManager();
this.executorManagerAdapter = server.getExecutorManager();
this.scheduleManager = server.getScheduleManager();
this.flowTriggerService = server.getFlowTriggerService();
// TODO: reallocf fully guicify
this.webMetrics = SERVICE_PROVIDER.getInstance(WebMetrics.class);
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else if (hasParam(req, "execid")) {
if (hasParam(req, "job")) {
handleExecutionJobDetailsPage(req, resp, session);
} else {
handleExecutionFlowPageByExecId(req, resp, session);
}
} else if (hasParam(req, "triggerinstanceid")) {
handleExecutionFlowPageByTriggerInstanceId(req, resp, session);
} else {
handleExecutionsPage(req, resp, session);
}
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
if (hasParam(req, "execid")) {
final int execid = getIntParam(req, "execid");
ExecutableFlow exFlow = null;
try {
exFlow = this.executorManagerAdapter.getExecutableFlow(execid);
} catch (final ExecutorManagerException e) {
ret.put("error",
"Error fetching execution '" + execid + "': " + e.getMessage());
}
if (exFlow == null) {
ret.put("error", "Cannot find execution '" + execid + "'");
} else {
if (ajaxName.equals("fetchexecflow")) {
ajaxFetchExecutableFlow(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("fetchexecflowupdate")) {
ajaxFetchExecutableFlowUpdate(req, resp, ret, session.getUser(),
exFlow);
} else if (ajaxName.equals("cancelFlow")) {
ajaxCancelFlow(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("pauseFlow")) {
ajaxPauseFlow(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("resumeFlow")) {
ajaxResumeFlow(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("fetchExecFlowLogs")) {
ajaxFetchExecFlowLogs(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("fetchExecJobLogs")) {
ajaxFetchJobLogs(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("fetchExecJobStats")) {
ajaxFetchJobStats(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("retryFailedJobs")) {
ajaxRestartFailed(req, resp, ret, session.getUser(), exFlow);
} else if (ajaxName.equals("flowInfo")) {
ajaxFetchExecutableFlowInfo(req, resp, ret, session.getUser(), exFlow);
}
}
} else if (ajaxName.equals("ramp")) {
ajaxRampActions(req, resp, ret, session.getUser());
} else if (ajaxName.equals("fetchscheduledflowgraph")) {
final String projectName = getParam(req, "project");
final String flowName = getParam(req, "flow");
ajaxFetchScheduledFlowGraph(projectName, flowName, ret, session.getUser());
} else if (ajaxName.equals("reloadExecutors")) {
ajaxReloadExecutors(req, resp, ret, session.getUser());
} else if (ajaxName.equals("enableQueueProcessor")) {
ajaxUpdateQueueProcessor(req, resp, ret, session.getUser(), true);
} else if (ajaxName.equals("disableQueueProcessor")) {
ajaxUpdateQueueProcessor(req, resp, ret, session.getUser(), false);
} else if (ajaxName.equals("getRunning")) {
final String projectName = getParam(req, "project");
final String flowName = getParam(req, "flow");
ajaxGetFlowRunning(req, resp, ret, session.getUser(), projectName,
flowName);
} else if (ajaxName.equals("flowInfo")) {
final String projectName = getParam(req, "project");
final String flowName = getParam(req, "flow");
ajaxFetchFlowInfo(req, resp, ret, session.getUser(), projectName,
flowName);
} else {
final String projectName = getParam(req, "project");
ret.put("project", projectName);
if (ajaxName.equals("executeFlow")) {
ajaxExecuteFlow(req, resp, ret, session.getUser());
}
}
if (ret != null) {
this.writeJSON(resp, ret);
}
}
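  // Example ajax requests dispatched above (the /executor mount path and the ids
  // are illustrative; parameter names match the getParam calls in this servlet):
  //   GET /executor?ajax=fetchexecflow&execid=123
  //   GET /executor?ajax=fetchExecFlowLogs&execid=123&offset=0&length=50000
  //   GET /executor?ajax=flowInfo&project=myProject&flow=myFlow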
/**
* <pre>
   * Enables the queue processor if enableQueue is true,
   * disables the queue processor if enableQueue is false.
* </pre>
*/
private void ajaxUpdateQueueProcessor(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> returnMap, final User user,
final boolean enableQueue) {
boolean wasSuccess = false;
if (HttpRequestUtils.hasPermission(this.userManager, user, Type.ADMIN)) {
try {
if (enableQueue) {
this.executorManagerAdapter.enableQueueProcessorThread();
} else {
this.executorManagerAdapter.disableQueueProcessorThread();
}
returnMap.put(ConnectorParams.STATUS_PARAM,
ConnectorParams.RESPONSE_SUCCESS);
wasSuccess = true;
} catch (final ExecutorManagerException e) {
returnMap.put(ConnectorParams.RESPONSE_ERROR, e.getMessage());
}
} else {
returnMap.put(ConnectorParams.RESPONSE_ERROR,
"Only Admins are allowed to update queue processor");
}
if (!wasSuccess) {
returnMap.put(ConnectorParams.STATUS_PARAM,
ConnectorParams.RESPONSE_ERROR);
}
}
private void ajaxFetchScheduledFlowGraph(final String projectName, final String flowName,
final HashMap<String, Object> ret, final User user) throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
if (project == null) {
ret.put("error", "Project '" + projectName + "' doesn't exist.");
return;
}
try {
final Schedule schedule = this.scheduleManager.getSchedule(project.getId(), flowName);
final ExecutionOptions executionOptions =
schedule != null ? schedule.getExecutionOptions() : new ExecutionOptions();
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put("error", "Flow '" + flowName + "' cannot be found in project " + project);
return;
}
final ExecutableFlow exFlow = new ExecutableFlow(project, flow);
exFlow.setExecutionOptions(executionOptions);
ret.put("submitTime", exFlow.getSubmitTime());
ret.put("submitUser", exFlow.getSubmitUser());
ret.put("execid", exFlow.getExecutionId());
ret.put("projectId", exFlow.getProjectId());
ret.put("project", project.getName());
FlowUtils.applyDisabledJobs(executionOptions.getDisabledJobs(), exFlow);
final Map<String, Object> flowObj = getExecutableNodeInfo(exFlow);
ret.putAll(flowObj);
} catch (final ScheduleManagerException ex) {
throw new ServletException(ex);
}
}
/* Reloads executors from DB and azkaban.properties via executorManager */
private void ajaxReloadExecutors(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> returnMap, final User user) {
boolean wasSuccess = false;
if (HttpRequestUtils.hasPermission(this.userManager, user, Type.ADMIN)) {
try {
this.executorManagerAdapter.setupExecutors();
returnMap.put(ConnectorParams.STATUS_PARAM,
ConnectorParams.RESPONSE_SUCCESS);
wasSuccess = true;
} catch (final ExecutorManagerException e) {
returnMap.put(ConnectorParams.RESPONSE_ERROR,
"Failed to refresh the executors " + e.getMessage());
}
} else {
returnMap.put(ConnectorParams.RESPONSE_ERROR,
"Only Admins are allowed to refresh the executors");
}
if (!wasSuccess) {
returnMap.put(ConnectorParams.STATUS_PARAM,
ConnectorParams.RESPONSE_ERROR);
}
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
}
}
private void handleExecutionJobDetailsPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page = newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/jobdetailspage.vm");
final User user = session.getUser();
final int execId = getIntParam(req, "execid");
final String jobId = getParam(req, "job");
final int attempt = getIntParam(req, "attempt", 0);
page.add("execid", execId);
page.add("jobid", jobId);
page.add("attempt", attempt);
ExecutableFlow flow = null;
ExecutableNode node = null;
try {
flow = this.executorManagerAdapter.getExecutableFlow(execId);
if (flow == null) {
page.add("errorMsg", "Error loading executing flow " + execId
+ ": not found.");
page.render();
return;
}
node = flow.getExecutableNodePath(jobId);
if (node == null) {
page.add("errorMsg", "Job " + jobId + " doesn't exist in " + flow.getExecutionId());
return;
}
final List<ViewerPlugin> jobViewerPlugins = PluginRegistry.getRegistry()
.getViewerPluginsForJobType(node.getType());
page.add("jobViewerPlugins", jobViewerPlugins);
} catch (final ExecutorManagerException e) {
page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
page.render();
return;
}
final int projectId = flow.getProjectId();
final Project project =
getProjectPageByPermission(page, projectId, user, Type.READ);
if (project == null) {
page.render();
return;
}
final Map<String, String> jobLogUrlsByAppId = this.executorManagerAdapter
.getExternalJobLogUrls(flow, jobId, attempt);
page.add("jobLogUrlsByAppId", jobLogUrlsByAppId);
page.add("projectName", project.getName());
page.add("flowid", flow.getId());
page.add("parentflowid", node.getParentFlow().getFlowId());
page.add("jobname", node.getId());
page.add("jobType", node.getType());
page.add("attemptStatus", attempt == node.getAttempt() ?
node.getStatus() : node.getPastAttemptList().get(attempt).getStatus());
page.add("pastAttempts", node.getAttempt() > 0 ?
node.getPastAttemptList().size() : 0);
page.add("jobFailed", node.getStatus() == Status.FAILED || node.getStatus() == Status.KILLED);
page.render();
}
private void handleExecutionsPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/executionspage.vm");
final List<Pair<ExecutableFlow, Optional<Executor>>> runningFlows =
this.executorManagerAdapter.getActiveFlowsWithExecutor();
page.add("runningFlows", runningFlows.isEmpty() ? null : runningFlows);
final List<ExecutableFlow> finishedFlows =
this.executorManagerAdapter.getRecentlyFinishedFlows();
page.add("recentlyFinished", finishedFlows.isEmpty() ? null : finishedFlows);
page.add("vmutils", new VelocityUtil(this.projectManager));
page.render();
}
private void handleExecutionFlowPageByTriggerInstanceId(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/executingflowpage.vm");
final User user = session.getUser();
final String triggerInstanceId = getParam(req, "triggerinstanceid");
final TriggerInstance triggerInst = this.flowTriggerService
.findTriggerInstanceById(triggerInstanceId);
if (triggerInst == null) {
page.add("errorMsg", "Error loading trigger instance " + triggerInstanceId
+ " not found.");
page.render();
return;
}
page.add("triggerInstanceId", triggerInstanceId);
page.add("execid", triggerInst.getFlowExecId());
final int projectId = triggerInst.getProject().getId();
final Project project =
getProjectPageByPermission(page, projectId, user, Type.READ);
if (project == null) {
page.render();
return;
}
addExternalLinkLabel(req, page);
page.add("projectId", project.getId());
page.add("projectName", project.getName());
page.add("flowid", triggerInst.getFlowId());
page.render();
}
private void addExternalLinkLabel(final HttpServletRequest req, final Page page) {
final Props props = getApplication().getServerProps();
final String execExternalLinkURL = ExternalLinkUtils.getExternalAnalyzerOnReq(props, req);
if (execExternalLinkURL.length() > 0) {
page.add("executionExternalLinkURL", execExternalLinkURL);
logger.debug("Added an External analyzer to the page");
logger.debug("External analyzer url: " + execExternalLinkURL);
final String execExternalLinkLabel =
props.getString(Constants.ConfigurationKeys.AZKABAN_SERVER_EXTERNAL_ANALYZER_LABEL,
"External Analyzer");
page.add("executionExternalLinkLabel", execExternalLinkLabel);
logger.debug("External analyzer label set to : " + execExternalLinkLabel);
}
}
private void handleExecutionFlowPageByExecId(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/executingflowpage.vm");
final User user = session.getUser();
final int execId = getIntParam(req, "execid");
page.add("execid", execId);
page.add("triggerInstanceId", "-1");
ExecutableFlow flow = null;
try {
flow = this.executorManagerAdapter.getExecutableFlow(execId);
if (flow == null) {
page.add("errorMsg", "Error loading executing flow " + execId
+ " not found.");
page.render();
return;
}
} catch (final ExecutorManagerException e) {
page.add("errorMsg", "Error loading executing flow: " + e.getMessage());
page.render();
return;
}
final int projectId = flow.getProjectId();
final Project project =
getProjectPageByPermission(page, projectId, user, Type.READ);
if (project == null) {
page.render();
return;
}
addExternalLinkLabel(req, page);
page.add("projectId", project.getId());
page.add("projectName", project.getName());
page.add("flowid", flow.getFlowId());
// check the current flow definition to see if the flow is locked.
final Flow currentFlow = project.getFlow(flow.getFlowId());
boolean isCurrentFlowLocked = false;
if (currentFlow != null) {
isCurrentFlowLocked = currentFlow.isLocked();
} else {
logger.info("Flow {} not found in project {}.", flow.getFlowId(), project.getName());
}
page.add("isLocked", isCurrentFlowLocked);
page.render();
}
protected Project getProjectPageByPermission(final Page page, final int projectId,
final User user, final Permission.Type type) {
final Project project = this.projectManager.getProject(projectId);
if (project == null) {
page.add("errorMsg", "Project " + project + " not found.");
} else if (!hasPermission(project, user, type)) {
page.add("errorMsg",
"User " + user.getUserId() + " doesn't have " + type.name()
+ " permissions on " + project.getName());
} else {
return project;
}
return null;
}
protected Project getProjectAjaxByPermission(final Map<String, Object> ret,
final String projectName, final User user, final Permission.Type type) {
return filterProjectByPermission(this.projectManager.getProject(projectName), user, type, ret);
}
protected Project getProjectAjaxByPermission(final Map<String, Object> ret,
final int projectId, final User user, final Permission.Type type) {
return filterProjectByPermission(this.projectManager.getProject(projectId), user, type, ret);
}
private void ajaxRestartFailed(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exFlow) throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
Type.EXECUTE);
if (project == null) {
return;
}
if (exFlow.getStatus() == Status.FAILED
|| exFlow.getStatus() == Status.SUCCEEDED) {
ret.put("error", "Flow has already finished. Please re-execute.");
return;
}
try {
this.executorManagerAdapter.retryFailures(exFlow, user.getUserId());
} catch (final ExecutorManagerException e) {
ret.put("error", e.getMessage());
}
}
/**
* Gets the logs through plain text stream to reduce memory overhead.
*/
private void ajaxFetchExecFlowLogs(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exFlow) throws ServletException {
final long startMs = System.currentTimeMillis();
final Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
final int offset = this.getIntParam(req, "offset");
final int length = this.getIntParam(req, "length");
resp.setCharacterEncoding("utf-8");
try {
final LogData data = this.executorManagerAdapter.getExecutableFlowLog(exFlow, offset, length);
ret.putAll(appendLogData(data, offset));
} catch (final ExecutorManagerException e) {
throw new ServletException(e);
}
/*
     * We originally considered leveraging Dropwizard's Timer API {@link com.codahale.metrics.Timer}
     * to measure the duration.
     * However, a Timer produces too many accompanying metrics (e.g., min, max, 99th percentile)
     * for a single measurement. We decided to use a gauge instead and monitor how it behaves.
*/
this.webMetrics.setFetchLogLatency(System.currentTimeMillis() - startMs);
}
/**
* Gets the logs through ajax plain text stream to reduce memory overhead.
*/
private void ajaxFetchJobLogs(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exFlow) throws ServletException {
final Project project = getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
final int offset = this.getIntParam(req, "offset");
final int length = this.getIntParam(req, "length");
final String jobId = this.getParam(req, "jobId");
resp.setCharacterEncoding("utf-8");
try {
final ExecutableNode node = exFlow.getExecutableNodePath(jobId);
if (node == null) {
ret.put("error", "Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
return;
}
final int attempt = this.getIntParam(req, "attempt", node.getAttempt());
final LogData data = this.executorManagerAdapter
.getExecutionJobLog(exFlow, jobId, offset, length, attempt);
ret.putAll(appendLogData(data, offset));
} catch (final ExecutorManagerException e) {
throw new ServletException(e);
}
}
private Map<String, Object> appendLogData(final LogData data, final int defaultOffset) {
final Map<String, Object> parameters = new HashMap<>();
if (data == null) {
parameters.put("length", 0);
parameters.put("offset", defaultOffset);
parameters.put("data", "");
} else {
parameters.put("length", data.getLength());
parameters.put("offset", data.getOffset());
parameters.put("data", StringEscapeUtils.escapeHtml(data.getData()));
}
return parameters;
}
private void ajaxFetchJobStats(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exFlow) throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
final String jobId = this.getParam(req, "jobid");
resp.setCharacterEncoding("utf-8");
try {
final ExecutableNode node = exFlow.getExecutableNodePath(jobId);
if (node == null) {
ret.put("error",
"Job " + jobId + " doesn't exist in " + exFlow.getExecutionId());
return;
}
final List<Object> jsonObj =
this.executorManagerAdapter
.getExecutionJobStats(exFlow, jobId, node.getAttempt());
ret.put("jobStats", jsonObj);
} catch (final ExecutorManagerException e) {
ret.put("error", "Error retrieving stats for job " + jobId);
return;
}
}
private void ajaxFetchFlowInfo(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final String projectName, final String flowId) throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, projectName, user, Type.READ);
if (project == null) {
return;
}
final Flow flow = project.getFlow(flowId);
if (flow == null) {
ret.put("error", "Error loading flow. Flow " + flowId
+ " doesn't exist in " + projectName);
return;
}
ret.put("successEmails", flow.getSuccessEmails());
ret.put("failureEmails", flow.getFailureEmails());
Schedule sflow = null;
try {
for (final Schedule sched : this.scheduleManager.getSchedules()) {
if (sched.getProjectId() == project.getId()
&& sched.getFlowName().equals(flowId)) {
sflow = sched;
break;
}
}
} catch (final ScheduleManagerException e) {
throw new ServletException(e);
}
if (sflow != null) {
ret.put("scheduled", sflow.getNextExecTime());
}
}
private void ajaxFetchExecutableFlowInfo(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exflow) throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, exflow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
final Flow flow = project.getFlow(exflow.getFlowId());
if (flow == null) {
ret.put("error", "Error loading flow. Flow " + exflow.getFlowId()
+ " doesn't exist in " + exflow.getProjectId());
return;
}
final ExecutionOptions options = exflow.getExecutionOptions();
ret.put("successEmails", options.getSuccessEmails());
ret.put("failureEmails", options.getFailureEmails());
ret.put("flowParam", options.getFlowParameters());
final FailureAction action = options.getFailureAction();
String failureAction = null;
switch (action) {
case FINISH_CURRENTLY_RUNNING:
failureAction = "finishCurrent";
break;
case CANCEL_ALL:
failureAction = "cancelImmediately";
break;
case FINISH_ALL_POSSIBLE:
failureAction = "finishPossible";
break;
}
ret.put("failureAction", failureAction);
ret.put("notifyFailureFirst", options.getNotifyOnFirstFailure());
ret.put("notifyFailureLast", options.getNotifyOnLastFailure());
ret.put("failureEmailsOverride", options.isFailureEmailsOverridden());
ret.put("successEmailsOverride", options.isSuccessEmailsOverridden());
ret.put("concurrentOptions", options.getConcurrentOption());
ret.put("pipelineLevel", options.getPipelineLevel());
ret.put("pipelineExecution", options.getPipelineExecutionId());
ret.put("queueLevel", options.getQueueLevel());
final HashMap<String, String> nodeStatus = new HashMap<>();
for (final ExecutableNode node : exflow.getExecutableNodes()) {
nodeStatus.put(node.getId(), node.getStatus().toString());
}
ret.put("nodeStatus", nodeStatus);
ret.put("disabled", options.getDisabledJobs());
}
private void ajaxCancelFlow(final HttpServletRequest req, final HttpServletResponse resp,
final HashMap<String, Object> ret, final User user, final ExecutableFlow exFlow)
throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
Type.EXECUTE);
if (project == null) {
return;
}
try {
this.executorManagerAdapter.cancelFlow(exFlow, user.getUserId());
} catch (final ExecutorManagerException e) {
ret.put("error", e.getMessage());
}
}
private void ajaxGetFlowRunning(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final String projectId, final String flowId) throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, projectId, user, Type.EXECUTE);
if (project == null) {
return;
}
final List<Integer> refs =
this.executorManagerAdapter.getRunningFlows(project.getId(), flowId);
if (!refs.isEmpty()) {
ret.put("execIds", refs);
}
}
private void ajaxPauseFlow(final HttpServletRequest req, final HttpServletResponse resp,
final HashMap<String, Object> ret, final User user, final ExecutableFlow exFlow)
throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
Type.EXECUTE);
if (project == null) {
return;
}
try {
this.executorManagerAdapter.pauseFlow(exFlow, user.getUserId());
} catch (final ExecutorManagerException e) {
ret.put("error", e.getMessage());
}
}
private void ajaxResumeFlow(final HttpServletRequest req, final HttpServletResponse resp,
final HashMap<String, Object> ret, final User user, final ExecutableFlow exFlow)
throws ServletException {
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user,
Type.EXECUTE);
if (project == null) {
return;
}
try {
this.executorManagerAdapter.resumeFlow(exFlow, user.getUserId());
} catch (final ExecutorManagerException e) {
ret.put("resume", e.getMessage());
}
}
private Map<String, Object> getExecutableFlowUpdateInfo(final ExecutableNode node,
final long lastUpdateTime) {
final HashMap<String, Object> nodeObj = new HashMap<>();
if (node instanceof ExecutableFlowBase) {
final ExecutableFlowBase base = (ExecutableFlowBase) node;
final ArrayList<Map<String, Object>> nodeList =
new ArrayList<>();
for (final ExecutableNode subNode : base.getExecutableNodes()) {
final Map<String, Object> subNodeObj =
getExecutableFlowUpdateInfo(subNode, lastUpdateTime);
if (!subNodeObj.isEmpty()) {
nodeList.add(subNodeObj);
}
}
if (!nodeList.isEmpty()) {
nodeObj.put("flow", base.getFlowId());
nodeObj.put("nodes", nodeList);
}
}
if (node.getUpdateTime() > lastUpdateTime || !nodeObj.isEmpty()) {
nodeObj.put("id", node.getId());
nodeObj.put("status", node.getStatus());
nodeObj.put("startTime", node.getStartTime());
nodeObj.put("endTime", node.getEndTime());
nodeObj.put("updateTime", node.getUpdateTime());
nodeObj.put("attempt", node.getAttempt());
if (node.getAttempt() > 0) {
nodeObj.put("pastAttempts", node.getAttemptObjects());
}
}
return nodeObj;
}
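  /*
   * Illustration (added comment): the map built above is a delta. A node is included
   * only when its updateTime is newer than lastUpdateTime, and a sub-flow appears only
   * when it contains such a node, e.g.:
   *
   *   { "id": "flow1", "status": "RUNNING", "flow": "flow1",
   *     "nodes": [ { "id": "jobA", "status": "SUCCEEDED", ... } ], ... }
   *
   * An empty map means nothing changed since lastUpdateTime.
   */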
private Map<String, Object> getExecutableNodeInfo(final ExecutableNode node) {
final HashMap<String, Object> nodeObj = new HashMap<>();
nodeObj.put("id", node.getId());
nodeObj.put("status", node.getStatus());
nodeObj.put("startTime", node.getStartTime());
nodeObj.put("endTime", node.getEndTime());
nodeObj.put("updateTime", node.getUpdateTime());
nodeObj.put("type", node.getType());
if (node.getCondition() != null) {
nodeObj.put("condition", node.getCondition());
}
nodeObj.put("nestedId", node.getNestedId());
nodeObj.put("attempt", node.getAttempt());
if (node.getAttempt() > 0) {
nodeObj.put("pastAttempts", node.getAttemptObjects());
}
if (node.getInNodes() != null && !node.getInNodes().isEmpty()) {
nodeObj.put("in", node.getInNodes());
}
if (node instanceof ExecutableFlowBase) {
final ExecutableFlowBase base = (ExecutableFlowBase) node;
final ArrayList<Map<String, Object>> nodeList =
new ArrayList<>();
for (final ExecutableNode subNode : base.getExecutableNodes()) {
final Map<String, Object> subNodeObj = getExecutableNodeInfo(subNode);
if (!subNodeObj.isEmpty()) {
nodeList.add(subNodeObj);
}
}
nodeObj.put("flow", base.getFlowId());
nodeObj.put("nodes", nodeList);
nodeObj.put("flowId", base.getFlowId());
}
return nodeObj;
}
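  /*
   * Illustration (added comment): unlike the update variant above, this method always
   * serializes the full node and recurses into embedded flows, so the resulting JSON
   * mirrors the DAG: each ExecutableFlowBase contributes a "nodes" list plus
   * "flow"/"flowId" entries alongside its own fields.
   */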
private void ajaxFetchExecutableFlowUpdate(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exFlow) throws ServletException {
    final long lastUpdateTime = Long.parseLong(getParam(req, "lastUpdateTime"));
logger.info("Fetching " + exFlow.getExecutionId());
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
final Map<String, Object> map =
getExecutableFlowUpdateInfo(exFlow, lastUpdateTime);
map.put("status", exFlow.getStatus());
map.put("startTime", exFlow.getStartTime());
map.put("endTime", exFlow.getEndTime());
map.put("updateTime", exFlow.getUpdateTime());
ret.putAll(map);
}
private void ajaxFetchExecutableFlow(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user,
final ExecutableFlow exFlow) throws ServletException {
logger.info("Fetching " + exFlow.getExecutionId());
final Project project =
getProjectAjaxByPermission(ret, exFlow.getProjectId(), user, Type.READ);
if (project == null) {
return;
}
ret.put("submitTime", exFlow.getSubmitTime());
ret.put("submitUser", exFlow.getSubmitUser());
ret.put("execid", exFlow.getExecutionId());
ret.put("projectId", exFlow.getProjectId());
ret.put("project", project.getName());
final Map<String, Object> flowObj = getExecutableNodeInfo(exFlow);
ret.putAll(flowObj);
}
private void ajaxExecuteFlow(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user)
throws ServletException {
final String projectName = getParam(req, "project");
final String flowId = getParam(req, "flow");
final Project project =
getProjectAjaxByPermission(ret, projectName, user, Type.EXECUTE);
if (project == null) {
ret.put("error", "Project '" + projectName + "' doesn't exist.");
return;
}
ret.put("flow", flowId);
final Flow flow = project.getFlow(flowId);
if (flow == null) {
ret.put("error", "Flow '" + flowId + "' cannot be found in project " + project);
return;
}
final ExecutableFlow exflow = FlowUtils.createExecutableFlow(project, flow);
exflow.setSubmitUser(user.getUserId());
final ExecutionOptions options = HttpRequestUtils.parseFlowOptions(req);
exflow.setExecutionOptions(options);
if (!options.isFailureEmailsOverridden()) {
options.setFailureEmails(flow.getFailureEmails());
}
if (!options.isSuccessEmailsOverridden()) {
options.setSuccessEmails(flow.getSuccessEmails());
}
options.setMailCreator(flow.getMailCreator());
try {
HttpRequestUtils.filterAdminOnlyFlowParams(this.userManager, options, user);
final String message =
this.executorManagerAdapter.submitExecutableFlow(exflow, user.getUserId());
ret.put("message", message);
} catch (final Exception e) {
      logger.error("Error submitting flow " + exflow.getFlowId() + ".", e);
ret.put("error",
"Error submitting flow " + exflow.getFlowId() + ". " + e.getMessage());
}
ret.put("execid", exflow.getExecutionId());
}
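  /*
   * Illustration (added comment): assuming this method is dispatched under an ajax
   * name such as "executeFlow" (the dispatch table lives elsewhere), a submission
   * might look like:
   *
   *   GET /executor?ajax=executeFlow&project=myProject&flow=myFlow
   *
   * Additional execution options (failure emails, concurrency, etc.) are parsed
   * from the same request by HttpRequestUtils.parseFlowOptions().
   */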
private void ajaxRampActions(final HttpServletRequest req,
final HttpServletResponse resp, final HashMap<String, Object> ret, final User user)
throws ServletException {
try {
Object body = HttpRequestUtils.getJsonBody(req);
if (HttpRequestUtils.hasPermission(this.userManager, user, Type.ADMIN)) {
Map<String, String> result = new HashMap<>();
if (body instanceof List) { // A list of actions
List<Map<String, Object>> rampActions = (List<Map<String, Object>>)body;
result = this.executorManagerAdapter.doRampActions(rampActions);
} else if (body instanceof Map) {
List<Map<String, Object>> rampActions = new ArrayList<>();
rampActions.add((Map<String, Object>) body);
result = this.executorManagerAdapter.doRampActions(rampActions);
} else {
result.put("error", "Invalid Body Format");
}
ret.putAll(result);
}
} catch (final Exception e) {
      logger.error("Error updating ramp actions.", e);
      ret.put("error", "Error updating ramp actions. " + e.getMessage());
}
}
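  /*
   * Illustration (added comment): the JSON body may be either a single ramp-action
   * object or a list of them; both shapes are normalized to a list above. The exact
   * action fields are defined by ExecutorManagerAdapter.doRampActions and are not
   * repeated here.
   */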
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/FlowTriggerInstanceServlet.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.flowtrigger.CancellationCause;
import azkaban.flowtrigger.DependencyInstance;
import azkaban.flowtrigger.FlowTriggerService;
import azkaban.flowtrigger.TriggerInstance;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.server.session.Session;
import azkaban.user.Permission.Type;
import azkaban.webapp.AzkabanWebServer;
import com.google.gson.GsonBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
public class FlowTriggerInstanceServlet extends LoginAbstractAzkabanServlet {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(FlowTriggerInstanceServlet.class);
private FlowTriggerService triggerService;
private ProjectManager projectManager;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.triggerService = server.getFlowTriggerService();
this.projectManager = server.getProjectManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else {
handlePage(req, resp, session);
}
}
private void handlePage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/executingflowtriggerspage.vm");
page.add("runningTriggers", this.triggerService.getRunningTriggers());
page.add("recentTriggers", this.triggerService.getRecentlyFinished());
page.add("vmutils", new ExecutorVMHelper());
page.render();
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
    // TODO chengren311: add permission control
if (ajaxName.equals("fetchRunningTriggers")) {
ajaxFetchRunningTriggerInstances(ret);
} else if (ajaxName.equals("killRunningTrigger")) {
if (hasParam(req, "id")) {
final String triggerInstanceId = getParam(req, "id");
ajaxKillTriggerInstance(triggerInstanceId, session, ret);
} else {
ret.put("error", "please specify a valid running trigger instance id");
}
} else if (ajaxName.equals("showTriggerProperties")) {
if (hasParam(req, "id")) {
final String triggerInstanceId = getParam(req, "id");
loadTriggerProperties(triggerInstanceId, ret);
} else {
ret.put("error", "please specify a valid running trigger instance id");
}
} else if (ajaxName.equals("fetchTriggerStatus")) {
if (hasParam(req, "triggerinstid")) {
final String triggerInstanceId = getParam(req, "triggerinstid");
ajaxFetchTriggerInstanceByTriggerInstId(triggerInstanceId, session, ret);
} else if (hasParam(req, "execid")) {
final int execId = getIntParam(req, "execid");
ajaxFetchTriggerInstanceByExecId(execId, session, ret);
} else {
ret.put("error", "please specify a valid trigger instance id or flow execution id");
}
} else if (ajaxName.equals("fetchTriggerInstances")) {
if (hasParam(req, "project") && hasParam(req, "flow")) {
final String projectName = getParam(req, "project");
final String flowId = getParam(req, "flow");
final Project project = this.projectManager.getProject(projectName);
if (project == null) {
ret.put("error", "please specify a valid project name");
} else if (!hasPermission(project, session.getUser(), Type.READ)) {
ret.put("error", "Permission denied. Need READ access.");
} else {
ajaxFetchTriggerInstances(project.getId(), flowId, ret, req);
}
} else {
ret.put("error", "please specify project id and flow id");
}
}
    this.writeJSON(resp, ret);
}
private void ajaxFetchTriggerInstances(
final int projectId,
final String flowId,
final HashMap<String, Object> ret,
final HttpServletRequest req)
throws ServletException {
    final int from = Integer.parseInt(getParam(req, "start"));
    final int length = Integer.parseInt(getParam(req, "length"));
final Collection<TriggerInstance> triggerInstances = this.triggerService
.getTriggerInstances(projectId, flowId, from, length);
ret.put("flow", flowId);
ret.put("total", triggerInstances.size());
ret.put("from", from);
ret.put("length", length);
final List<Object> history = new ArrayList<>();
for (final TriggerInstance instance : triggerInstances) {
final HashMap<String, Object> triggerInfo = new HashMap<>();
triggerInfo.put("instanceId", instance.getId());
triggerInfo.put("submitUser", instance.getSubmitUser());
triggerInfo.put("startTime", instance.getStartTime());
triggerInfo.put("endTime", instance.getEndTime());
triggerInfo.put("status", instance.getStatus().toString());
history.add(triggerInfo);
}
ret.put("executions", history);
}
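  /*
   * Illustration (added comment): "start" and "length" page through the trigger
   * history, and the response lists one entry per trigger instance, e.g.:
   *
   *   { "flow": "myFlow", "from": 0, "length": 20, "total": 20,
   *     "executions": [ { "instanceId": "...", "status": "SUCCEEDED", ... } ] }
   *
   * Note that "total" is the size of the returned page, not of the whole history.
   */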
private void loadTriggerProperties(final String triggerInstanceId,
final HashMap<String, Object> ret) {
final TriggerInstance triggerInstance = this.triggerService
.findTriggerInstanceById(triggerInstanceId);
if (triggerInstance != null) {
ret.put("triggerProperties", triggerInstance.getFlowTrigger().toString());
} else {
ret.put("error", "the trigger instance doesn't exist");
}
}
private void wrapTriggerInst(final TriggerInstance triggerInst,
final HashMap<String, Object> ret) {
final List<Map<String, Object>> dependencyOutput = new ArrayList<>();
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
final Map<String, Object> depMap = new HashMap<>();
depMap.put("triggerInstanceId", depInst.getTriggerInstance().getId());
depMap.put("dependencyName", depInst.getDepName());
depMap.put("dependencyType", depInst.getTriggerInstance().getFlowTrigger()
.getDependencyByName(depInst.getDepName()).getType());
depMap.put("dependencyStartTime", depInst.getStartTime());
depMap.put("dependencyEndTime", depInst.getEndTime());
depMap.put("dependencyStatus", depInst.getStatus());
depMap.put("dependencyCancelCause", depInst.getCancellationCause());
depMap.put("dependencyConfig", depInst.getTriggerInstance().getFlowTrigger()
.getDependencyByName(depInst.getDepName()));
dependencyOutput.add(depMap);
}
ret.put("items", dependencyOutput);
ret.put("triggerId", triggerInst.getId());
ret.put("triggerSubmitter", triggerInst.getSubmitUser());
ret.put("triggerStartTime", triggerInst.getStartTime());
ret.put("triggerEndTime", triggerInst.getEndTime());
ret.put("triggerStatus", triggerInst.getStatus());
final String flowTriggerJson = new GsonBuilder().setPrettyPrinting().create()
.toJson(triggerInst.getFlowTrigger());
ret.put("triggerProps", flowTriggerJson);
}
private void ajaxFetchTriggerInstanceByExecId(final int execId, final Session session,
final HashMap<String, Object> ret) {
final TriggerInstance triggerInst = this.triggerService
.findTriggerInstanceByExecId(execId);
if (triggerInst != null) {
wrapTriggerInst(triggerInst, ret);
}
}
private void ajaxFetchTriggerInstanceByTriggerInstId(final String triggerInstanceId,
final Session session, final HashMap<String, Object> ret) {
final TriggerInstance triggerInst = this.triggerService
.findTriggerInstanceById(triggerInstanceId);
if (triggerInst != null) {
wrapTriggerInst(triggerInst, ret);
}
}
private void ajaxKillTriggerInstance(final String triggerInstanceId, final Session session,
final HashMap<String, Object> ret) {
final TriggerInstance triggerInst = this.triggerService
.findRunningTriggerInstById(triggerInstanceId);
if (triggerInst != null) {
if (hasPermission(triggerInst.getProject(), session.getUser(), Type.EXECUTE)) {
this.triggerService.cancelTriggerInstance(triggerInst, CancellationCause.MANUAL);
} else {
ret.put("error", "no permission to kill the trigger");
}
} else {
ret.put("error", "the trigger doesn't exist, might already finished or cancelled");
}
}
private void ajaxFetchRunningTriggerInstances(final HashMap<String, Object> ret) throws
ServletException {
final Collection<TriggerInstance> triggerInstanceList = this.triggerService
.getRunningTriggers();
final List<HashMap<String, Object>> output = new ArrayList<>();
ret.put("items", output);
for (final TriggerInstance triggerInstance : triggerInstanceList) {
writeTriggerInstancesData(output, triggerInstance);
}
}
private void writeTriggerInstancesData(final List<HashMap<String, Object>> output,
final TriggerInstance triggerInst) {
final HashMap<String, Object> data = new HashMap<>();
data.put("id", triggerInst.getId());
data.put("starttime", triggerInst.getStartTime());
data.put("endtime", triggerInst.getEndTime());
data.put("status", triggerInst.getStatus());
data.put("flowExecutionId", triggerInst.getFlowExecId());
data.put("submitUser", triggerInst.getSubmitUser());
data.put("flowTriggerConfig", triggerInst.getFlowTrigger());
final List<Map<String, Object>> dependencyOutput = new ArrayList<>();
for (final DependencyInstance depInst : triggerInst.getDepInstances()) {
final Map<String, Object> depMap = new HashMap<>();
depMap.put("dependencyName", depInst.getDepName());
depMap.put("dependencyStarttime", depInst.getStartTime());
depMap.put("dependencyEndtime", depInst.getEndTime());
depMap.put("dependencyStatus", depInst.getStatus());
depMap.put("dependencyConfig", depInst.getTriggerInstance().getFlowTrigger()
.getDependencyByName
(depInst.getDepName()));
dependencyOutput.add(depMap);
}
data.put("dependencies", dependencyOutput);
output.add(data);
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
}
}
/**
   * @param cronTimezone represents the timezone from the remote API call
   * @return if the string is equal to UTC, we return UTC; otherwise, we always return the
   * default timezone.
*/
private DateTimeZone parseTimeZone(final String cronTimezone) {
if (cronTimezone != null && cronTimezone.equals("UTC")) {
return DateTimeZone.UTC;
}
return DateTimeZone.getDefault();
}
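  /*
   * Illustration (added comment): parseTimeZone("UTC") yields DateTimeZone.UTC, while
   * any other value (including null or a zone id such as "America/Los_Angeles") falls
   * back to the server's default zone.
   */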
private DateTime getPresentTimeByTimezone(final DateTimeZone timezone) {
return new DateTime(timezone);
}
public class ExecutorVMHelper {
public String getProjectName(final int id) {
final Project project = FlowTriggerInstanceServlet.this.projectManager.getProject(id);
if (project == null) {
return String.valueOf(id);
}
return project.getName();
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/FlowTriggerServlet.java
|
/*
* Copyright 2018 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.flowtrigger.quartz.FlowTriggerScheduler;
import azkaban.flowtrigger.quartz.FlowTriggerScheduler.ScheduledFlowTrigger;
import azkaban.project.CronSchedule;
import azkaban.project.FlowTrigger;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.server.session.Session;
import azkaban.user.Permission.Type;
import azkaban.utils.TimeUtils;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class FlowTriggerServlet extends LoginAbstractAzkabanServlet {
private static final long serialVersionUID = 1L;
private FlowTriggerScheduler scheduler;
private ProjectManager projectManager;
private static final Logger logger = LoggerFactory.getLogger(FlowTriggerServlet.class);
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.scheduler = server.getScheduler();
this.projectManager = server.getProjectManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else {
handlePage(req, resp, session);
}
}
  private void ajaxFetchTrigger(final int projectId, final String flowId, final Session session,
      final HashMap<String, Object> ret) {
    final ScheduledFlowTrigger res = this.scheduler
        .getScheduledFlowTriggerJobs().stream()
        .filter(scheduledFlowTrigger -> scheduledFlowTrigger.getFlowId().equals(flowId)
            && scheduledFlowTrigger.getProjectId() == projectId)
        .findFirst().orElse(null);
if (res != null) {
final Map<String, Object> jsonObj = new HashMap<>();
      final FlowTrigger flowTrigger = res.getFlowTrigger();
      final CronSchedule schedule = flowTrigger.getSchedule();
jsonObj.put("cronExpression", schedule.getCronExpression());
jsonObj.put("submitUser", res.getSubmitUser());
jsonObj.put("firstSchedTime",
TimeUtils.formatDateTime(res.getQuartzTrigger().getStartTime().getTime()));
jsonObj.put("nextExecTime",
TimeUtils.formatDateTime(res.getQuartzTrigger().getNextFireTime().getTime()));
Long maxWaitMin = null;
if (flowTrigger.getMaxWaitDuration().isPresent()) {
maxWaitMin = flowTrigger.getMaxWaitDuration().get().toMinutes();
}
jsonObj.put("maxWaitMin", maxWaitMin);
if (!flowTrigger.getDependencies().isEmpty()) {
jsonObj.put("dependencies", res.getDependencyListJson());
}
ret.put("flowTrigger", jsonObj);
}
}
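  /*
   * Illustration (added comment): when a matching scheduled trigger is found, the
   * "flowTrigger" entry of the response carries the cron expression, the submitter,
   * the first/next fire times, the optional max wait in minutes (null when unbounded)
   * and, if present, the dependency list, e.g.:
   *
   *   { "flowTrigger": { "cronExpression": "0 0 * * * ?", "submitUser": "azkaban",
   *                      "maxWaitMin": 30, ... } }
   */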
private boolean checkProjectIdAndFlowId(final HttpServletRequest req) {
return hasParam(req, "projectId") && hasParam(req, "flowId");
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
if (ajaxName.equals("fetchTrigger")) {
if (checkProjectIdAndFlowId(req)) {
final int projectId = getIntParam(req, "projectId");
final String flowId = getParam(req, "flowId");
ajaxFetchTrigger(projectId, flowId, session, ret);
}
} else if (ajaxName.equals("pauseTrigger") || ajaxName.equals("resumeTrigger")) {
if (checkProjectIdAndFlowId(req)) {
final int projectId = getIntParam(req, "projectId");
final String flowId = getParam(req, "flowId");
final Project project = this.projectManager.getProject(projectId);
if (project == null) {
ret.put("error", "please specify a valid project id");
} else if (!hasPermission(project, session.getUser(), Type.ADMIN)) {
ret.put("error", "Permission denied. Need ADMIN access.");
} else {
try {
if (ajaxName.equals("pauseTrigger")) {
if (this.scheduler.pauseFlowTriggerIfPresent(projectId, flowId)) {
logger.info("Flow trigger for flow {}.{} is paused", project.getName(), flowId);
} else {
logger.warn("Flow trigger for flow {}.{} doesn't exist", project.getName(), flowId);
}
} else {
if (this.scheduler.resumeFlowTriggerIfPresent(projectId, flowId)) {
logger.info("Flow trigger for flow {}.{} is resumed", project.getName(), flowId);
} else {
logger.warn("Flow trigger for flow {}.{} doesn't exist", project.getName(), flowId);
}
}
ret.put("status", "success");
} catch (final SchedulerException ex) {
ret.put("error", ex.getMessage());
}
}
}
}
    this.writeJSON(resp, ret);
}
private void handlePage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/flowtriggerspage.vm");
page.add("flowTriggers", this.scheduler.getScheduledFlowTriggerJobs());
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/HistoryServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
import azkaban.project.ProjectManager;
import azkaban.server.session.Session;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.joda.time.format.DateTimeFormat;
public class HistoryServlet extends LoginAbstractAzkabanServlet {
private static final String FILTER_BY_DATE_PATTERN = "MM/dd/yyyy hh:mm aa";
private static final long serialVersionUID = 1L;
private ExecutorManagerAdapter executorManagerAdapter;
private ProjectManager projectManager;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.executorManagerAdapter = server.getExecutorManager();
this.projectManager = server.getProjectManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else if (hasParam(req, "days")) {
handleHistoryDayPage(req, resp, session);
} else if (hasParam(req, "timeline")) {
handleHistoryTimelinePage(req, resp, session);
} else {
handleHistoryPage(req, resp, session);
}
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
if (ajaxName.equals("fetch")) {
fetchHistoryData(req, resp, ret);
}
    this.writeJSON(resp, ret);
}
  private void fetchHistoryData(final HttpServletRequest req,
      final HttpServletResponse resp, final HashMap<String, Object> ret)
      throws ServletException {
    // No-op: the "fetch" ajax action currently returns an empty result.
  }
private void handleHistoryPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/historypage.vm");
int pageNum = getIntParam(req, "page", 1);
final int pageSize = getIntParam(req, "size", getDisplayExecutionPageSize());
page.add("vmutils", new VelocityUtil(this.projectManager));
if (pageNum < 0) {
pageNum = 1;
}
List<ExecutableFlow> history = null;
if (hasParam(req, "advfilter")) {
final String projContain = getParam(req, "projcontain");
final String flowContain = getParam(req, "flowcontain");
final String userContain = getParam(req, "usercontain");
final int status = getIntParam(req, "status");
final String begin = getParam(req, "begin");
final long beginTime =
"".equals(begin) ? -1 : DateTimeFormat.forPattern(FILTER_BY_DATE_PATTERN)
.parseDateTime(begin).getMillis();
final String end = getParam(req, "end");
final long endTime =
"".equals(end) ? -1 : DateTimeFormat.forPattern(FILTER_BY_DATE_PATTERN)
.parseDateTime(end).getMillis();
try {
history =
this.executorManagerAdapter.getExecutableFlows(projContain, flowContain,
userContain, status, beginTime, endTime, (pageNum - 1)
* pageSize, pageSize);
} catch (final ExecutorManagerException e) {
page.add("error", e.getMessage());
}
} else if (hasParam(req, "search")) {
final String searchTerm = getParam(req, "searchterm");
try {
history =
this.executorManagerAdapter.getExecutableFlows(searchTerm, (pageNum - 1)
* pageSize, pageSize);
} catch (final ExecutorManagerException e) {
page.add("error", e.getMessage());
}
} else {
try {
history =
this.executorManagerAdapter.getExecutableFlows((pageNum - 1) * pageSize,
pageSize);
      } catch (final ExecutorManagerException e) {
        page.add("error", e.getMessage());
      }
}
page.add("flowHistory", history);
page.add("size", pageSize);
page.add("page", pageNum);
// keep the search terms so that we can navigate to later pages
if (hasParam(req, "searchterm") && !getParam(req, "searchterm").equals("")) {
page.add("search", "true");
page.add("search_term", getParam(req, "searchterm"));
}
if (hasParam(req, "advfilter")) {
page.add("advfilter", "true");
page.add("projcontain", getParam(req, "projcontain"));
page.add("flowcontain", getParam(req, "flowcontain"));
page.add("usercontain", getParam(req, "usercontain"));
page.add("status", getIntParam(req, "status"));
page.add("begin", getParam(req, "begin"));
page.add("end", getParam(req, "end"));
}
if (pageNum == 1) {
page.add("previous", new PageSelection(1, pageSize, true, false));
} else {
page.add("previous", new PageSelection(pageNum - 1, pageSize, false,
false));
}
page.add("next", new PageSelection(pageNum + 1, pageSize, false, false));
    // Now for the 5 other values.
    int pageStartValue = 1;
    if (pageNum > 3) {
      pageStartValue = pageNum - 2;
    }
    for (int i = 1; i <= 5; i++) {
      page.add("page" + i, new PageSelection(pageStartValue, pageSize, false,
          pageStartValue == pageNum));
      pageStartValue++;
    }
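    // Illustration (added comment): with pageNum = 7 the window above renders page
    // links 5 through 9, with 7 marked as selected.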
page.render();
}
  private void handleHistoryTimelinePage(final HttpServletRequest req,
      final HttpServletResponse resp, final Session session) {
    // No-op: the timeline view is not implemented.
  }
  private void handleHistoryDayPage(final HttpServletRequest req,
      final HttpServletResponse resp, final Session session) {
    // No-op: the per-day view is not implemented.
  }
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
}
public static class PageSelection {
private final int page;
private final int size;
private final boolean disabled;
private boolean selected;
public PageSelection(final int page, final int size, final boolean disabled,
final boolean selected) {
this.page = page;
this.size = size;
this.disabled = disabled;
this.setSelected(selected);
}
public int getPage() {
return this.page;
}
public int getSize() {
return this.size;
}
public boolean getDisabled() {
return this.disabled;
}
public boolean isSelected() {
return this.selected;
}
public void setSelected(final boolean selected) {
this.selected = selected;
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/IndexRedirectServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.server.session.Session;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
/**
* The main page
*/
public class IndexRedirectServlet extends LoginAbstractAzkabanServlet {
private static final long serialVersionUID = -1;
private String defaultServletPath;
public IndexRedirectServlet(final String defaultServletPath) {
this.defaultServletPath = defaultServletPath;
if (this.defaultServletPath.isEmpty()
|| this.defaultServletPath.equals("/")) {
this.defaultServletPath = "/index";
}
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
resp.sendRedirect(this.defaultServletPath);
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
resp.sendRedirect(this.defaultServletPath);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/JMXHttpServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.executor.ConnectorParams;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.server.session.Session;
import azkaban.trigger.TriggerManager;
import azkaban.user.UserManager;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import javax.management.MBeanInfo;
import javax.management.ObjectName;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
/**
 * Limited set of JMX calls for when you cannot attach to the JVM
*/
public class JMXHttpServlet extends LoginAbstractAzkabanServlet implements
ConnectorParams {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(JMXHttpServlet.class
.getName());
private UserManager userManager;
private AzkabanWebServer server;
private ExecutorManagerAdapter executorManagerAdapter;
private TriggerManager triggerManager;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
this.server = (AzkabanWebServer) getApplication();
this.userManager = this.server.getUserManager();
this.executorManagerAdapter = this.server.getExecutorManager();
this.triggerManager = this.server.getTriggerManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
Map<String, Object> ret = new HashMap<>();
final String ajax = getParam(req, "ajax");
if (JMX_GET_ALL_EXECUTOR_ATTRIBUTES.equals(ajax)) {
if (!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_HOSTPORT)) {
ret.put("error", "Parameters '" + JMX_MBEAN + "' and '"
+ JMX_HOSTPORT + "' must be set");
this.writeJSON(resp, ret, true);
return;
}
final String hostPort = getParam(req, JMX_HOSTPORT);
final String mbean = getParam(req, JMX_MBEAN);
final Map<String, Object> result =
this.executorManagerAdapter.callExecutorJMX(hostPort,
JMX_GET_ALL_MBEAN_ATTRIBUTES, mbean);
// order the attribute by name
for (final Map.Entry<String, Object> entry : result.entrySet()) {
if (entry.getValue() instanceof Map) {
final Map<String, Object> entryValue = (Map<String, Object>) entry.getValue();
result.put(entry.getKey(), new TreeMap<>(entryValue));
}
}
ret = result;
} else if (JMX_GET_MBEANS.equals(ajax)) {
ret.put("mbeans", this.server.getMBeanRegistrationManager().getMBeanNames());
} else if (JMX_GET_MBEAN_INFO.equals(ajax)) {
if (hasParam(req, JMX_MBEAN)) {
final String mbeanName = getParam(req, JMX_MBEAN);
try {
final ObjectName name = new ObjectName(mbeanName);
final MBeanInfo info = this.server.getMBeanRegistrationManager().getMBeanInfo(name);
ret.put("attributes", info.getAttributes());
ret.put("description", info.getDescription());
} catch (final Exception e) {
logger.error(e);
ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
}
} else {
ret.put("error", "No 'mbean' name parameter specified");
}
} else if (JMX_GET_MBEAN_ATTRIBUTE.equals(ajax)) {
if (!hasParam(req, JMX_MBEAN) || !hasParam(req, JMX_ATTRIBUTE)) {
ret.put("error", "Parameters 'mbean' and 'attribute' must be set");
} else {
final String mbeanName = getParam(req, JMX_MBEAN);
final String attribute = getParam(req, JMX_ATTRIBUTE);
try {
final ObjectName name = new ObjectName(mbeanName);
final Object obj = this.server.getMBeanRegistrationManager()
.getMBeanAttribute(name, attribute);
ret.put("value", obj);
} catch (final Exception e) {
logger.error(e);
ret.put("error", "'" + mbeanName + "' is not a valid mBean name");
}
}
} else if (JMX_GET_ALL_MBEAN_ATTRIBUTES.equals(ajax)) {
if (!hasParam(req, JMX_MBEAN)) {
ret.put("error", "Parameters 'mbean' must be set");
} else {
ret.putAll(
this.server.getMBeanRegistrationManager().getMBeanResult(getParam(req, JMX_MBEAN)));
}
} else {
ret.put("commands", new String[]{
JMX_GET_MBEANS,
JMX_GET_MBEAN_INFO + "&" + JMX_MBEAN + "=<name>",
JMX_GET_MBEAN_ATTRIBUTE + "&" + JMX_MBEAN + "=<name>&"
+ JMX_ATTRIBUTE + "=<attributename>"});
}
this.writeJSON(resp, ret, true);
} else {
handleJMXPage(req, resp, session);
}
}
private void handleJMXPage(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/jmxpage.vm");
page.add("mbeans", this.server.getMBeanRegistrationManager().getMBeanNames());
final Map<String, Object> executorMBeans = new HashMap<>();
for (final String hostPort : this.executorManagerAdapter.getAllActiveExecutorServerHosts()) {
try {
final Map<String, Object> mbeans =
this.executorManagerAdapter.callExecutorJMX(hostPort, JMX_GET_MBEANS, null);
executorMBeans.put(hostPort, mbeans.get("mbeans"));
} catch (final IOException e) {
logger.error("Cannot contact executor " + hostPort, e);
}
}
page.add("executorRemoteMBeans", executorMBeans);
final Map<String, Object> triggerserverMBeans = new HashMap<>();
triggerserverMBeans.put(this.triggerManager.getJMX().getPrimaryServerHost(),
this.triggerManager.getJMX().getAllJMXMbeans());
page.add("triggerserverRemoteMBeans", triggerserverMBeans);
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/LoginAbstractAzkabanServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import static azkaban.ServiceProvider.SERVICE_PROVIDER;
import azkaban.project.Project;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.Role;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.user.UserManagerException;
import azkaban.utils.StringUtils;
import azkaban.webapp.WebMetrics;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;
/**
* Abstract Servlet that handles auto login when the session hasn't been verified.
*/
public abstract class LoginAbstractAzkabanServlet extends AbstractAzkabanServlet {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger
.getLogger(LoginAbstractAzkabanServlet.class.getName());
private static final String SESSION_ID_NAME = "azkaban.browser.session.id";
private static final int DEFAULT_UPLOAD_DISK_SPOOL_SIZE = 20 * 1024 * 1024;
private static final HashMap<String, String> contextType =
new HashMap<>();
static {
contextType.put(".js", "application/javascript");
contextType.put(".css", "text/css");
contextType.put(".png", "image/png");
contextType.put(".jpeg", "image/jpeg");
contextType.put(".gif", "image/gif");
contextType.put(".jpg", "image/jpeg");
contextType.put(".eot", "application/vnd.ms-fontobject");
contextType.put(".svg", "image/svg+xml");
contextType.put(".ttf", "application/octet-stream");
contextType.put(".woff", "application/x-font-woff");
}
private final WebMetrics webMetrics = SERVICE_PROVIDER.getInstance(WebMetrics.class);
private File webResourceDirectory = null;
private MultipartParser multipartParser;
private boolean shouldLogRawUserAgent = false;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
this.multipartParser = new MultipartParser(DEFAULT_UPLOAD_DISK_SPOOL_SIZE);
this.shouldLogRawUserAgent =
getApplication().getServerProps().getBoolean("accesslog.raw.useragent",
false);
}
public void setResourceDirectory(final File file) {
this.webResourceDirectory = file;
}
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
throws ServletException, IOException {
this.webMetrics.markWebGetCall();
    // Resolve the session from the request.
final Session session = getSessionFromRequest(req);
logRequest(req, session);
if (hasParam(req, "logout")) {
resp.sendRedirect(req.getContextPath());
if (session != null) {
getApplication().getSessionCache()
.removeSession(session.getSessionId());
}
return;
}
if (session != null) {
if (logger.isDebugEnabled()) {
logger.debug("Found session " + session.getUser());
}
if (handleFileGet(req, resp)) {
return;
}
handleGet(req, resp, session);
} else {
if (hasParam(req, "ajax")) {
final HashMap<String, String> retVal = new HashMap<>();
retVal.put("error", "session");
this.writeJSON(resp, retVal);
} else {
handleLogin(req, resp);
}
}
}
/**
   * Logs the request in a format close to the Apache access log format.
*/
private void logRequest(final HttpServletRequest req, final Session session) {
final StringBuilder buf = new StringBuilder();
buf.append(WebUtils.getRealClientIpAddr(req)).append(" ");
if (session != null && session.getUser() != null) {
buf.append(session.getUser().getUserId()).append(" ");
} else {
buf.append(" - ").append(" ");
}
buf.append("\"");
buf.append(req.getMethod()).append(" ");
buf.append(req.getRequestURI()).append(" ");
if (req.getQueryString() != null && !isIllegalPostRequest(req)) {
buf.append(req.getQueryString()).append(" ");
} else {
buf.append("-").append(" ");
}
buf.append(req.getProtocol()).append("\" ");
final String userAgent = req.getHeader("User-Agent");
if (this.shouldLogRawUserAgent) {
buf.append(userAgent);
} else {
// simply log a short string to indicate browser or not
if (StringUtils.isFromBrowser(userAgent)) {
buf.append("browser");
} else {
buf.append("not-browser");
}
}
logger.info(buf.toString());
}
private boolean handleFileGet(final HttpServletRequest req, final HttpServletResponse resp)
throws IOException {
if (this.webResourceDirectory == null) {
return false;
}
// Check if it's a resource
final String prefix = req.getContextPath() + req.getServletPath();
final String path = req.getRequestURI().substring(prefix.length());
final int index = path.lastIndexOf('.');
if (index == -1) {
return false;
}
final String extension = path.substring(index);
if (contextType.containsKey(extension)) {
final File file = new File(this.webResourceDirectory, path);
if (!file.exists() || !file.isFile()) {
return false;
}
resp.setContentType(contextType.get(extension));
final OutputStream output = resp.getOutputStream();
BufferedInputStream input = null;
try {
input = new BufferedInputStream(new FileInputStream(file));
IOUtils.copy(input, output);
} finally {
if (input != null) {
input.close();
}
}
output.flush();
return true;
}
return false;
}
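  /*
   * Illustration (added comment): only paths whose extension appears in the
   * contextType map above are served from webResourceDirectory. For example, a
   * request for <contextPath>/<servletPath>/css/azkaban.css streams
   * webResourceDirectory/css/azkaban.css with Content-Type text/css; anything else
   * falls through to the servlet's handleGet().
   */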
private Session getSessionFromRequest(final HttpServletRequest req)
throws ServletException {
final Cookie cookie = getCookieByName(req, SESSION_ID_NAME);
String sessionId = null;
if (cookie != null) {
sessionId = cookie.getValue();
}
if (sessionId == null && hasParam(req, "session.id")) {
sessionId = getParam(req, "session.id");
}
return getSessionFromSessionId(sessionId);
}
private Session getSessionFromSessionId(final String sessionId) {
if (sessionId == null) {
return null;
}
return getApplication().getSessionCache().getSession(sessionId);
}
private void handleLogin(final HttpServletRequest req, final HttpServletResponse resp)
throws ServletException, IOException {
handleLogin(req, resp, null);
}
private void handleLogin(final HttpServletRequest req, final HttpServletResponse resp,
final String errorMsg) {
final Page page = newPage(req, resp, "azkaban/webapp/servlet/velocity/login.vm");
page.add("passwordPlaceholder", this.passwordPlaceholder);
if (errorMsg != null) {
page.add("errorMsg", errorMsg);
}
page.render();
}
@Override
protected void doPost(final HttpServletRequest req, final HttpServletResponse resp)
throws ServletException, IOException {
Session session = getSessionFromRequest(req);
this.webMetrics.markWebPostCall();
logRequest(req, session);
if (isIllegalPostRequest(req)) {
writeResponse(resp, "Login error. Must pass username and password in request body");
return;
}
// Handle Multipart differently from other post messages
if (ServletFileUpload.isMultipartContent(req)) {
final Map<String, Object> params = this.multipartParser.parseMultipart(req);
if (session == null) {
// See if the session id is properly set.
if (params.containsKey("session.id")) {
final String sessionId = (String) params.get("session.id");
session = getSessionFromSessionId(sessionId);
if (session != null) {
handleMultiformPost(req, resp, params, session);
return;
}
}
// if there's no valid session, see if it's a one time session.
if (!params.containsKey("username") || !params.containsKey("password")) {
writeResponse(resp, "Login error. Need username and password");
return;
}
final String username = (String) params.get("username");
final String password = (String) params.get("password");
final String ip = WebUtils.getRealClientIpAddr(req);
try {
session = createSession(username, password, ip);
} catch (final UserManagerException e) {
writeResponse(resp, "Login error: " + e.getMessage());
return;
}
}
handleMultiformPost(req, resp, params, session);
} else if (hasParam(req, "action")
&& getParam(req, "action").equals("login")) {
final HashMap<String, Object> obj = new HashMap<>();
handleAjaxLoginAction(req, resp, obj);
this.writeJSON(resp, obj);
} else if (session == null) {
if (hasParam(req, "username") && hasParam(req, "password")) {
// If it's a post command with curl, we create a temporary session
        try {
          session = createSession(req);
        } catch (final UserManagerException e) {
          writeResponse(resp, "Login error: " + e.getMessage());
          return;
        }
        handlePost(req, resp, session);
} else {
        // There is no valid session and no temporary login, so we either pass
        // back a message or redirect.
if (isAjaxCall(req)) {
final String response =
AbstractAzkabanServlet
.createJsonResponse("error", "Invalid Session. Need to re-login",
"login", null);
writeResponse(resp, response);
} else {
handleLogin(req, resp, "Enter username and password");
}
}
} else {
handlePost(req, resp, session);
}
}
/**
* Disallows users from logging in by passing their username and password via the request header
* where it'd be logged.
*
* Example of illegal post request: curl -X POST http://localhost:8081/?action=login\&username=azkaban\&password=azkaban
*
* req.getParameterMap() or req.getParameterNames() cannot be used because they draw no
* distinction between the illegal request above and the following valid request: curl -X POST -d
* "action=login&username=azkaban&password=azkaban" http://localhost:8081/
*
* "password=" is searched for because it leverages the query syntax to determine that the user
* is passing the password as a parameter name. There is no other ajax call that has a parameter
* that includes the string "password" at the end which could throw false positives.
*/
private boolean isIllegalPostRequest(final HttpServletRequest req) {
return (req.getQueryString() != null && req.getQueryString().contains("password="));
}
private Session createSession(final HttpServletRequest req)
throws UserManagerException, ServletException {
final String username = getParam(req, "username");
final String password = getParam(req, "password");
final String ip = WebUtils.getRealClientIpAddr(req);
return createSession(username, password, ip);
}
private Session createSession(final String username, final String password, final String ip)
throws UserManagerException {
final UserManager manager = getApplication().getUserManager();
final User user = manager.getUser(username, password);
final String randomUID = UUID.randomUUID().toString();
    return new Session(randomUID, user, ip);
}
protected boolean hasPermission(final Project project, final User user,
final Permission.Type type) {
final UserManager userManager = getApplication().getUserManager();
if (project.hasPermission(user, type)) {
return true;
}
for (final String roleName : user.getRoles()) {
final Role role = userManager.getRole(roleName);
if (role.getPermission().isPermissionSet(type)
|| role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
return true;
}
}
return false;
}
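// Illustrative example (hypothetical role and user, not from the original source): a user whose
// role grants Permission.Type.ADMIN passes this check for any project and any requested type:
//   Role ops = userManager.getRole("ops");         // assume "ops" has ADMIN set
//   hasPermission(project, opsUser, Type.WRITE);   // -> true via the ADMIN fallback above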
/**
* Filter Project based on user authorization
*
* @param project project
* @param user user
* @param type permission allowance
* @return authorized project itself or null if the project is not authorized
*/
protected Project filterProjectByPermission(final Project project, final User user,
final Permission.Type type) {
return filterProjectByPermission(project, user, type, null);
}
/**
* Filter Project based on user authorization
*
* @param project project
* @param user user
* @param type permission allowance
* @param ret return map for holding messages
* @return authorized project itself or null if the project is not authorized
*/
protected Project filterProjectByPermission(final Project project, final User user,
final Permission.Type type, final Map<String, Object> ret) {
if (project == null) {
if (ret != null) {
ret.put("error", "Project 'null' not found.");
}
} else if (!hasPermission(project, user, type)) {
if (ret != null) {
ret.put("error",
"User '" + user.getUserId() + "' doesn't have " + type.name()
+ " permissions on " + project.getName());
}
} else {
return project;
}
return null;
}
protected void handleAjaxLoginAction(final HttpServletRequest req,
final HttpServletResponse resp, final Map<String, Object> ret)
throws ServletException {
if (hasParam(req, "username") && hasParam(req, "password")) {
Session session = null;
try {
session = createSession(req);
} catch (final UserManagerException e) {
ret.put("error", "Incorrect Login. " + e.getMessage());
return;
}
final Cookie cookie = new Cookie(SESSION_ID_NAME, session.getSessionId());
cookie.setPath("/");
resp.addCookie(cookie);
final Set<Session> sessionsOfSameIP =
getApplication().getSessionCache().findSessionsByIP(session.getIp());
// Check potential DDoS attack by bad hosts.
logger.info(
"Session id created for user '" + session.getUser().getUserId() + "' and ip " + session
.getIp() + ", " + sessionsOfSameIP.size() + " session(s) found from this IP");
final boolean sessionAdded = getApplication().getSessionCache().addSession(session);
if (sessionAdded) {
ret.put("status", "success");
ret.put("session.id", session.getSessionId());
} else {
ret.put("error", "Potential DDoS found, the number of sessions for this user and IP "
+ "reached allowed limit (" + getApplication().getSessionCache()
.getMaxNumberOfSessionsPerIpPerUser().get() + ").");
}
} else {
ret.put("error", "Incorrect Login.");
}
}
protected void writeResponse(final HttpServletResponse resp, final String response)
throws IOException {
final Writer writer = resp.getWriter();
writer.append(response);
writer.flush();
}
protected boolean isAjaxCall(final HttpServletRequest req) {
final String value = req.getHeader("X-Requested-With");
if (value != null) {
logger.info("has X-Requested-With " + value);
return value.equals("XMLHttpRequest");
}
return false;
}
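// Note: "X-Requested-With: XMLHttpRequest" is a de-facto convention set by common JS libraries
// (e.g. jQuery's $.ajax), not part of the HTTP spec. A plain curl call won't send it, so it is
// treated as a non-ajax request and receives the HTML login page rather than the JSON error.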
/**
* The get request is handed off to the implementor after the user is logged in.
*/
protected abstract void handleGet(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException,
IOException;
/**
* The post request is handed off to the implementor after the user is logged in.
*/
protected abstract void handlePost(HttpServletRequest req,
HttpServletResponse resp, Session session) throws ServletException,
IOException;
/**
 * The multipart post request is handed off to the implementor after the user is logged in.
*/
protected void handleMultiformPost(final HttpServletRequest req,
final HttpServletResponse resp, final Map<String, Object> multipart, final Session session)
throws ServletException, IOException {
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/MultipartParser.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileUploadException;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
public class MultipartParser {
private final DiskFileItemFactory _uploadItemFactory;
public MultipartParser(final int spillToDiskSize) {
this._uploadItemFactory = new DiskFileItemFactory();
this._uploadItemFactory.setSizeThreshold(spillToDiskSize);
}
public Map<String, Object> parseMultipart(final HttpServletRequest request)
throws IOException, ServletException {
final ServletFileUpload upload = new ServletFileUpload(this._uploadItemFactory);
List<FileItem> items = null;
try {
items = upload.parseRequest(request);
} catch (final FileUploadException e) {
throw new ServletException(e);
}
final Map<String, Object> params = new HashMap<>();
for (final FileItem item : items) {
if (item.isFormField()) {
params.put(item.getFieldName(), item.getString());
} else {
params.put(item.getFieldName(), item);
}
}
return params;
}
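// Minimal usage sketch (hypothetical caller, not part of the original class):
//   MultipartParser parser = new MultipartParser(10 * 1024 * 1024); // spill to disk above 10 MB
//   Map<String, Object> params = parser.parseMultipart(request);
//   Object value = params.get("file");
//   if (value instanceof FileItem) {           // uploaded file part
//     ((FileItem) value).write(new File("/tmp/upload.zip")); // throws Exception; wrap in try/catch
//   } else {                                   // plain form field, stored as String
//     String text = (String) value;
//   }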
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/NoteServlet.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp.servlet;
import azkaban.server.HttpRequestUtils;
import azkaban.server.session.Session;
import azkaban.user.Permission.Type;
import azkaban.user.User;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
public class NoteServlet extends LoginAbstractAzkabanServlet {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(NoteServlet.class);
public static String type = null;
public static String message = null;
public static String url = null;
private AzkabanWebServer server;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
this.server = (AzkabanWebServer) getApplication();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (isAdmin(session.getUser())) {
handleNotePageLoad(req, resp, session);
} else {
warningNonAdminUsers(resp, "The requested user doesn't have admin permission");
}
}
private void warningNonAdminUsers(final HttpServletResponse resp, final String message)
throws IOException {
final HashMap<String, Object> ret = new HashMap<>();
ret.put("error", message);
this.writeJSON(resp, ret);
}
private void handleNotePageLoad(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page = newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/notepage.vm");
page.add("note_type", type);
page.add("note_message", message);
page.add("note_url", url);
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (isAdmin(session.getUser()) && hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else {
warningNonAdminUsers(resp, "The requested user doesn't have admin permission, "
+ "or the HTTP request doesn't include an ajax parameter.");
}
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
try {
if (ajaxName.equals("addNote")) {
ajaxAddNotes(req, ret);
} else if (ajaxName.equals("removeNote")) {
ajaxRemoveNotes(ret);
} else {
ret.put("error", "Can not find the ajax operation");
}
} catch (final Exception e) {
ret.put("error", e.getMessage());
}
this.writeJSON(resp, ret);
}
private void ajaxAddNotes(final HttpServletRequest req,
final Map<String, Object> ret) throws ServletException {
type = getParam(req, "type");
message = getParam(req, "message");
url = getParam(req, "url");
logger.info("receive note message. Type: " + type + " message: " + message + " url: " + url);
ret.put("status", "success");
}
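// Example invocation (hypothetical host and mount point; session cookie / session.id parameter
// omitted for brevity):
//   curl 'http://localhost:8081/notes' --data 'ajax=addNote&type=warn&message=Maintenance+tonight&url=http://wiki'
// The note lives only in the static fields above, so it is lost on web-server restart.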
private void ajaxRemoveNotes(final Map<String, Object> ret) throws ServletException {
type = null;
message = null;
url = null;
logger.info("removing note from memory.");
ret.put("status", "success");
}
private boolean isAdmin(final User user) {
return HttpRequestUtils.hasPermission(this.server.getUserManager(), user, Type.ADMIN);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/Page.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.utils.Utils;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.velocity.VelocityContext;
import org.apache.velocity.app.VelocityEngine;
import org.apache.velocity.tools.generic.EscapeTool;
/**
* A page to display
*/
public class Page {
private static final String DEFAULT_MIME_TYPE = "text/html";
private final HttpServletRequest request;
private final HttpServletResponse response;
private final VelocityEngine engine;
private final VelocityContext context;
private final String template;
private String mimeType = DEFAULT_MIME_TYPE;
/**
* Creates a page and sets up the velocity engine to render
*/
public Page(final HttpServletRequest request, final HttpServletResponse response,
final VelocityEngine engine, final String template) {
this.request = Utils.nonNull(request);
this.response = Utils.nonNull(response);
this.engine = Utils.nonNull(engine);
this.template = Utils.nonNull(template);
this.context = new VelocityContext();
this.context.put("esc", new EscapeTool());
this.context.put("session", request.getSession(true));
this.context.put("context", request.getContextPath());
}
/**
* Renders the page in UTF-8
*/
public void render() {
try {
sanitizeContextContents();
this.response.setHeader("Content-type", "text/html; charset=UTF-8");
this.response.setCharacterEncoding("UTF-8");
this.response.setContentType(this.mimeType);
this.engine.mergeTemplate(this.template, "UTF-8", this.context, this.response.getWriter());
} catch (final Exception e) {
throw new PageRenderException(e);
}
}
/**
* Adds variables to the velocity context.
*/
public void add(final String name, final Object value) {
this.context.put(name, value);
}
private void sanitizeContextContents() {
for (final Object key : this.context.getKeys()) {
  // check the key's type before casting; the original cast-then-check order could throw
  // a ClassCastException for non-String keys
  if (key instanceof String) {
    final Object value = this.context.get((String) key);
    if (value instanceof String) {
      this.context.put((String) key, escapeHtmlExceptLineBreaks((String) value));
    }
  }
}
}
private static String escapeHtmlExceptLineBreaks(String value) {
// Convert line breaks to custom newline marker
String converted = value.replaceAll("<br/?>", "!NEWLINE!");
// Escape the converted string
String escaped = StringEscapeUtils.escapeHtml(converted);
// Convert newlines back to <br>
return escaped.replaceAll("!NEWLINE!", "<br>");
}
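// Worked example: escapeHtmlExceptLineBreaks("a & b<br/>c <b>d</b>")
//   -> "a & b!NEWLINE!c <b>d</b>"                      (line breaks masked)
//   -> "a &amp; b!NEWLINE!c &lt;b&gt;d&lt;/b&gt;"      (HTML escaped)
//   -> "a &amp; b<br>c &lt;b&gt;d&lt;/b&gt;"           (line breaks restored)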
public void setMimeType(final String type) {
this.mimeType = type;
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/PageRenderException.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
/**
* Thrown if there is an error rendering the page
*/
public class PageRenderException extends RuntimeException {
private static final long serialVersionUID = -1;
public PageRenderException(final Throwable cause) {
super(cause);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/PageUtils.java
|
package azkaban.webapp.servlet;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.user.UserUtils;
public final class PageUtils {
private PageUtils() {
}
/**
* Method hides the upload button for regular users from relevant pages when the property
* "lockdown.upload.projects" is set. The button is displayed for admin users and users with
* upload permissions.
*/
public static void hideUploadButtonWhenNeeded(final Page page, final Session session,
final UserManager userManager,
final Boolean lockdownUploadProjects) {
final User user = session.getUser();
if (lockdownUploadProjects && !UserUtils.hasPermissionforAction(userManager, user,
Permission.Type.UPLOADPROJECTS)) {
page.add("hideUploadProject", true);
}
}
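  // Presumably the Velocity templates test this flag, e.g. #if(!$hideUploadProject) ... #end in
  // the relevant .vm files; the template-side usage is an assumption, only the flag name comes
  // from the code above.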
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/ProjectManagerServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.Constants;
import azkaban.Constants.ConfigurationKeys;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableJobInfo;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
import azkaban.executor.Status;
import azkaban.flow.Edge;
import azkaban.flow.Flow;
import azkaban.flow.FlowProps;
import azkaban.flow.Node;
import azkaban.flowtrigger.quartz.FlowTriggerScheduler;
import azkaban.project.Project;
import azkaban.project.ProjectFileHandler;
import azkaban.project.ProjectLogEvent;
import azkaban.project.ProjectLogEvent.EventType;
import azkaban.project.ProjectManager;
import azkaban.project.ProjectManagerException;
import azkaban.project.ProjectWhitelist;
import azkaban.project.validator.ValidationReport;
import azkaban.project.validator.ValidatorConfigs;
import azkaban.scheduler.Schedule;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.ScheduleManagerException;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.Role;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.user.UserUtils;
import azkaban.utils.JSONUtils;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Utils;
import azkaban.webapp.AzkabanWebServer;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.io.Writer;
import java.security.AccessControlException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.quartz.SchedulerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class ProjectManagerServlet extends LoginAbstractAzkabanServlet {
static final String FLOW_IS_LOCKED_PARAM = "isLocked";
static final String FLOW_NAME_PARAM = "flowName";
static final String FLOW_ID_PARAM = "flowId";
static final String ERROR_PARAM = "error";
static final String FLOW_LOCK_ERROR_MESSAGE_PARAM = "flowLockErrorMessage";
private static final String APPLICATION_ZIP_MIME_TYPE = "application/zip";
private static final long serialVersionUID = 1;
private static final Logger logger = LoggerFactory.getLogger(ProjectManagerServlet.class);
private static final NodeLevelComparator NODE_LEVEL_COMPARATOR =
new NodeLevelComparator();
private static final String LOCKDOWN_CREATE_PROJECTS_KEY =
"lockdown.create.projects";
private static final String LOCKDOWN_UPLOAD_PROJECTS_KEY =
"lockdown.upload.projects";
private static final String PROJECT_DOWNLOAD_BUFFER_SIZE_IN_BYTES =
"project.download.buffer.size";
private static final Comparator<Flow> FLOW_ID_COMPARATOR = new Comparator<Flow>() {
@Override
public int compare(final Flow f1, final Flow f2) {
return f1.getId().compareTo(f2.getId());
}
};
private ProjectManager projectManager;
private ExecutorManagerAdapter executorManagerAdapter;
private ScheduleManager scheduleManager;
private UserManager userManager;
private FlowTriggerScheduler scheduler;
private int downloadBufferSize;
private boolean lockdownCreateProjects = false;
private boolean lockdownUploadProjects = false;
private boolean enableQuartz = false;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.projectManager = server.getProjectManager();
this.executorManagerAdapter = server.getExecutorManager();
this.scheduleManager = server.getScheduleManager();
this.userManager = server.getUserManager();
this.scheduler = server.getScheduler();
this.lockdownCreateProjects =
server.getServerProps().getBoolean(LOCKDOWN_CREATE_PROJECTS_KEY, false);
this.enableQuartz = server.getServerProps().getBoolean(ConfigurationKeys.ENABLE_QUARTZ, false);
if (this.lockdownCreateProjects) {
logger.info("Creation of projects is locked down");
}
this.lockdownUploadProjects =
server.getServerProps().getBoolean(LOCKDOWN_UPLOAD_PROJECTS_KEY, false);
if (this.lockdownUploadProjects) {
logger.info("Uploading of projects is locked down");
}
this.downloadBufferSize =
server.getServerProps().getInt(PROJECT_DOWNLOAD_BUFFER_SIZE_IN_BYTES,
8192);
logger.info("downloadBufferSize: " + this.downloadBufferSize);
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "project")) {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else if (hasParam(req, "logs")) {
handleProjectLogsPage(req, resp, session);
} else if (hasParam(req, "permissions")) {
handlePermissionPage(req, resp, session);
} else if (hasParam(req, "prop")) {
handlePropertyPage(req, resp, session);
} else if (hasParam(req, "history")) {
handleJobHistoryPage(req, resp, session);
} else if (hasParam(req, "job")) {
handleJobPage(req, resp, session);
} else if (hasParam(req, "flow")) {
handleFlowPage(req, resp, session);
} else if (hasParam(req, "delete")) {
handleRemoveProject(req, resp, session);
} else if (hasParam(req, "purge")) {
handlePurgeProject(req, resp, session);
} else if (hasParam(req, "download")) {
handleDownloadProject(req, resp, session);
} else {
handleProjectPage(req, resp, session);
}
return;
} else if (hasParam(req, "reloadProjectWhitelist")) {
handleReloadProjectWhitelist(req, resp, session);
}
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/projectpage.vm");
page.add("errorMsg", "No project set.");
page.render();
}
@Override
protected void handleMultiformPost(final HttpServletRequest req,
final HttpServletResponse resp, final Map<String, Object> params, final Session session)
throws ServletException, IOException {
// Looks like a duplicate, but this is a move away from the regular
// multiform post + redirect to a more ajax-like command.
if (params.containsKey("ajax")) {
final String action = (String) params.get("ajax");
final HashMap<String, String> ret = new HashMap<>();
if (action.equals("upload")) {
ajaxHandleUpload(req, resp, ret, params, session);
}
this.writeJSON(resp, ret);
} else if (params.containsKey("action")) {
final String action = (String) params.get("action");
if (action.equals("upload")) {
handleUpload(req, resp, params, session);
}
}
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else if (hasParam(req, "action")) {
final String action = getParam(req, "action");
if (action.equals("create")) {
handleCreate(req, resp, session);
}
}
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final String projectName = getParam(req, "project");
final User user = session.getUser();
final HashMap<String, Object> ret = new HashMap<>();
ret.put("project", projectName);
final Project project = this.projectManager.getProject(projectName);
if (project == null) {
ret.put(ERROR_PARAM, "Project " + projectName + " doesn't exist.");
} else {
ret.put("projectId", project.getId());
final String ajaxName = getParam(req, "ajax");
if (ajaxName.equals("getProjectId")) {
// Do nothing, since projectId is added to all AJAX requests.
} else if (ajaxName.equals("fetchProjectLogs")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchProjectLogEvents(project, req, ret);
}
} else if (ajaxName.equals("fetchflowjobs")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlow(project, ret, req);
}
} else if (ajaxName.equals("fetchflowdetails")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowDetails(project, ret, req);
}
} else if (ajaxName.equals("fetchflowgraph")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowGraph(project, ret, req);
}
} else if (ajaxName.equals("fetchflownodedata")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowNodeData(project, ret, req);
}
} else if (ajaxName.equals("fetchprojectflows")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchProjectFlows(project, ret, req);
}
} else if (ajaxName.equals("changeDescription")) {
if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
ajaxChangeDescription(project, ret, req, user);
}
} else if (ajaxName.equals("getPermissions")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxGetPermissions(project, ret);
}
} else if (ajaxName.equals("getGroupPermissions")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxGetGroupPermissions(project, ret);
}
} else if (ajaxName.equals("getProxyUsers")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxGetProxyUsers(project, ret);
}
} else if (ajaxName.equals("changePermission")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxChangePermissions(project, ret, req, user);
}
} else if (ajaxName.equals("addPermission")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxAddPermission(project, ret, req, user);
}
} else if (ajaxName.equals("addProxyUser")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxAddProxyUser(project, ret, req, user);
}
} else if (ajaxName.equals("removeProxyUser")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxRemoveProxyUser(project, ret, req, user);
}
} else if (ajaxName.equals("fetchFlowExecutions")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchFlowExecutions(project, ret, req);
}
} else if (ajaxName.equals("fetchLastSuccessfulFlowExecution")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchLastSuccessfulFlowExecution(project, ret, req);
}
} else if (ajaxName.equals("fetchJobInfo")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxFetchJobInfo(project, ret, req);
}
} else if (ajaxName.equals("setJobOverrideProperty")) {
if (handleAjaxPermission(project, user, Type.WRITE, ret)) {
ajaxSetJobOverrideProperty(project, ret, req, user);
}
} else if (ajaxName.equals("checkForWritePermission")) {
ajaxCheckForWritePermission(project, user, ret);
} else if (ajaxName.equals("setFlowLock")) {
if (handleAjaxPermission(project, user, Type.ADMIN, ret)) {
ajaxSetFlowLock(project, ret, req);
}
} else if (ajaxName.equals("isFlowLocked")) {
if (handleAjaxPermission(project, user, Type.READ, ret)) {
ajaxIsFlowLocked(project, ret, req);
}
} else {
ret.put(ERROR_PARAM, "Cannot execute command " + ajaxName);
}
}
this.writeJSON(resp, ret);
}
private boolean handleAjaxPermission(final Project project, final User user, final Type type,
final Map<String, Object> ret) {
if (hasPermission(project, user, type)) {
return true;
}
ret.put(ERROR_PARAM, "Permission denied. Need " + type.toString() + " access.");
return false;
}
private void ajaxFetchProjectLogEvents(final Project project,
final HttpServletRequest req, final HashMap<String, Object> ret) throws ServletException {
final int num = this.getIntParam(req, "size", 1000);
final int skip = this.getIntParam(req, "skip", 0);
final List<ProjectLogEvent> logEvents;
try {
logEvents = this.projectManager.getProjectEventLogs(project, num, skip);
} catch (final ProjectManagerException e) {
throw new ServletException(e);
}
final String[] columns = new String[]{"user", "time", "type", "message"};
ret.put("columns", columns);
final List<Object[]> eventData = new ArrayList<>();
for (final ProjectLogEvent events : logEvents) {
final Object[] entry = new Object[4];
entry[0] = events.getUser();
entry[1] = events.getTime();
entry[2] = events.getType();
entry[3] = events.getMessage();
eventData.add(entry);
}
ret.put("logData", eventData);
}
private List<String> getFlowJobTypes(final Flow flow) {
final Set<String> jobTypeSet = new HashSet<>();
for (final Node node : flow.getNodes()) {
jobTypeSet.add(node.getType());
}
final List<String> jobTypes = new ArrayList<>();
jobTypes.addAll(jobTypeSet);
return jobTypes;
}
private void ajaxFetchFlowDetails(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final String flowName = getParam(req, "flow");
try {
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(ERROR_PARAM, "Flow " + flowName + " not found.");
return;
}
ret.put("jobTypes", getFlowJobTypes(flow));
if (flow.getCondition() != null) {
ret.put("condition", flow.getCondition());
}
} catch (final AccessControlException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
}
private void ajaxFetchLastSuccessfulFlowExecution(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final String flowId = getParam(req, "flow");
List<ExecutableFlow> exFlows = null;
try {
exFlows =
this.executorManagerAdapter.getExecutableFlows(project.getId(), flowId, 0, 1,
Status.SUCCEEDED);
} catch (final ExecutorManagerException e) {
ret.put(ERROR_PARAM, "Error retrieving executable flows");
return;
}
if (exFlows.size() == 0) {
ret.put("success", "false");
ret.put("message", "This flow has no successful run.");
return;
}
ret.put("success", "true");
ret.put("message", "");
ret.put("execId", exFlows.get(0).getExecutionId());
}
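// Example round trip (hypothetical project/flow names and mount point; session handling omitted):
//   GET /manager?project=demo&flow=daily&ajax=fetchLastSuccessfulFlowExecution
//   -> {"project":"demo","projectId":7,"success":"true","message":"","execId":12345}
// or, when the flow has never succeeded:
//   -> {"success":"false","message":"This flow has no successful run."}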
private void ajaxFetchFlowExecutions(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final String flowId = getParam(req, "flow");
final int from = Integer.valueOf(getParam(req, "start"));
final int length = Integer.valueOf(getParam(req, "length"));
final ArrayList<ExecutableFlow> exFlows = new ArrayList<>();
int total = 0;
try {
total =
this.executorManagerAdapter.getExecutableFlows(project.getId(), flowId, from,
length, exFlows);
} catch (final ExecutorManagerException e) {
ret.put(ERROR_PARAM, "Error retrieving executable flows");
}
ret.put("flow", flowId);
ret.put("total", total);
ret.put("from", from);
ret.put("length", length);
final ArrayList<Object> history = new ArrayList<>();
for (final ExecutableFlow flow : exFlows) {
final HashMap<String, Object> flowInfo = new HashMap<>();
flowInfo.put("execId", flow.getExecutionId());
flowInfo.put(FLOW_ID_PARAM, flow.getFlowId());
flowInfo.put("projectId", flow.getProjectId());
flowInfo.put("status", flow.getStatus().toString());
flowInfo.put("submitTime", flow.getSubmitTime());
flowInfo.put("startTime", flow.getStartTime());
flowInfo.put("endTime", flow.getEndTime());
flowInfo.put("submitUser", flow.getSubmitUser());
history.add(flowInfo);
}
ret.put("executions", history);
}
/**
* Download project zip file from DB and send it back client.
*
* This method requires a project name and an optional project version.
*/
private void handleDownloadProject(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final User user = session.getUser();
final String projectName = getParam(req, "project");
logger.info(user.getUserId() + " is downloading project: " + projectName);
final Project project = this.projectManager.getProject(projectName);
if (project == null) {
this.setErrorMessageInCookie(resp, "Project " + projectName
+ " doesn't exist.");
resp.sendRedirect(req.getContextPath());
return;
}
if (!hasPermission(project, user, Type.READ)) {
this.setErrorMessageInCookie(resp, "No permission to download project " + projectName
+ ".");
resp.sendRedirect(req.getContextPath());
return;
}
int version = -1;
if (hasParam(req, "version")) {
version = getIntParam(req, "version");
}
ProjectFileHandler projectFileHandler = null;
FileInputStream inStream = null;
OutputStream outStream = null;
try {
projectFileHandler =
this.projectManager.getProjectFileHandler(project, version);
if (projectFileHandler == null) {
this.setErrorMessageInCookie(resp, "Project " + projectName
+ " with version " + version + " doesn't exist");
resp.sendRedirect(req.getContextPath());
return;
}
final File projectZipFile = projectFileHandler.getLocalFile();
final String logStr =
String.format(
"downloading project zip file for project \"%s\" at \"%s\""
+ " size: %d type: %s fileName: \"%s\"",
projectFileHandler.getFileName(),
projectZipFile.getAbsolutePath(), projectZipFile.length(),
projectFileHandler.getFileType(),
projectFileHandler.getFileName());
logger.info(logStr);
// now set up HTTP response for downloading file
inStream = new FileInputStream(projectZipFile);
resp.setContentType(APPLICATION_ZIP_MIME_TYPE);
final String headerKey = "Content-Disposition";
final String headerValue =
String.format("attachment; filename=\"%s\"",
projectFileHandler.getFileName());
resp.setHeader(headerKey, headerValue);
resp.setHeader("version",
Integer.toString(projectFileHandler.getVersion()));
resp.setHeader("projectId",
Integer.toString(projectFileHandler.getProjectId()));
outStream = resp.getOutputStream();
final byte[] buffer = new byte[this.downloadBufferSize];
int bytesRead = -1;
while ((bytesRead = inStream.read(buffer)) != -1) {
outStream.write(buffer, 0, bytesRead);
}
} catch (final Throwable e) {
logger.error(
"Encountered error while downloading project zip file for project: "
+ projectName + " by user: " + user.getUserId(), e);
throw new ServletException(e);
} finally {
IOUtils.closeQuietly(inStream);
IOUtils.closeQuietly(outStream);
if (projectFileHandler != null) {
projectFileHandler.deleteLocalFile();
}
}
}
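// Example download (hypothetical host and project name; session handling omitted):
//   curl -o demo.zip 'http://localhost:8081/manager?project=demo&download=true&version=2'
// The zip is streamed through the configured buffer (project.download.buffer.size, default 8192
// bytes), and the "version" and "projectId" response headers echo what was actually served.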
/**
 * Validates that the project is ready to be purged and that the user has permission, then uses
 * projectManager to purge the project if everything looks good.
**/
private void handlePurgeProject(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final User user = session.getUser();
final HashMap<String, Object> ret = new HashMap<>();
boolean isOperationSuccessful = true;
try {
Project project = null;
final String projectParam = getParam(req, "project");
if (StringUtils.isNumeric(projectParam)) {
// get project by id
project = this.projectManager.getProject(Integer.parseInt(projectParam));
} else {
// get project by name (a project name cannot start with a digit)
project = this.projectManager.getProject(projectParam);
}
// invalid project
if (project == null) {
ret.put(ERROR_PARAM, "invalid project");
isOperationSuccessful = false;
}
// the project must already be deleted (inactive) before it can be purged
if (isOperationSuccessful
&& this.projectManager.isActiveProject(project.getId())) {
ret.put(ERROR_PARAM, "Project " + project.getName()
+ " should be deleted before purging");
isOperationSuccessful = false;
}
// only eligible users can purge a project
if (isOperationSuccessful && !hasPermission(project, user, Type.ADMIN)) {
ret.put(ERROR_PARAM, "Cannot purge. User '" + user.getUserId()
+ "' is not an ADMIN.");
isOperationSuccessful = false;
}
if (isOperationSuccessful) {
this.projectManager.purgeProject(project, user);
}
} catch (final Exception e) {
ret.put(ERROR_PARAM, e.getMessage());
isOperationSuccessful = false;
}
ret.put("success", isOperationSuccessful);
this.writeJSON(resp, ret);
}
private void removeAssociatedSchedules(final Project project) throws ServletException {
// remove regular schedules
try {
for (final Schedule schedule : this.scheduleManager.getSchedules()) {
if (schedule.getProjectId() == project.getId()) {
logger.info("removing schedule " + schedule.getScheduleId());
this.scheduleManager.removeSchedule(schedule);
}
}
} catch (final ScheduleManagerException e) {
throw new ServletException(e);
}
// remove flow trigger schedules
try {
if (this.enableQuartz) {
this.scheduler.unschedule(project);
}
} catch (final SchedulerException e) {
throw new ServletException(e);
}
}
private void handleRemoveProject(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final User user = session.getUser();
final String projectName = getParam(req, "project");
final Project project = this.projectManager.getProject(projectName);
if (project == null) {
this.setErrorMessageInCookie(resp, "Project " + projectName
+ " doesn't exist.");
resp.sendRedirect(req.getContextPath());
return;
}
if (!hasPermission(project, user, Type.ADMIN)) {
this.setErrorMessageInCookie(resp,
"Cannot delete. User '" + user.getUserId() + "' is not an ADMIN.");
resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
return;
}
removeAssociatedSchedules(project);
try {
this.projectManager.removeProject(project, user);
} catch (final ProjectManagerException e) {
this.setErrorMessageInCookie(resp, e.getMessage());
resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
return;
}
this.setSuccessMessageInCookie(resp, "Project '" + projectName
+ "' was successfully deleted and associated schedules are removed.");
resp.sendRedirect(req.getContextPath());
}
private void ajaxChangeDescription(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req, final User user)
throws ServletException {
final String description = getParam(req, "description");
project.setDescription(description);
try {
this.projectManager.updateProjectDescription(project, description, user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
}
private void ajaxFetchJobInfo(final Project project, final HashMap<String, Object> ret,
final HttpServletRequest req) throws ServletException {
final String flowName = getParam(req, "flowName");
final String jobName = getParam(req, "jobName");
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(ERROR_PARAM,
"Flow " + flowName + " not found in project " + project.getName());
return;
}
final Node node = flow.getNode(jobName);
if (node == null) {
ret.put(ERROR_PARAM, "Job " + jobName + " not found in flow " + flowName);
return;
}
Props jobProp;
try {
jobProp = this.projectManager.getProperties(project, flow, jobName, node.getJobSource());
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, "Failed to retrieve job properties!");
return;
}
if (jobProp == null) {
jobProp = new Props();
}
Props overrideProp;
try {
overrideProp = this.projectManager
.getJobOverrideProperty(project, flow, jobName, node.getJobSource());
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, "Failed to retrieve job override properties!");
return;
}
ret.put("jobName", node.getId());
ret.put("jobType", jobProp.get("type"));
if (overrideProp == null) {
overrideProp = new Props(jobProp);
}
final Map<String, String> generalParams = new HashMap<>();
final Map<String, String> overrideParams = new HashMap<>();
for (final String ps : jobProp.getKeySet()) {
generalParams.put(ps, jobProp.getString(ps));
}
for (final String ops : overrideProp.getKeySet()) {
overrideParams.put(ops, overrideProp.getString(ops));
}
ret.put("generalParams", generalParams);
ret.put("overrideParams", overrideParams);
}
private void ajaxSetJobOverrideProperty(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req, final User user)
throws ServletException {
final String flowName = getParam(req, "flowName");
final String jobName = getParam(req, "jobName");
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(ERROR_PARAM,
"Flow " + flowName + " not found in project " + project.getName());
return;
}
final Node node = flow.getNode(jobName);
if (node == null) {
ret.put(ERROR_PARAM, "Job " + jobName + " not found in flow " + flowName);
return;
}
final Map<String, String> jobParamGroup = this.getParamGroup(req, "jobOverride");
final Props overrideParams = new Props(null, jobParamGroup);
try {
this.projectManager
.setJobOverrideProperty(project, flow, overrideParams, jobName, node.getJobSource(),
user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, "Failed to upload job override property");
}
}
private void ajaxFetchProjectFlows(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final ArrayList<Map<String, Object>> flowList =
new ArrayList<>();
for (final Flow flow : project.getFlows()) {
if (!flow.isEmbeddedFlow()) {
final HashMap<String, Object> flowObj = new HashMap<>();
flowObj.put(FLOW_ID_PARAM, flow.getId());
flowList.add(flowObj);
}
}
ret.put("flows", flowList);
}
private void ajaxFetchFlowGraph(final Project project, final HashMap<String, Object> ret,
final HttpServletRequest req) throws ServletException {
final String flowId = getParam(req, "flow");
fillFlowInfo(project, flowId, ret);
}
private void fillFlowInfo(final Project project, final String flowId,
final HashMap<String, Object> ret) {
final Flow flow = project.getFlow(flowId);
if (flow == null) {
ret.put(ERROR_PARAM,
"Flow " + flowId + " not found in project " + project.getName());
return;
}
final ArrayList<Map<String, Object>> nodeList =
new ArrayList<>();
for (final Node node : flow.getNodes()) {
final HashMap<String, Object> nodeObj = new HashMap<>();
nodeObj.put("id", node.getId());
nodeObj.put("type", node.getType());
if (node.getCondition() != null) {
nodeObj.put("condition", node.getCondition());
}
if (node.getEmbeddedFlowId() != null) {
nodeObj.put(FLOW_ID_PARAM, node.getEmbeddedFlowId());
fillFlowInfo(project, node.getEmbeddedFlowId(), nodeObj);
}
nodeList.add(nodeObj);
final Set<Edge> inEdges = flow.getInEdges(node.getId());
if (inEdges != null && !inEdges.isEmpty()) {
final ArrayList<String> inEdgesList = new ArrayList<>();
for (final Edge edge : inEdges) {
inEdgesList.add(edge.getSourceId());
}
Collections.sort(inEdgesList);
nodeObj.put("in", inEdgesList);
}
}
Collections.sort(nodeList, new Comparator<Map<String, Object>>() {
@Override
public int compare(final Map<String, Object> o1, final Map<String, Object> o2) {
final String id = (String) o1.get("id");
return id.compareTo((String) o2.get("id"));
}
});
ret.put("flow", flowId);
ret.put("nodes", nodeList);
}
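// Resulting shape (illustrative, for a flow "f" with jobs a -> b; embedded flows recurse via a
// nested "flowId"/"nodes" pair inside the node object):
//   {"flow":"f","nodes":[{"id":"a","type":"command"},
//                        {"id":"b","type":"command","in":["a"]}]}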
private void ajaxFetchFlowNodeData(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final String flowId = getParam(req, "flow");
final Flow flow = project.getFlow(flowId);
final String nodeId = getParam(req, "node");
final Node node = flow.getNode(nodeId);
if (node == null) {
ret.put(ERROR_PARAM, "Job " + nodeId + " doesn't exist.");
return;
}
ret.put("id", nodeId);
ret.put("flow", flowId);
ret.put("type", node.getType());
final Props jobProps;
try {
jobProps = this.projectManager.getProperties(project, flow, nodeId, node.getJobSource());
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, "Failed to upload job override property for " + nodeId);
return;
}
if (jobProps == null) {
ret.put(ERROR_PARAM, "Properties for " + nodeId + " isn't found.");
return;
}
final Map<String, String> properties = PropsUtils.toStringMap(jobProps, true);
ret.put("props", properties);
if (node.getType().equals("flow")) {
if (node.getEmbeddedFlowId() != null) {
fillFlowInfo(project, node.getEmbeddedFlowId(), ret);
}
}
}
private void ajaxFetchFlow(final Project project, final HashMap<String, Object> ret,
final HttpServletRequest req) throws ServletException {
final String flowId = getParam(req, "flow");
final Flow flow = project.getFlow(flowId);
final ArrayList<Node> flowNodes = new ArrayList<>(flow.getNodes());
Collections.sort(flowNodes, NODE_LEVEL_COMPARATOR);
final ArrayList<Object> nodeList = new ArrayList<>();
for (final Node node : flowNodes) {
final HashMap<String, Object> nodeObj = new HashMap<>();
nodeObj.put("id", node.getId());
final ArrayList<String> dependencies = new ArrayList<>();
Collection<Edge> collection = flow.getInEdges(node.getId());
if (collection != null) {
for (final Edge edge : collection) {
dependencies.add(edge.getSourceId());
}
}
final ArrayList<String> dependents = new ArrayList<>();
collection = flow.getOutEdges(node.getId());
if (collection != null) {
for (final Edge edge : collection) {
dependents.add(edge.getTargetId());
}
}
nodeObj.put("dependencies", dependencies);
nodeObj.put("dependents", dependents);
nodeObj.put("level", node.getLevel());
nodeList.add(nodeObj);
}
ret.put(FLOW_ID_PARAM, flowId);
ret.put("nodes", nodeList);
ret.put(FLOW_IS_LOCKED_PARAM, flow.isLocked());
}
private void ajaxAddProxyUser(final Project project, final HashMap<String, Object> ret,
final HttpServletRequest req, final User user) throws ServletException {
final String name = getParam(req, "name");
logger.info("Adding proxy user " + name + " by " + user.getUserId());
if (this.userManager.validateProxyUser(name, user)) {
try {
this.projectManager.addProjectProxyUser(project, name, user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
} else {
ret.put(ERROR_PARAM, "User " + user.getUserId()
+ " has no permission to add " + name + " as proxy user.");
return;
}
}
private void ajaxRemoveProxyUser(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req, final User user)
throws ServletException {
final String name = getParam(req, "name");
logger.info("Removing proxy user " + name + " by " + user.getUserId());
try {
this.projectManager.removeProjectProxyUser(project, name, user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
}
private void ajaxAddPermission(final Project project, final HashMap<String, Object> ret,
final HttpServletRequest req, final User user) throws ServletException {
final String name = getParam(req, "name");
final boolean group = Boolean.parseBoolean(getParam(req, "group"));
if (group) {
if (project.getGroupPermission(name) != null) {
ret.put(ERROR_PARAM, "Group permission already exists.");
return;
}
if (!this.userManager.validateGroup(name)) {
ret.put(ERROR_PARAM, "Group is invalid.");
return;
}
} else {
if (project.getUserPermission(name) != null) {
ret.put(ERROR_PARAM, "User permission already exists.");
return;
}
if (!this.userManager.validateUser(name)) {
ret.put(ERROR_PARAM, "User is invalid.");
return;
}
}
final boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
final boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
final boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
final boolean execute =
Boolean.parseBoolean(getParam(req, "permissions[execute]"));
final boolean schedule =
Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
final Permission perm = new Permission();
if (admin) {
perm.setPermission(Type.ADMIN, true);
} else {
perm.setPermission(Type.READ, read);
perm.setPermission(Type.WRITE, write);
perm.setPermission(Type.EXECUTE, execute);
perm.setPermission(Type.SCHEDULE, schedule);
}
try {
this.projectManager.updateProjectPermission(project, name, perm, group, user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
}
private void ajaxChangePermissions(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req, final User user)
throws ServletException {
final boolean admin = Boolean.parseBoolean(getParam(req, "permissions[admin]"));
final boolean read = Boolean.parseBoolean(getParam(req, "permissions[read]"));
final boolean write = Boolean.parseBoolean(getParam(req, "permissions[write]"));
final boolean execute =
Boolean.parseBoolean(getParam(req, "permissions[execute]"));
final boolean schedule =
Boolean.parseBoolean(getParam(req, "permissions[schedule]"));
final boolean group = Boolean.parseBoolean(getParam(req, "group"));
final String name = getParam(req, "name");
final Permission perm;
if (group) {
perm = project.getGroupPermission(name);
} else {
perm = project.getUserPermission(name);
}
if (perm == null) {
ret.put(ERROR_PARAM, "Permissions for " + name + " cannot be found.");
return;
}
if (admin || read || write || execute || schedule) {
if (admin) {
perm.setPermission(Type.ADMIN, true);
perm.setPermission(Type.READ, false);
perm.setPermission(Type.WRITE, false);
perm.setPermission(Type.EXECUTE, false);
perm.setPermission(Type.SCHEDULE, false);
} else {
perm.setPermission(Type.ADMIN, false);
perm.setPermission(Type.READ, read);
perm.setPermission(Type.WRITE, write);
perm.setPermission(Type.EXECUTE, execute);
perm.setPermission(Type.SCHEDULE, schedule);
}
try {
this.projectManager
.updateProjectPermission(project, name, perm, group, user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
} else {
try {
this.projectManager.removeProjectPermission(project, name, group, user);
} catch (final ProjectManagerException e) {
ret.put(ERROR_PARAM, e.getMessage());
}
}
}
/**
* this only returns user permissions, but not group permissions and proxy users
*/
private void ajaxGetPermissions(final Project project, final HashMap<String, Object> ret) {
final ArrayList<HashMap<String, Object>> permissions =
new ArrayList<>();
for (final Pair<String, Permission> perm : project.getUserPermissions()) {
final HashMap<String, Object> permObj = new HashMap<>();
final String userId = perm.getFirst();
permObj.put("username", userId);
permObj.put("permission", perm.getSecond().toStringArray());
permissions.add(permObj);
}
ret.put("permissions", permissions);
}
private void ajaxGetGroupPermissions(final Project project,
final HashMap<String, Object> ret) {
final ArrayList<HashMap<String, Object>> permissions =
new ArrayList<>();
for (final Pair<String, Permission> perm : project.getGroupPermissions()) {
final HashMap<String, Object> permObj = new HashMap<>();
final String userId = perm.getFirst();
permObj.put("username", userId);
permObj.put("permission", perm.getSecond().toStringArray());
permissions.add(permObj);
}
ret.put("permissions", permissions);
}
private void ajaxGetProxyUsers(final Project project, final HashMap<String, Object> ret) {
final String[] proxyUsers = project.getProxyUsers().toArray(new String[0]);
ret.put("proxyUsers", proxyUsers);
}
private void ajaxCheckForWritePermission(final Project project, final User user,
final HashMap<String, Object> ret) {
ret.put("hasWritePermission", hasPermission(project, user, Type.WRITE));
}
/**
* Set if a flow is locked.
*
* @param project the project for the flow.
* @param ret the return value.
* @param req the http request.
*/
private void ajaxSetFlowLock(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final String flowName = getParam(req, FLOW_NAME_PARAM);
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(ERROR_PARAM,
"Flow " + flowName + " not found in project " + project.getName());
return;
}
final boolean isLocked = Boolean.parseBoolean(getParam(req, FLOW_IS_LOCKED_PARAM));
String flowLockErrorMessage = null;
try {
flowLockErrorMessage = getParam(req, FLOW_LOCK_ERROR_MESSAGE_PARAM);
} catch(final Exception e) {
logger.info("Unable to get flow lock error message");
}
// if there is a change in the locked value, then check to see if the project has a flow trigger
// that needs to be paused/resumed.
if (isLocked != flow.isLocked()) {
try {
if (this.projectManager.hasFlowTrigger(project, flow)) {
if (isLocked) {
if (this.scheduler.pauseFlowTriggerIfPresent(project.getId(), flow.getId())) {
logger.info("Flow trigger for flow " + project.getName() + "." + flow.getId() +
" is paused");
} else {
logger.warn("Flow trigger for flow " + project.getName() + "." + flow.getId() +
" doesn't exist");
}
} else {
if (this.scheduler.resumeFlowTriggerIfPresent(project.getId(), flow.getId())) {
logger.info("Flow trigger for flow " + project.getName() + "." + flow.getId() +
" is resumed");
} else {
logger.warn("Flow trigger for flow " + project.getName() + "." + flow.getId() +
" doesn't exist");
}
}
}
} catch (final Exception e) {
ret.put(ERROR_PARAM, e.getMessage());
}
}
flow.setLocked(isLocked);
flow.setFlowLockErrorMessage(isLocked ? flowLockErrorMessage : null);
ret.put(FLOW_IS_LOCKED_PARAM, flow.isLocked());
ret.put(FLOW_ID_PARAM, flow.getId());
ret.put(FLOW_LOCK_ERROR_MESSAGE_PARAM, flow.getFlowLockErrorMessage());
this.projectManager.updateFlow(project, flow);
}
/**
* Returns true if the flow is locked, false if it is unlocked.
*
* @param project the project containing the flow.
* @param ret the return value.
* @param req the http request.
*/
private void ajaxIsFlowLocked(final Project project,
final HashMap<String, Object> ret, final HttpServletRequest req)
throws ServletException {
final String flowName = getParam(req, FLOW_NAME_PARAM);
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(ERROR_PARAM,
"Flow " + flowName + " not found in project " + project.getName());
return;
}
ret.put(FLOW_ID_PARAM, flow.getId());
ret.put(FLOW_IS_LOCKED_PARAM, flow.isLocked());
}
private void handleProjectLogsPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/projectlogpage.vm");
final String projectName = getParam(req, "project");
final User user = session.getUser();
PageUtils
.hideUploadButtonWhenNeeded(page, session, this.userManager, this.lockdownUploadProjects);
Project project = null;
try {
project = this.projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " doesn't exist.");
} else {
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
page.add("admins", Utils.flattenToString(
project.getUsersWithPermission(Type.ADMIN), ","));
final Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
page.add("userpermission", perm);
final boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
if (adminPerm) {
page.add("admin", true);
}
// Set this so we can display execute buttons only to those who have
// access.
if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
page.add("exec", true);
} else {
page.add("exec", false);
}
}
} catch (final AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleJobHistoryPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/jobhistorypage.vm");
final String jobId = getParam(req, "job");
page.add("jobId", jobId);
int pageNum = Math.max(1, getIntParam(req, "page", 1));
page.add("page", pageNum);
final int pageSize = Math.max(1, getIntParam(req, "size", 25));
page.add("pageSize", pageSize);
page.add("recordCount", 0);
page.add("projectId", "");
page.add("projectName", "");
page.add("dataSeries", "[]");
page.add("history", null);
final String projectName = getParam(req, "project");
final User user = session.getUser();
final Project project = this.projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " doesn't exist.");
page.render();
return;
}
if (!hasPermission(project, user, Type.READ)) {
page.add("errorMsg", "No permission to view project " + projectName + ".");
page.render();
return;
}
page.add("projectId", project.getId());
page.add("projectName", project.getName());
try {
final int numResults = this.executorManagerAdapter.getNumberOfJobExecutions(project, jobId);
page.add("recordCount", numResults);
final int totalPages = ((numResults - 1) / pageSize) + 1;
if (pageNum > totalPages) {
pageNum = totalPages;
page.add("page", pageNum);
}
final int elementsToSkip = (pageNum - 1) * pageSize;
final List<ExecutableJobInfo> jobInfo =
this.executorManagerAdapter.getExecutableJobs(project, jobId, elementsToSkip, pageSize);
if (CollectionUtils.isNotEmpty(jobInfo)) {
page.add("history", jobInfo);
final ArrayList<Object> dataSeries = new ArrayList<>();
for (final ExecutableJobInfo info : jobInfo) {
final Map<String, Object> map = info.toObject();
dataSeries.add(map);
}
page.add("dataSeries", JSONUtils.toJSON(dataSeries));
}
} catch (final ExecutorManagerException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handlePermissionPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/permissionspage.vm");
final String projectName = getParam(req, "project");
final User user = session.getUser();
PageUtils
.hideUploadButtonWhenNeeded(page, session, this.userManager, this.lockdownUploadProjects);
Project project = null;
try {
project = this.projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " not found.");
} else {
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
page.add("username", user.getUserId());
page.add("admins", Utils.flattenToString(
project.getUsersWithPermission(Type.ADMIN), ","));
final Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
page.add("userpermission", perm);
if (perm.isPermissionSet(Type.ADMIN)) {
page.add("admin", true);
}
final List<Pair<String, Permission>> userPermission =
project.getUserPermissions();
if (userPermission != null && !userPermission.isEmpty()) {
page.add("permissions", userPermission);
}
final List<Pair<String, Permission>> groupPermission =
project.getGroupPermissions();
if (groupPermission != null && !groupPermission.isEmpty()) {
page.add("groupPermissions", groupPermission);
}
final Set<String> proxyUsers = project.getProxyUsers();
if (proxyUsers != null && !proxyUsers.isEmpty()) {
page.add("proxyUsers", proxyUsers);
}
if (hasPermission(project, user, Type.ADMIN)) {
page.add("isAdmin", true);
}
}
} catch (final AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleJobPage(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/jobpage.vm");
final String projectName = getParam(req, "project");
final String flowName = getParam(req, "flow");
final String jobName = getParam(req, "job");
final User user = session.getUser();
Project project = null;
Flow flow = null;
try {
      project = this.projectManager.getProject(projectName);
      if (project == null) {
        page.add("errorMsg", "Project " + projectName + " not found.");
        page.render();
        return;
      }
      logger.info("JobPage: project " + projectName + " version is " + project.getVersion()
          + ", reference is " + System.identityHashCode(project));
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
flow = project.getFlow(flowName);
if (flow == null) {
page.add("errorMsg", "Flow " + flowName + " not found.");
page.render();
return;
}
page.add("flowid", flow.getId());
final Node node = flow.getNode(jobName);
if (node == null) {
page.add("errorMsg", "Job " + jobName + " not found.");
page.render();
return;
}
Props jobProp = this.projectManager
.getJobOverrideProperty(project, flow, jobName, node.getJobSource());
if (jobProp == null) {
jobProp = this.projectManager.getProperties(project, flow, jobName, node.getJobSource());
}
page.add("jobid", node.getId());
page.add("jobtype", node.getType());
if (node.getCondition() != null) {
page.add("condition", node.getCondition());
}
final ArrayList<String> dependencies = new ArrayList<>();
final Set<Edge> inEdges = flow.getInEdges(node.getId());
if (inEdges != null) {
for (final Edge dependency : inEdges) {
dependencies.add(dependency.getSourceId());
}
}
if (!dependencies.isEmpty()) {
page.add("dependencies", dependencies);
}
final ArrayList<String> dependents = new ArrayList<>();
final Set<Edge> outEdges = flow.getOutEdges(node.getId());
if (outEdges != null) {
for (final Edge dependent : outEdges) {
dependents.add(dependent.getTargetId());
}
}
if (!dependents.isEmpty()) {
page.add("dependents", dependents);
}
// Resolve property dependencies
final ArrayList<String> source = new ArrayList<>();
final String nodeSource = node.getPropsSource();
if (nodeSource != null) {
source.add(nodeSource);
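        // Walk up the props inheritance chain, recording every inherited source file.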
FlowProps parent = flow.getFlowProps(nodeSource);
while (parent.getInheritedSource() != null) {
source.add(parent.getInheritedSource());
parent = flow.getFlowProps(parent.getInheritedSource());
}
}
if (!source.isEmpty()) {
page.add("properties", source);
}
final ArrayList<Pair<String, String>> parameters =
new ArrayList<>();
// Parameter
for (final String key : jobProp.getKeySet()) {
final String value = jobProp.get(key);
parameters.add(new Pair<>(key, value));
}
page.add("parameters", parameters);
} catch (final AccessControlException e) {
page.add("errorMsg", e.getMessage());
} catch (final ProjectManagerException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handlePropertyPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/propertypage.vm");
final String projectName = getParam(req, "project");
final String flowName = getParam(req, "flow");
final String jobName = getParam(req, "job");
final String propSource = getParam(req, "prop");
final User user = session.getUser();
Project project = null;
Flow flow = null;
try {
project = this.projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " not found.");
logger.info("Display project property. Project " + projectName + " not found.");
page.render();
return;
}
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
flow = project.getFlow(flowName);
if (flow == null) {
page.add("errorMsg", "Flow " + flowName + " not found.");
logger.info("Display project property. Project " + projectName +
" Flow " + flowName + " not found.");
page.render();
return;
}
page.add("flowid", flow.getId());
final Node node = flow.getNode(jobName);
if (node == null) {
page.add("errorMsg", "Job " + jobName + " not found.");
logger.info("Display project property. Project " + projectName +
" Flow " + flowName + " Job " + jobName + " not found.");
page.render();
return;
}
final Props prop = this.projectManager.getProperties(project, flow, null, propSource);
if (prop == null) {
page.add("errorMsg", "Property " + propSource + " not found.");
logger.info("Display project property. Project " + projectName +
" Flow " + flowName + " Job " + jobName +
" Property " + propSource + " not found.");
page.render();
return;
}
page.add("property", propSource);
page.add("jobid", node.getId());
// Resolve property dependencies
final ArrayList<String> inheritProps = new ArrayList<>();
FlowProps parent = flow.getFlowProps(propSource);
while (parent.getInheritedSource() != null) {
inheritProps.add(parent.getInheritedSource());
parent = flow.getFlowProps(parent.getInheritedSource());
}
if (!inheritProps.isEmpty()) {
page.add("inheritedproperties", inheritProps);
}
final ArrayList<String> dependingProps = new ArrayList<>();
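      // Starting from the job's own props source, follow the inherited-source chain until
      // propSource is reached; every source collected along the way overrides propSource.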
FlowProps child =
flow.getFlowProps(flow.getNode(jobName).getPropsSource());
while (!child.getSource().equals(propSource)) {
dependingProps.add(child.getSource());
child = flow.getFlowProps(child.getInheritedSource());
}
if (!dependingProps.isEmpty()) {
page.add("dependingproperties", dependingProps);
}
final ArrayList<Pair<String, String>> parameters =
new ArrayList<>();
// Parameter
for (final String key : prop.getKeySet()) {
final String value = prop.get(key);
parameters.add(new Pair<>(key, value));
}
page.add("parameters", parameters);
} catch (final AccessControlException e) {
page.add("errorMsg", e.getMessage());
} catch (final ProjectManagerException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleFlowPage(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/flowpage.vm");
final String projectName = getParam(req, "project");
final String flowName = getParam(req, "flow");
final User user = session.getUser();
Project project = null;
Flow flow = null;
try {
project = this.projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " not found.");
page.render();
return;
}
if (!hasPermission(project, user, Type.READ)) {
        throw new AccessControlException("No permission to view project "
            + projectName + ".");
}
page.add("project", project);
flow = project.getFlow(flowName);
if (flow == null) {
page.add("errorMsg", "Flow " + flowName + " not found.");
} else {
page.add("flowid", flow.getId());
page.add("isLocked", flow.isLocked());
if (flow.isLocked()) {
final Props props = this.projectManager.getProps();
final String flowLockErrorMessage = flow.getFlowLockErrorMessage();
final String lockedFlowMsg = flowLockErrorMessage != null ? flowLockErrorMessage :
String.format(props.getString(ConfigurationKeys
.AZKABAN_LOCKED_FLOW_ERROR_MESSAGE, Constants.DEFAULT_LOCKED_FLOW_ERROR_MESSAGE),
flow.getId(), projectName);
page.add("error_message", lockedFlowMsg);
}
}
} catch (final AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleProjectPage(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/projectpage.vm");
final String projectName = getParam(req, "project");
final User user = session.getUser();
PageUtils
.hideUploadButtonWhenNeeded(page, session, this.userManager, this.lockdownUploadProjects);
Project project = null;
try {
project = this.projectManager.getProject(projectName);
if (project == null) {
page.add("errorMsg", "Project " + projectName + " not found.");
} else {
if (!hasPermission(project, user, Type.READ)) {
throw new AccessControlException("No permission to view project "
+ projectName + ".");
}
page.add("project", project);
page.add("admins", Utils.flattenToString(
project.getUsersWithPermission(Type.ADMIN), ","));
final Permission perm = this.getPermissionObject(project, user, Type.ADMIN);
page.add("userpermission", perm);
page.add(
"validatorFixPrompt",
this.projectManager.getProps().getBoolean(
ValidatorConfigs.VALIDATOR_AUTO_FIX_PROMPT_FLAG_PARAM,
ValidatorConfigs.DEFAULT_VALIDATOR_AUTO_FIX_PROMPT_FLAG));
page.add(
"validatorFixLabel",
this.projectManager.getProps().get(
ValidatorConfigs.VALIDATOR_AUTO_FIX_PROMPT_LABEL_PARAM));
page.add(
"validatorFixLink",
this.projectManager.getProps().get(
ValidatorConfigs.VALIDATOR_AUTO_FIX_PROMPT_LINK_PARAM));
final boolean adminPerm = perm.isPermissionSet(Type.ADMIN);
if (adminPerm) {
page.add("admin", true);
}
// Set this so we can display execute buttons only to those who have
// access.
if (perm.isPermissionSet(Type.EXECUTE) || adminPerm) {
page.add("exec", true);
} else {
page.add("exec", false);
}
final List<Flow> flows = project.getFlows().stream().filter(flow -> !flow.isEmbeddedFlow())
.collect(Collectors.toList());
if (!flows.isEmpty()) {
Collections.sort(flows, FLOW_ID_COMPARATOR);
page.add("flows", flows);
}
}
} catch (final AccessControlException e) {
page.add("errorMsg", e.getMessage());
}
page.render();
}
private void handleCreate(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException {
final String projectName = hasParam(req, "name") ? getParam(req, "name") : null;
final String projectDescription =
hasParam(req, "description") ? getParam(req, "description") : null;
logger.info("Create project " + projectName);
final User user = session.getUser();
String status = null;
String action = null;
String message = null;
HashMap<String, Object> params = null;
if (this.lockdownCreateProjects &&
!UserUtils.hasPermissionforAction(this.userManager, user, Type.CREATEPROJECTS)) {
message =
"User " + user.getUserId()
+ " doesn't have permission to create projects.";
logger.info(message);
status = ERROR_PARAM;
} else {
try {
this.projectManager.createProject(projectName, projectDescription, user);
status = "success";
action = "redirect";
final String redirect = "manager?project=" + projectName;
params = new HashMap<>();
params.put("path", redirect);
} catch (final ProjectManagerException e) {
message = e.getMessage();
status = ERROR_PARAM;
}
}
final String response = AbstractAzkabanServlet
.createJsonResponse(status, message, action, params);
try {
final Writer write = resp.getWriter();
write.append(response);
write.flush();
} catch (final IOException e) {
      logger.error("Failed to write the create-project response.", e);
}
}
private void registerError(final Map<String, String> ret, final String error,
final HttpServletResponse resp, final int returnCode) {
ret.put(ERROR_PARAM, error);
resp.setStatus(returnCode);
}
private void ajaxHandleUpload(final HttpServletRequest req, final HttpServletResponse resp,
final Map<String, String> ret, final Map<String, Object> multipart, final Session session)
throws ServletException, IOException {
final User user = session.getUser();
final String projectName = (String) multipart.get("project");
// Fetch the uploader's IP
String uploaderIPAddr = WebUtils.getRealClientIpAddr(req);
final Project project = validateUploadAndGetProject(resp, ret, user, projectName);
if (project == null) {
return;
}
final FileItem item = (FileItem) multipart.get("file");
final String name = item.getName();
final String lowercaseExtension = FilenameUtils.getExtension(name).toLowerCase();
    final boolean hasZipExtension = lowercaseExtension.equals("zip");
final String contentType = item.getContentType();
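    // Reject the upload unless it has a .zip extension and one of the accepted zip MIME types.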
if (contentType == null || !hasZipExtension ||
(!contentType.startsWith(APPLICATION_ZIP_MIME_TYPE) &&
!contentType.startsWith("application/x-zip-compressed") &&
!contentType.startsWith("application/octet-stream"))) {
item.delete();
if (!hasZipExtension) {
registerError(ret, "File extension '" + lowercaseExtension + "' unrecognized.", resp,
HttpServletResponse.SC_BAD_REQUEST);
} else {
registerError(ret, "Content type '" + contentType + "' does not match extension '" + lowercaseExtension + "'", resp,
HttpServletResponse.SC_BAD_REQUEST);
}
return;
}
final String autoFix = (String) multipart.get("fix");
final Props props = new Props();
if (autoFix != null && autoFix.equals("off")) {
props.put(ValidatorConfigs.CUSTOM_AUTO_FIX_FLAG_PARAM, "false");
} else {
props.put(ValidatorConfigs.CUSTOM_AUTO_FIX_FLAG_PARAM, "true");
}
ret.put("projectId", String.valueOf(project.getId()));
final File tempDir = Utils.createTempDir();
OutputStream out = null;
try {
logger.info("Uploading file to web server " + name);
final File archiveFile = new File(tempDir, name);
out = new BufferedOutputStream(new FileOutputStream(archiveFile));
IOUtils.copy(item.getInputStream(), out);
out.close();
if (this.enableQuartz) {
        // TODO chengren311: should maintain atomicity,
        // e.g., if uploadProject fails, the associated schedule shouldn't be added.
this.scheduler.unschedule(project);
}
// get the locked flows for the project, so that they can be locked again after upload
final List<String> lockedFlows = getLockedFlows(project);
final Map<String, ValidationReport> reports = this.projectManager
.uploadProject(project, archiveFile, lowercaseExtension, user, props, uploaderIPAddr);
if (this.enableQuartz) {
this.scheduler.schedule(project, user.getUserId());
}
// reset locks for flows as needed
lockFlowsForProject(project, lockedFlows);
// remove schedule of renamed/deleted flows
removeScheduleOfDeletedFlows(project, this.scheduleManager, (schedule) -> {
logger.info(
"Removed schedule with id {} of renamed/deleted flow: {} from project: {}.",
schedule.getScheduleId(), schedule.getFlowName(), schedule.getProjectName());
this.projectManager.postProjectEvent(project, EventType.SCHEDULE, "azkaban",
"Schedule " + schedule.toString() + " has been removed.");
});
registerErrorsAndWarningsFromValidationReport(resp, ret, reports);
} catch (final Exception e) {
logger.info("Installation Failed.", e);
      String error = e.getMessage() == null ? e.toString() : e.getMessage();
      if (error.length() > 512) {
        error = error.substring(0, 512) + "<br>Too many errors to display.<br>";
      }
registerError(ret, "Installation Failed.<br>" + error, resp,
HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
} finally {
if (out != null) {
out.close();
}
if (tempDir.exists()) {
FileUtils.deleteDirectory(tempDir);
}
}
logger.info("Upload: project " + projectName + " version is " + project.getVersion()
+ ", reference is " + System.identityHashCode(project));
ret.put("version", String.valueOf(project.getVersion()));
}
/**
* @return project. Null if invalid upload params or not enough permissions to proceed.
*/
private Project validateUploadAndGetProject(final HttpServletResponse resp,
final Map<String, String> ret, final User user, final String projectName) {
if (projectName == null || projectName.isEmpty()) {
registerError(ret, "No project name found.", resp, HttpServletResponse.SC_BAD_REQUEST);
return null;
}
final Project project = this.projectManager.getProject(projectName);
if (project == null || !project.isActive()) {
final String failureCause = (project == null) ? "doesn't exist." : "was already removed.";
registerError(ret, "Installation Failed. Project '" + projectName + " "
+ failureCause, resp, HttpServletResponse.SC_GONE);
return null;
}
logger.info(
"Upload: reference of project " + projectName + " is " + System.identityHashCode(project));
if (this.lockdownUploadProjects && !UserUtils
.hasPermissionforAction(this.userManager, user, Type.UPLOADPROJECTS)) {
final String message =
"Project uploading is locked out. Only admin users and users with special permissions can upload projects. "
+ "User " + user.getUserId() + " doesn't have permission to upload project.";
logger.info(message);
registerError(ret, message, resp, HttpServletResponse.SC_FORBIDDEN);
return null;
}
if (!hasPermission(project, user, Type.WRITE)) {
registerError(ret,
"Installation Failed. User '" + user.getUserId() + "' does not have write access.",
resp, HttpServletResponse.SC_BAD_REQUEST);
return null;
}
return project;
}
/**
* Remove schedule of renamed/deleted flows
*
* @param project project from which old flows will be unscheduled
* @param scheduleManager the schedule manager
* @param onDeletedSchedule a callback function to execute with every deleted schedule
*/
static void removeScheduleOfDeletedFlows(final Project project,
final ScheduleManager scheduleManager, final Consumer<Schedule> onDeletedSchedule)
throws ScheduleManagerException {
final Set<String> flowNameList = project.getFlows().stream().map(f -> f.getId()).collect(
Collectors.toSet());
for (final Schedule schedule : scheduleManager.getSchedules()) {
if (schedule.getProjectId() == project.getId() &&
!flowNameList.contains(schedule.getFlowName())) {
scheduleManager.removeSchedule(schedule);
onDeletedSchedule.accept(schedule);
}
}
}
private void registerErrorsAndWarningsFromValidationReport(final HttpServletResponse resp,
final Map<String, String> ret, final Map<String, ValidationReport> reports) {
    final StringBuilder errorMsgs = new StringBuilder();
    final StringBuilder warnMsgs = new StringBuilder();
for (final Entry<String, ValidationReport> reportEntry : reports.entrySet()) {
final ValidationReport report = reportEntry.getValue();
for (final String msg : report.getInfoMsgs()) {
switch (ValidationReport.getInfoMsgLevel(msg)) {
case ERROR:
errorMsgs.append(ValidationReport.getInfoMsg(msg) + "<br/>");
break;
case WARN:
warnMsgs.append(ValidationReport.getInfoMsg(msg) + "<br/>");
break;
default:
break;
}
}
if (!report.getErrorMsgs().isEmpty()) {
errorMsgs.append("Validator " + reportEntry.getKey() + " reports errors:<br><br>");
for (final String msg : report.getErrorMsgs()) {
errorMsgs.append(msg + "<br>");
}
}
if (!report.getWarningMsgs().isEmpty()) {
warnMsgs.append("Validator " + reportEntry.getKey() + " reports warnings:<br><br>");
for (final String msg : report.getWarningMsgs()) {
warnMsgs.append(msg + "<br>");
}
}
}
if (errorMsgs.length() > 0) {
// If putting more than 4000 characters in the cookie, the entire message will somehow
// get discarded.
registerError(ret,
errorMsgs.length() > 4000 ? errorMsgs.substring(0, 4000) : errorMsgs.toString(), resp,
HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
if (warnMsgs.length() > 0) {
ret.put("warn", warnMsgs.length() > 4000 ? warnMsgs.substring(0, 4000) : warnMsgs.toString());
}
}
/**
* @return the list of locked flows for the specified project.
*/
private List<String> getLockedFlows(final Project project) {
final List<Flow> flows = project.getFlows();
return flows.stream().filter(flow -> flow.isLocked()).map(flow -> flow.getId())
.collect(Collectors.toList());
}
/**
* Lock the specified flows for the project.
*
* @param project the project
* @param lockedFlows list of flow IDs of flows to lock
*/
private void lockFlowsForProject(final Project project, final List<String> lockedFlows) {
for (final String flowId : lockedFlows) {
final Flow flow = project.getFlow(flowId);
if (flow != null) {
flow.setLocked(true);
}
}
}
private void handleUpload(final HttpServletRequest req, final HttpServletResponse resp,
final Map<String, Object> multipart, final Session session) throws ServletException,
IOException {
final HashMap<String, String> ret = new HashMap<>();
final String projectName = (String) multipart.get("project");
ajaxHandleUpload(req, resp, ret, multipart, session);
if (ret.containsKey(ERROR_PARAM)) {
setErrorMessageInCookie(resp, ret.get(ERROR_PARAM));
}
if (ret.containsKey("warn")) {
setWarnMessageInCookie(resp, ret.get("warn"));
}
resp.sendRedirect(req.getRequestURI() + "?project=" + projectName);
}
private Permission getPermissionObject(final Project project, final User user,
final Permission.Type type) {
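    // Merge the user's direct and group permissions on the project with any permissions
    // granted through the user's roles.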
final Permission perm = project.getCollectivePermission(user);
for (final String roleName : user.getRoles()) {
final Role role = this.userManager.getRole(roleName);
perm.addPermissions(role.getPermission());
}
return perm;
}
private void handleReloadProjectWhitelist(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws IOException {
final HashMap<String, Object> ret = new HashMap<>();
if (hasPermission(session.getUser(), Permission.Type.ADMIN)) {
try {
if (this.projectManager.loadProjectWhiteList()) {
ret.put("success", "Project whitelist re-loaded!");
} else {
ret.put(ERROR_PARAM, "azkaban.properties doesn't contain property "
+ ProjectWhitelist.XML_FILE_PARAM);
}
} catch (final Exception e) {
ret.put(ERROR_PARAM,
"Exception occurred while trying to re-load project whitelist: "
+ e);
}
} else {
ret.put(ERROR_PARAM, "Provided session doesn't have admin privilege.");
}
this.writeJSON(resp, ret);
}
protected boolean hasPermission(final User user, final Permission.Type type) {
for (final String roleName : user.getRoles()) {
final Role role = this.userManager.getRole(roleName);
if (role.getPermission().isPermissionSet(type)
|| role.getPermission().isPermissionSet(Permission.Type.ADMIN)) {
return true;
}
}
return false;
}
private static class NodeLevelComparator implements Comparator<Node> {
@Override
public int compare(final Node node1, final Node node2) {
      return Integer.compare(node1.getLevel(), node2.getLevel());
}
}
public static class PageSelection {
private final String page;
private final int size;
private final boolean disabled;
private final int nextPage;
private boolean selected;
public PageSelection(final String pageName, final int size, final boolean disabled,
final boolean selected, final int nextPage) {
this.page = pageName;
this.size = size;
this.disabled = disabled;
this.setSelected(selected);
this.nextPage = nextPage;
}
public String getPage() {
return this.page;
}
public int getSize() {
return this.size;
}
public boolean getDisabled() {
return this.disabled;
}
public boolean isSelected() {
return this.selected;
}
public void setSelected(final boolean selected) {
this.selected = selected;
}
public int getNextPage() {
return this.nextPage;
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/ProjectServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.server.session.Session;
import azkaban.user.Permission;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.user.UserUtils;
import azkaban.utils.Pair;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
/**
* The main page
*/
public class ProjectServlet extends LoginAbstractAzkabanServlet {
private static final Logger logger = Logger.getLogger(ProjectServlet.class
.getName());
private static final String LOCKDOWN_CREATE_PROJECTS_KEY =
"lockdown.create.projects";
private static final long serialVersionUID = -1;
private UserManager userManager;
private boolean lockdownCreateProjects = false;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.userManager = server.getUserManager();
this.lockdownCreateProjects =
server.getServerProps().getBoolean(LOCKDOWN_CREATE_PROJECTS_KEY, false);
if (this.lockdownCreateProjects) {
logger.info("Creation of projects is locked down");
}
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
final ProjectManager manager =
((AzkabanWebServer) getApplication()).getProjectManager();
if (hasParam(req, "ajax")) {
handleAjaxAction(req, resp, session, manager);
} else if (hasParam(req, "doaction")) {
handleDoAction(req, resp, session);
} else {
handlePageRender(req, resp, session, manager);
}
}
  /**
   * Handles ajax requests. Returns a list of {@link SimplifiedProject} objects carrying
   * project information along with the user and group permission associations for each project.
   */
private void handleAjaxAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session, final ProjectManager manager)
throws ServletException, IOException {
final String ajaxName = getParam(req, "ajax");
final HashMap<String, Object> ret = new HashMap<>();
if (ajaxName.equals("fetchallprojects")) {
final List<Project> projects = manager.getProjects();
final List<SimplifiedProject> simplifiedProjects =
toSimplifiedProjects(projects);
ret.put("projects", simplifiedProjects);
} else if (ajaxName.equals("fetchuserprojects")) {
handleFetchUserProjects(req, session, manager, ret);
}
this.writeJSON(resp, ret);
}
  /**
   * Returns project ownership based on the given user. <br> If the caller provides a user name,
   * the method honors it. <br> If the caller provides an empty user name, or omits the user
   * param entirely, the user defaults to the session user.
   */
private void handleFetchUserProjects(final HttpServletRequest req, final Session session,
final ProjectManager manager, final HashMap<String, Object> ret)
throws ServletException {
User user = null;
// if key "user" is specified, follow this logic
if (hasParam(req, "user")) {
final String userParam = getParam(req, "user");
if (userParam.isEmpty()) {
user = session.getUser();
} else {
user = new User(userParam);
}
} else {
// if key "user" is not specified, default to the session user
user = session.getUser();
}
final List<Project> projects = manager.getUserProjects(user);
final List<SimplifiedProject> simplifiedProjects = toSimplifiedProjects(projects);
ret.put("projects", simplifiedProjects);
}
  /**
   * A simple helper that converts a {@code List<Project>} to a {@code List<SimplifiedProject>}.
   */
private List<SimplifiedProject> toSimplifiedProjects(final List<Project> projects) {
final List<SimplifiedProject> simplifiedProjects = new ArrayList<>();
for (final Project p : projects) {
final SimplifiedProject sp =
new SimplifiedProject(p.getId(), p.getName(),
p.getLastModifiedUser(), p.getCreateTimestamp(),
p.getUserPermissions(), p.getGroupPermissions());
simplifiedProjects.add(sp);
}
return simplifiedProjects;
}
/**
* Renders the user homepage that users see when they log in
*/
private void handlePageRender(final HttpServletRequest req,
final HttpServletResponse resp, final Session session, final ProjectManager manager) {
final User user = session.getUser();
final Page page =
newPage(req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
if (this.lockdownCreateProjects &&
!UserUtils.hasPermissionforAction(this.userManager, user, Permission.Type.CREATEPROJECTS)) {
page.add("hideCreateProject", true);
}
if (hasParam(req, "all")) {
final List<Project> projects = manager.getProjects();
page.add("viewProjects", "all");
page.add("projects", projects);
} else if (hasParam(req, "group")) {
final List<Project> projects = manager.getGroupProjects(user);
page.add("viewProjects", "group");
page.add("projects", projects);
} else {
final List<Project> projects = manager.getUserProjects(user);
page.add("viewProjects", "personal");
page.add("projects", projects);
}
page.render();
}
private void handleDoAction(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException {
if (getParam(req, "doaction").equals("search")) {
final String searchTerm = getParam(req, "searchterm");
if (!searchTerm.equals("") && !searchTerm.equals(".*")) {
handleFilter(req, resp, session, searchTerm);
return;
}
}
}
private void handleFilter(final HttpServletRequest req, final HttpServletResponse resp,
final Session session, final String searchTerm) {
final User user = session.getUser();
final ProjectManager manager =
((AzkabanWebServer) getApplication()).getProjectManager();
final Page page =
newPage(req, resp, session, "azkaban/webapp/servlet/velocity/index.vm");
if (hasParam(req, "all")) {
// do nothing special if one asks for 'ALL' projects
final List<Project> projects = manager.getProjectsByRegex(searchTerm);
page.add("allProjects", "");
page.add("projects", projects);
page.add("search_term", searchTerm);
} else {
final List<Project> projects = manager.getUserProjectsByRegex(user, searchTerm);
page.add("projects", projects);
page.add("search_term", searchTerm);
}
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
// TODO Auto-generated method stub
}
/**
* This class is used to represent a simplified project, which can be returned to end users via
* REST API. This is done in consideration that the API caller only wants certain project level
* information regarding a project, but does not want every flow and every job inside that
* project.
*
* @author jyu
*/
private static class SimplifiedProject {
private int projectId;
private String projectName;
private String createdBy;
private long createdTime;
private List<Pair<String, Permission>> userPermissions;
private List<Pair<String, Permission>> groupPermissions;
public SimplifiedProject(final int projectId, final String projectName,
final String createdBy, final long createdTime,
final List<Pair<String, Permission>> userPermissions,
final List<Pair<String, Permission>> groupPermissions) {
this.projectId = projectId;
this.projectName = projectName;
this.createdBy = createdBy;
this.createdTime = createdTime;
this.userPermissions = userPermissions;
this.groupPermissions = groupPermissions;
}
public int getProjectId() {
return this.projectId;
}
public void setProjectId(final int projectId) {
this.projectId = projectId;
}
public String getProjectName() {
return this.projectName;
}
public void setProjectName(final String projectName) {
this.projectName = projectName;
}
public String getCreatedBy() {
return this.createdBy;
}
public void setCreatedBy(final String createdBy) {
this.createdBy = createdBy;
}
public long getCreatedTime() {
return this.createdTime;
}
public void setCreatedTime(final long createdTime) {
this.createdTime = createdTime;
}
public List<Pair<String, Permission>> getUserPermissions() {
return this.userPermissions;
}
public void setUserPermissions(
final List<Pair<String, Permission>> userPermissions) {
this.userPermissions = userPermissions;
}
public List<Pair<String, Permission>> getGroupPermissions() {
return this.groupPermissions;
}
public void setGroupPermissions(
final List<Pair<String, Permission>> groupPermissions) {
this.groupPermissions = groupPermissions;
}
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/ScheduleServlet.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.Constants;
import azkaban.executor.ExecutionOptions;
import azkaban.flow.Flow;
import azkaban.flow.Node;
import azkaban.project.Project;
import azkaban.project.ProjectLogEvent.EventType;
import azkaban.project.ProjectManager;
import azkaban.scheduler.Schedule;
import azkaban.scheduler.ScheduleManager;
import azkaban.scheduler.ScheduleManagerException;
import azkaban.server.HttpRequestUtils;
import azkaban.server.session.Session;
import azkaban.sla.SlaAction;
import azkaban.sla.SlaOption;
import azkaban.sla.SlaOption.SlaOptionBuilder;
import azkaban.sla.SlaType;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.utils.TimeUtils;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.LocalDateTime;
import org.joda.time.ReadablePeriod;
import org.joda.time.format.DateTimeFormat;
public class ScheduleServlet extends LoginAbstractAzkabanServlet {
public static final String PARAM_SLA_EMAILS = "slaEmails";
public static final String PARAM_SCHEDULE_ID = "scheduleId";
public static final String PARAM_SETTINGS = "settings";
public static final String PARAM_ERROR = "error";
public static final String PARAM_ALL_JOB_NAMES = "allJobNames";
public static final String PARAM_STATUS = "status";
public static final String PARAM_MESSAGE = "message";
public static final String STATUS_SUCCESS = "success";
public static final String STATUS_ERROR = "error";
public static final String SLA_STATUS_SUCCESS = "SUCCESS";
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger.getLogger(ScheduleServlet.class);
private ProjectManager projectManager;
private ScheduleManager scheduleManager;
private UserManager userManager;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.userManager = server.getUserManager();
this.projectManager = server.getProjectManager();
this.scheduleManager = server.getScheduleManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else {
handleGetAllSchedules(req, resp, session);
}
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
if (ajaxName.equals("slaInfo")) {
ajaxSlaInfo(req, ret, session.getUser());
} else if (ajaxName.equals("setSla")) {
ajaxSetSla(req, ret, session.getUser());
// alias loadFlow is preserved for backward compatibility
} else if (ajaxName.equals("fetchSchedules") || ajaxName.equals("loadFlow")) {
ajaxFetchSchedules(ret);
} else if (ajaxName.equals("scheduleFlow")) {
ajaxScheduleFlow(req, ret, session.getUser());
} else if (ajaxName.equals("scheduleCronFlow")) {
ajaxScheduleCronFlow(req, ret, session.getUser());
} else if (ajaxName.equals("fetchSchedule")) {
ajaxFetchSchedule(req, ret, session.getUser());
}
    this.writeJSON(resp, ret);
}
private void ajaxFetchSchedules(final HashMap<String, Object> ret) throws ServletException {
final List<Schedule> schedules;
try {
schedules = this.scheduleManager.getSchedules();
} catch (final ScheduleManagerException e) {
throw new ServletException(e);
}
// See if anything is scheduled
    if (schedules.isEmpty()) {
return;
}
final List<HashMap<String, Object>> output =
new ArrayList<>();
ret.put("items", output);
for (final Schedule schedule : schedules) {
try {
writeScheduleData(output, schedule);
} catch (final ScheduleManagerException e) {
throw new ServletException(e);
}
}
}
private void writeScheduleData(final List<HashMap<String, Object>> output,
final Schedule schedule) throws ScheduleManagerException {
final HashMap<String, Object> data = new HashMap<>();
data.put(PARAM_SCHEDULE_ID, schedule.getScheduleId());
data.put("flowname", schedule.getFlowName());
data.put("projectname", schedule.getProjectName());
data.put("time", schedule.getFirstSchedTime());
data.put("cron", schedule.getCronExpression());
final DateTime time = DateTime.now();
long period = 0;
if (schedule.getPeriod() != null) {
period = time.plus(schedule.getPeriod()).getMillis() - time.getMillis();
}
data.put("period", period);
data.put("history", false);
output.add(data);
}
private void ajaxSetSla(final HttpServletRequest req, final HashMap<String, Object> ret,
final User user) {
try {
final int scheduleId = getIntParam(req, PARAM_SCHEDULE_ID);
final Schedule sched = this.scheduleManager.getSchedule(scheduleId);
if (sched == null) {
ret.put(PARAM_ERROR,
"Error loading schedule. Schedule " + scheduleId
+ " doesn't exist");
return;
}
final Project project = this.projectManager.getProject(sched.getProjectId());
if (!hasPermission(project, user, Permission.Type.SCHEDULE)) {
ret.put(PARAM_ERROR, "User " + user
+ " does not have permission to set SLA for this flow.");
return;
}
final String emailStr = getParam(req, PARAM_SLA_EMAILS);
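      // Accept comma-, semicolon-, or whitespace-separated email lists.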
final String[] emailSplit = emailStr.split("\\s*,\\s*|\\s*;\\s*|\\s+");
final List<String> slaEmails = Arrays.asList(emailSplit);
final Map<String, String> settings = getParamGroup(req, PARAM_SETTINGS);
List<SlaOption> slaOptions = new ArrayList<>();
for (final String set : settings.keySet()) {
final SlaOption slaOption;
try {
slaOption = parseSlaSetting(settings.get(set), sched.getFlowName(), slaEmails);
} catch (final Exception e) {
throw new ServletException(e);
}
slaOptions.add(slaOption);
}
if (slaOptions.isEmpty()) {
throw new ScheduleManagerException(
String.format("SLA for schedule %s must have at least one action", scheduleId));
}
sched.getExecutionOptions().setSlaOptions(slaOptions);
this.scheduleManager.insertSchedule(sched);
this.projectManager.postProjectEvent(project, EventType.SLA,
user.getUserId(), "SLA for flow " + sched.getFlowName()
+ " has been added/changed.");
} catch (final ServletException e) {
ret.put(PARAM_ERROR, e.getMessage());
} catch (final ScheduleManagerException e) {
logger.error(e.getMessage(), e);
ret.put(PARAM_ERROR, e.getMessage());
}
}
  private SlaOption parseSlaSetting(final String set, final String flowName,
      final List<String> emails) throws ScheduleManagerException {
logger.info("Trying to set sla with the following set: " + set);
final String[] parts = set.split(",", -1);
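    // Each setting is expected to be "id,rule,duration,emailAction,killAction", e.g.
    //   ",SUCCESS,2:30,true,false"    -> flow-level rule: alert if not succeeded within 2h30m
    //   "jobA,FINISH,0:45,false,true" -> job-level rule for jobA: kill if not finished in 45m
    // An empty id means the rule applies to the whole flow.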
final String id = parts[0];
final String rule = parts[1];
final String duration = parts[2];
final String emailAction = parts[3];
final String killAction = parts[4];
SlaType type;
if (id.length() == 0) {
if (rule.equals(SLA_STATUS_SUCCESS)) {
type = SlaType.FLOW_SUCCEED;
} else {
type = SlaType.FLOW_FINISH;
}
} else { // JOB
if (rule.equals(SLA_STATUS_SUCCESS)) {
type = SlaType.JOB_SUCCEED;
} else {
type = SlaType.JOB_FINISH;
}
}
HashSet<SlaAction> actions = new HashSet<>();
if (emailAction.equals("true")) {
actions.add(SlaAction.ALERT);
}
if (killAction.equals("true")) {
actions.add(SlaAction.KILL);
}
final Duration dur;
try {
dur = parseDuration(duration);
} catch (final Exception e) {
      throw new ScheduleManagerException(
          "Unable to parse the duration for an SLA that needs to take actions!", e);
}
if (actions.isEmpty()) {
throw new ScheduleManagerException("Unable to create SLA as there is no action set");
}
logger.info("Parsing sla as id:" + id + " type:" + type + " sla:"
+ rule + " Duration:" + duration + " actions:" + actions);
return new SlaOptionBuilder(type, flowName, dur).setJobName(id).setActions(actions)
.setEmails(emails).createSlaOption();
}
private Duration parseDuration(final String duration) {
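    // The duration arrives as "H:MM" (e.g. "2:30" -> 150 minutes), matching the SLA
    // settings format parsed above.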
final int hour = Integer.parseInt(duration.split(":")[0]);
final int min = Integer.parseInt(duration.split(":")[1]);
return Duration.ofMinutes(min + hour * 60);
}
private void ajaxFetchSchedule(final HttpServletRequest req,
final HashMap<String, Object> ret, final User user) throws ServletException {
final int projectId = getIntParam(req, "projectId");
final String flowId = getParam(req, "flowId");
try {
final Schedule schedule = this.scheduleManager.getSchedule(projectId, flowId);
if (schedule != null) {
final Map<String, Object> jsonObj = new HashMap<>();
jsonObj.put(PARAM_SCHEDULE_ID, Integer.toString(schedule.getScheduleId()));
jsonObj.put("submitUser", schedule.getSubmitUser());
jsonObj.put("firstSchedTime",
TimeUtils.formatDateTime(schedule.getFirstSchedTime()));
jsonObj.put("nextExecTime",
TimeUtils.formatDateTime(schedule.getNextExecTime()));
jsonObj.put("period", TimeUtils.formatPeriod(schedule.getPeriod()));
jsonObj.put("cronExpression", schedule.getCronExpression());
jsonObj.put("executionOptions", schedule.getExecutionOptions());
ret.put("schedule", jsonObj);
}
} catch (final ScheduleManagerException e) {
logger.error(e.getMessage(), e);
ret.put(PARAM_ERROR, e);
}
}
private void ajaxSlaInfo(final HttpServletRequest req, final HashMap<String, Object> ret,
final User user) {
final int scheduleId;
try {
scheduleId = getIntParam(req, PARAM_SCHEDULE_ID);
final Schedule sched = this.scheduleManager.getSchedule(scheduleId);
if (sched == null) {
ret.put(PARAM_ERROR,
"Error loading schedule. Schedule " + scheduleId
+ " doesn't exist");
return;
}
final Project project =
getProjectAjaxByPermission(ret, sched.getProjectId(), user, Type.READ);
if (project == null) {
ret.put(PARAM_ERROR,
"Error loading project. Project " + sched.getProjectId()
+ " doesn't exist");
return;
}
final Flow flow = project.getFlow(sched.getFlowName());
if (flow == null) {
ret.put(PARAM_ERROR, "Error loading flow. Flow " + sched.getFlowName()
+ " doesn't exist in " + sched.getProjectId());
return;
}
final List<SlaOption> slaOptions = sched.getExecutionOptions().getSlaOptions();
final ExecutionOptions flowOptions = sched.getExecutionOptions();
if (slaOptions != null && slaOptions.size() > 0) {
ret.put(PARAM_SLA_EMAILS, slaOptions.get(0).getEmails());
final List<Object> setObj = new ArrayList<>();
for (final SlaOption slaOption : slaOptions) {
setObj.add(slaOption.toWebObject());
}
ret.put(PARAM_SETTINGS, setObj);
} else if (flowOptions != null) {
if (flowOptions.getFailureEmails() != null) {
final List<String> emails = flowOptions.getFailureEmails();
if (emails.size() > 0) {
ret.put(PARAM_SLA_EMAILS, emails);
}
}
} else {
if (flow.getFailureEmails() != null) {
final List<String> emails = flow.getFailureEmails();
if (emails.size() > 0) {
ret.put(PARAM_SLA_EMAILS, emails);
}
}
}
final List<String> allJobs = new ArrayList<>();
for (final Node n : flow.getNodes()) {
allJobs.add(n.getId());
}
ret.put(PARAM_ALL_JOB_NAMES, allJobs);
} catch (final ServletException e) {
ret.put(PARAM_ERROR, e);
} catch (final ScheduleManagerException e) {
logger.error(e.getMessage(), e);
ret.put(PARAM_ERROR, e);
}
}
protected Project getProjectAjaxByPermission(final Map<String, Object> ret,
final int projectId, final User user, final Permission.Type type) {
return filterProjectByPermission(this.projectManager.getProject(projectId), user, type, ret);
}
private void handleGetAllSchedules(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/scheduledflowpage.vm");
final List<Schedule> schedules;
try {
schedules = this.scheduleManager.getSchedules();
} catch (final ScheduleManagerException e) {
throw new ServletException(e);
}
page.add("schedules", schedules);
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else {
final HashMap<String, Object> ret = new HashMap<>();
if (hasParam(req, "action")) {
final String action = getParam(req, "action");
if (action.equals("scheduleFlow")) {
ajaxScheduleFlow(req, ret, session.getUser());
} else if (action.equals("scheduleCronFlow")) {
ajaxScheduleCronFlow(req, ret, session.getUser());
} else if (action.equals("removeSched")) {
ajaxRemoveSched(req, ret, session.getUser());
}
}
      if (STATUS_SUCCESS.equals(ret.get(PARAM_STATUS))) {
setSuccessMessageInCookie(resp, (String) ret.get(PARAM_MESSAGE));
} else {
setErrorMessageInCookie(resp, (String) ret.get(PARAM_MESSAGE));
}
this.writeJSON(resp, ret);
}
}
private void ajaxRemoveSched(final HttpServletRequest req, final Map<String, Object> ret,
final User user) throws ServletException {
final int scheduleId = getIntParam(req, PARAM_SCHEDULE_ID);
final Schedule sched;
try {
sched = this.scheduleManager.getSchedule(scheduleId);
} catch (final ScheduleManagerException e) {
throw new ServletException(e);
}
if (sched == null) {
ret.put(PARAM_MESSAGE, "Schedule with ID " + scheduleId + " does not exist");
ret.put(PARAM_STATUS, STATUS_ERROR);
return;
}
final Project project = this.projectManager.getProject(sched.getProjectId());
if (project == null) {
ret.put(PARAM_MESSAGE, "Project " + sched.getProjectId() + " does not exist");
ret.put(PARAM_STATUS, STATUS_ERROR);
return;
}
if (!hasPermission(project, user, Type.SCHEDULE)) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, "Permission denied. Cannot remove schedule with id "
+ scheduleId);
return;
}
this.scheduleManager.removeSchedule(sched);
logger.info("User '" + user.getUserId() + " has removed schedule "
+ sched.getScheduleName());
this.projectManager
.postProjectEvent(project, EventType.SCHEDULE, user.getUserId(),
"Schedule " + sched.toString() + " has been removed.");
ret.put(PARAM_STATUS, STATUS_SUCCESS);
ret.put(PARAM_MESSAGE, "flow " + sched.getFlowName()
+ " removed from Schedules.");
return;
}
@Deprecated
private void ajaxScheduleFlow(final HttpServletRequest req,
final HashMap<String, Object> ret, final User user) throws ServletException {
final String projectName = getParam(req, "projectName");
final String flowName = getParam(req, "flow");
final int projectId = getIntParam(req, "projectId");
final Project project = this.projectManager.getProject(projectId);
if (project == null) {
ret.put(PARAM_MESSAGE, "Project " + projectName + " does not exist");
ret.put(PARAM_STATUS, STATUS_ERROR);
return;
}
if (!hasPermission(project, user, Type.SCHEDULE)) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, "Permission denied. Cannot execute " + flowName);
return;
}
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, "Flow " + flowName + " cannot be found in project "
+ projectName);
return;
}
final String scheduleTime = getParam(req, "scheduleTime");
final String scheduleDate = getParam(req, "scheduleDate");
final DateTime firstSchedTime;
try {
firstSchedTime = parseDateTime(scheduleDate, scheduleTime);
} catch (final Exception e) {
ret.put(PARAM_ERROR, "Invalid date and/or time '" + scheduleDate + " "
+ scheduleTime);
return;
}
final long endSchedTime = getLongParam(req, "endSchedTime",
Constants.DEFAULT_SCHEDULE_END_EPOCH_TIME);
try {
// Todo kunkun-tang: Need to verify if passed end time is valid.
} catch (final Exception e) {
ret.put(PARAM_ERROR, "Invalid date and time: " + endSchedTime);
return;
}
ReadablePeriod thePeriod = null;
try {
if (hasParam(req, "is_recurring")
&& getParam(req, "is_recurring").equals("on")) {
thePeriod = TimeUtils.parsePeriodString(getParam(req, "period"));
}
} catch (final Exception e) {
ret.put(PARAM_ERROR, e.getMessage());
}
ExecutionOptions flowOptions = null;
try {
flowOptions = HttpRequestUtils.parseFlowOptions(req);
HttpRequestUtils.filterAdminOnlyFlowParams(this.userManager, flowOptions, user);
} catch (final Exception e) {
ret.put(PARAM_ERROR, e.getMessage());
}
final Schedule schedule =
this.scheduleManager.scheduleFlow(-1, projectId, projectName, flowName,
"ready", firstSchedTime.getMillis(), endSchedTime, firstSchedTime.getZone(),
thePeriod, DateTime.now().getMillis(), firstSchedTime.getMillis(),
firstSchedTime.getMillis(), user.getUserId(), flowOptions);
logger.info("User '" + user.getUserId() + "' has scheduled " + "["
+ projectName + flowName + " (" + projectId + ")" + "].");
this.projectManager.postProjectEvent(project, EventType.SCHEDULE,
user.getUserId(), "Schedule " + schedule.toString()
+ " has been added.");
ret.put(PARAM_STATUS, STATUS_SUCCESS);
ret.put(PARAM_SCHEDULE_ID, schedule.getScheduleId());
ret.put(PARAM_MESSAGE, projectName + "." + flowName + " scheduled.");
}
/**
* This method is in charge of doing cron scheduling.
*/
private void ajaxScheduleCronFlow(final HttpServletRequest req,
final HashMap<String, Object> ret, final User user) throws ServletException {
final String projectName = getParam(req, "projectName");
final String flowName = getParam(req, "flow");
final Project project = this.projectManager.getProject(projectName);
if (project == null) {
ret.put(PARAM_MESSAGE, "Project " + projectName + " does not exist");
ret.put(PARAM_STATUS, STATUS_ERROR);
return;
}
final int projectId = project.getId();
if (!hasPermission(project, user, Type.SCHEDULE)) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, "Permission denied. Cannot execute " + flowName);
return;
}
final Flow flow = project.getFlow(flowName);
if (flow == null) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, "Flow " + flowName + " cannot be found in project "
+ projectName);
return;
}
if (flow.isLocked()) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, "Flow " + flowName + " in project " + projectName + " is locked.");
return;
}
final boolean hasFlowTrigger;
try {
hasFlowTrigger = this.projectManager.hasFlowTrigger(project, flow);
} catch (final Exception ex) {
logger.error(ex);
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, String.format("Error looking for flow trigger of flow: %s.%s ",
projectName, flowName));
return;
}
if (hasFlowTrigger) {
ret.put(PARAM_STATUS, STATUS_ERROR);
ret.put(PARAM_MESSAGE, String.format("<font color=\"red\"> Error: Flow %s.%s is already "
+ "associated with flow trigger, so schedule has to be defined in flow trigger config </font>",
projectName, flowName));
return;
}
final DateTimeZone timezone = DateTimeZone.getDefault();
final DateTime firstSchedTime = getPresentTimeByTimezone(timezone);
String cronExpression = null;
try {
if (hasParam(req, "cronExpression")) {
        // Everything in Azkaban runs at minute granularity, so the quartz cron expression
        // is expected to carry a leading seconds field of 0 to be complete.
        cronExpression = getParam(req, "cronExpression");
        if (!azkaban.utils.Utils.isCronExpressionValid(cronExpression, timezone)) {
          ret.put(PARAM_ERROR,
              "This expression <" + cronExpression + "> cannot be parsed into a quartz cron.");
return;
}
}
if (cronExpression == null) {
throw new Exception("Cron expression must exist.");
}
    } catch (final Exception e) {
      ret.put(PARAM_ERROR, e.getMessage());
      return;
    }
final long endSchedTime = getLongParam(req, "endSchedTime",
Constants.DEFAULT_SCHEDULE_END_EPOCH_TIME);
try {
// Todo kunkun-tang: Need to verify if passed end time is valid.
} catch (final Exception e) {
ret.put(PARAM_ERROR, "Invalid date and time: " + endSchedTime);
return;
}
ExecutionOptions flowOptions = null;
try {
flowOptions = HttpRequestUtils.parseFlowOptions(req);
HttpRequestUtils.filterAdminOnlyFlowParams(this.userManager, flowOptions, user);
} catch (final Exception e) {
ret.put(PARAM_ERROR, e.getMessage());
}
// Because either cronExpression or recurrence exists, we build schedule in the below way.
final Schedule schedule = this.scheduleManager
.cronScheduleFlow(-1, projectId, projectName, flowName,
"ready", firstSchedTime.getMillis(), endSchedTime, firstSchedTime.getZone(),
DateTime.now().getMillis(), firstSchedTime.getMillis(),
firstSchedTime.getMillis(), user.getUserId(), flowOptions,
cronExpression);
logger.info("User '" + user.getUserId() + "' has scheduled " + "["
+ projectName + flowName + " (" + projectId + ")" + "].");
this.projectManager.postProjectEvent(project, EventType.SCHEDULE,
user.getUserId(), "Schedule " + schedule.toString()
+ " has been added.");
ret.put(PARAM_STATUS, STATUS_SUCCESS);
ret.put(PARAM_SCHEDULE_ID, schedule.getScheduleId());
ret.put(PARAM_MESSAGE, projectName + "." + flowName + " scheduled.");
}
private DateTime parseDateTime(final String scheduleDate, final String scheduleTime) {
// scheduleTime: 12,00,pm,PDT
final String[] parts = scheduleTime.split(",", -1);
int hour = Integer.parseInt(parts[0]);
final int minutes = Integer.parseInt(parts[1]);
final boolean isPm = parts[2].equalsIgnoreCase("pm");
final DateTimeZone timezone =
parts[3].equals("UTC") ? DateTimeZone.UTC : DateTimeZone.getDefault();
// scheduleDate: 02/10/2013
DateTime day = null;
if (scheduleDate == null || scheduleDate.trim().length() == 0) {
day = new LocalDateTime().toDateTime();
} else {
day = DateTimeFormat.forPattern("MM/dd/yyyy")
.withZone(timezone).parseDateTime(scheduleDate);
}
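    // Convert the 12-hour clock to 24-hour: 12am -> 0, 12pm -> 12.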
hour %= 12;
if (isPm) {
hour += 12;
}
final DateTime firstSchedTime =
day.withHourOfDay(hour).withMinuteOfHour(minutes).withSecondOfMinute(0);
return firstSchedTime;
}
/**
* @param cronTimezone represents the timezone from remote API call
* @return if the string is equal to UTC, we return UTC; otherwise, we always return default
* timezone.
*/
private DateTimeZone parseTimeZone(final String cronTimezone) {
if (cronTimezone != null && cronTimezone.equals("UTC")) {
return DateTimeZone.UTC;
}
return DateTimeZone.getDefault();
}
private DateTime getPresentTimeByTimezone(final DateTimeZone timezone) {
return new DateTime(timezone);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/StatsServlet.java
|
/*
* Copyright 2014 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.executor.ConnectorParams;
import azkaban.executor.Executor;
import azkaban.executor.ExecutorManagerAdapter;
import azkaban.executor.ExecutorManagerException;
import azkaban.server.session.Session;
import azkaban.user.User;
import azkaban.user.UserManager;
import azkaban.utils.Pair;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
/**
* User facing servlet for Azkaban default metric display
*/
public class StatsServlet extends LoginAbstractAzkabanServlet {
private static final Logger logger = Logger.getLogger(StatsServlet.class);
private static final long serialVersionUID = 1L;
private UserManager userManager;
private ExecutorManagerAdapter execManagerAdapter;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.userManager = server.getUserManager();
this.execManagerAdapter = server.getExecutorManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session)
throws ServletException,
IOException {
if (hasParam(req, ConnectorParams.ACTION_PARAM)) {
handleAJAXAction(req, resp, session);
} else {
handleStatePageLoad(req, resp, session);
}
}
private void handleAJAXAction(final HttpServletRequest req, final HttpServletResponse resp,
final Session session)
throws ServletException, IOException {
final HashMap<String, Object> ret = new HashMap<>();
final int executorId = getIntParam(req, ConnectorParams.EXECUTOR_ID_PARAM);
final String actionName = getParam(req, ConnectorParams.ACTION_PARAM);
if (actionName.equals(ConnectorParams.STATS_GET_METRICHISTORY)) {
handleGetMetricHistory(executorId, req, ret, session.getUser());
} else if (actionName.equals(ConnectorParams.STATS_GET_ALLMETRICSNAME)) {
handleGetAllMetricName(executorId, req, ret);
} else if (actionName.equals(ConnectorParams.STATS_SET_REPORTINGINTERVAL)) {
handleChangeConfigurationRequest(executorId, ConnectorParams.STATS_SET_REPORTINGINTERVAL, req,
ret);
} else if (actionName.equals(ConnectorParams.STATS_SET_CLEANINGINTERVAL)) {
handleChangeConfigurationRequest(executorId, ConnectorParams.STATS_SET_CLEANINGINTERVAL, req,
ret);
} else if (actionName.equals(ConnectorParams.STATS_SET_MAXREPORTERPOINTS)) {
handleChangeConfigurationRequest(executorId, ConnectorParams.STATS_SET_MAXREPORTERPOINTS, req,
ret);
} else if (actionName.equals(ConnectorParams.STATS_SET_ENABLEMETRICS)) {
handleChangeConfigurationRequest(executorId, ConnectorParams.STATS_SET_ENABLEMETRICS, req,
ret);
} else if (actionName.equals(ConnectorParams.STATS_SET_DISABLEMETRICS)) {
handleChangeConfigurationRequest(executorId, ConnectorParams.STATS_SET_DISABLEMETRICS, req,
ret);
}
writeJSON(resp, ret);
}
/**
* Get all metrics tracked by the given executor
*/
private void handleGetAllMetricName(final int executorId, final HttpServletRequest req,
final HashMap<String, Object> ret) throws IOException {
final Map<String, Object> result;
try {
result =
this.execManagerAdapter.callExecutorStats(executorId,
ConnectorParams.STATS_GET_ALLMETRICSNAME,
(Pair<String, String>[]) null);
if (result.containsKey(ConnectorParams.RESPONSE_ERROR)) {
ret.put("error", result.get(ConnectorParams.RESPONSE_ERROR).toString());
} else {
ret.put("metricList", result.get("data"));
}
} catch (final ExecutorManagerException e) {
logger.error(e.getMessage(), e);
ret.put("error", "Failed to fetch metric names for executor : "
+ executorId);
}
}
/**
 * Generic handler that relays a configuration-change action to the Azkaban exec server.
 *
 * @param actionName name of the action to relay
 */
private void handleChangeConfigurationRequest(final int executorId, final String actionName,
final HttpServletRequest req, final HashMap<String, Object> ret)
throws ServletException, IOException {
try {
final Map<String, Object> result =
this.execManagerAdapter
.callExecutorStats(executorId, actionName, getAllParams(req));
if (result.containsKey(ConnectorParams.RESPONSE_ERROR)) {
ret.put(ConnectorParams.RESPONSE_ERROR,
result.get(ConnectorParams.RESPONSE_ERROR).toString());
} else {
ret.put(ConnectorParams.STATUS_PARAM,
result.get(ConnectorParams.STATUS_PARAM));
}
} catch (final ExecutorManagerException ex) {
logger.error(ex.getMessage(), ex);
ret.put("error", "Failed to change config change");
}
}
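// Illustrative request flow (parameter values are made up): an AJAX call such as
//   /stats?action=<ConnectorParams.STATS_SET_REPORTINGINTERVAL>&executorId=1&interval=60000
// is routed by handleAJAXAction to this handler, which forwards every request
// parameter unchanged to the selected executor and mirrors back either its
// status or its error message.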
/**
* Get metric snapshots for a metric and date specification
*/
private void handleGetMetricHistory(final int executorId, final HttpServletRequest req,
final HashMap<String, Object> ret, final User user) throws IOException,
ServletException {
try {
final Map<String, Object> result =
this.execManagerAdapter.callExecutorStats(executorId,
ConnectorParams.STATS_GET_METRICHISTORY, getAllParams(req));
if (result.containsKey(ConnectorParams.RESPONSE_ERROR)) {
ret.put(ConnectorParams.RESPONSE_ERROR,
result.get(ConnectorParams.RESPONSE_ERROR).toString());
} else {
ret.put("data", result.get("data"));
}
} catch (final ExecutorManagerException ex) {
logger.error(ex.getMessage(), ex);
ret.put("error", "Failed to fetch metric history");
}
}
/**
 * Load the stats page, populating the executor list and the metric names
 * fetched from the first active executor.
 */
private void handleStatePageLoad(final HttpServletRequest req, final HttpServletResponse resp,
final Session session)
throws ServletException {
final Page page = newPage(req, resp, session, "azkaban/webapp/servlet/velocity/statsPage.vm");
try {
final Collection<Executor> executors = this.execManagerAdapter.getAllActiveExecutors();
page.add("executorList", executors);
if (executors.isEmpty()) {
throw new ExecutorManagerException("Executor list is empty.");
}
final Map<String, Object> result =
this.execManagerAdapter.callExecutorStats(executors.iterator().next().getId(),
ConnectorParams.STATS_GET_ALLMETRICSNAME,
(Pair<String, String>[]) null);
if (result.containsKey(ConnectorParams.RESPONSE_ERROR)) {
page.add("errorMsg", result.get(ConnectorParams.RESPONSE_ERROR)
.toString());
} else {
page.add("metricList", result.get("data"));
}
} catch (final Exception e) {
logger.error(e.getMessage(), e);
page.add("errorMsg", "Failed to get a response from Azkaban exec server");
}
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session)
throws ServletException,
IOException {
}
/**
* Parse all Http request params
*/
private Pair<String, String>[] getAllParams(final HttpServletRequest req) {
final List<Pair<String, String>> allParams = new LinkedList<>();
final Iterator it = req.getParameterMap().entrySet().iterator();
while (it.hasNext()) {
final Map.Entry pairs = (Map.Entry) it.next();
for (final Object value : (String[]) pairs.getValue()) {
allParams.add(new Pair<>((String) pairs.getKey(), (String) value));
}
}
return allParams.toArray(new Pair[allParams.size()]);
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/StatusServlet.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package azkaban.webapp.servlet;
import static azkaban.webapp.servlet.AbstractAzkabanServlet.JSON_MIME_TYPE;
import azkaban.webapp.StatusService;
import com.google.gson.GsonBuilder;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class StatusServlet extends HttpServlet {
private static final Logger log = LoggerFactory.getLogger(StatusServlet.class);
private final StatusService statusService;
public StatusServlet(final StatusService statusService) {
this.statusService = statusService;
}
@Override
protected void doGet(final HttpServletRequest req, final HttpServletResponse resp)
throws ServletException, IOException {
try {
resp.setContentType(JSON_MIME_TYPE);
resp.getOutputStream()
.println(new GsonBuilder()
.setPrettyPrinting()
.create()
.toJson(this.statusService.getStatus()));
resp.setStatus(HttpServletResponse.SC_OK);
} catch (final Exception e) {
log.error("Error!! while reporting status: ", e);
resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
} finally {
resp.getOutputStream().close();
}
}
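// Usage sketch (hypothetical host, port, and mount path): a plain
//   GET http://localhost:8081/status
// returns the StatusService payload pretty-printed as JSON with HTTP 200, or an
// HTTP 500 carrying the exception message if status collection fails.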
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/TriggerManagerServlet.java
|
/*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.server.session.Session;
import azkaban.trigger.Trigger;
import azkaban.trigger.TriggerManager;
import azkaban.trigger.TriggerManagerException;
import azkaban.user.User;
import azkaban.webapp.AzkabanWebServer;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
public class TriggerManagerServlet extends LoginAbstractAzkabanServlet {
private static final long serialVersionUID = 1L;
private static final Logger logger = Logger
.getLogger(TriggerManagerServlet.class);
private TriggerManager triggerManager;
@Override
public void init(final ServletConfig config) throws ServletException {
super.init(config);
final AzkabanWebServer server = (AzkabanWebServer) getApplication();
this.triggerManager = server.getTriggerManager();
}
@Override
protected void handleGet(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
} else {
handleGetAllSchedules(req, resp, session);
}
}
private void handleAJAXAction(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final HashMap<String, Object> ret = new HashMap<>();
final String ajaxName = getParam(req, "ajax");
try {
if (ajaxName.equals("expireTrigger")) {
ajaxExpireTrigger(req, ret, session.getUser());
}
} catch (final Exception e) {
ret.put("error", e.getMessage());
}
this.writeJSON(resp, ret);
}
private void handleGetAllSchedules(final HttpServletRequest req,
final HttpServletResponse resp, final Session session) throws ServletException,
IOException {
final Page page =
newPage(req, resp, session,
"azkaban/webapp/servlet/velocity/triggerspage.vm");
final List<Trigger> triggers = this.triggerManager.getTriggers();
page.add("triggers", triggers);
page.render();
}
@Override
protected void handlePost(final HttpServletRequest req, final HttpServletResponse resp,
final Session session) throws ServletException, IOException {
if (hasParam(req, "ajax")) {
handleAJAXAction(req, resp, session);
}
}
private void ajaxExpireTrigger(final HttpServletRequest req,
final Map<String, Object> ret, final User user) throws ServletException,
TriggerManagerException {
final int triggerId = getIntParam(req, "triggerId");
final Trigger t = this.triggerManager.getTrigger(triggerId);
if (t == null) {
ret.put("message", "Trigger with ID " + triggerId + " does not exist");
ret.put("status", "error");
return;
}
this.triggerManager.expireTrigger(triggerId);
logger.info("User '" + user.getUserId() + " has removed trigger "
+ t.getDescription());
ret.put("status", "success");
ret.put("message", "trigger " + triggerId + " removed from Schedules.");
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/VelocityUtil.java
|
/*
* Copyright 2017 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
public class VelocityUtil {
ProjectManager projectManager;
public VelocityUtil(final ProjectManager projectManager) {
this.projectManager = projectManager;
}
public String getProjectName(final int id) {
final Project project = this.projectManager.getProject(id);
if (project == null) {
return String.valueOf(id);
}
return project.getName();
}
}
|
0
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp
|
java-sources/ai/databand/azkaban/azkaban-web-server/3.90.0/azkaban/webapp/servlet/WebUtils.java
|
/*
* Copyright 2012 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package azkaban.webapp.servlet;
import azkaban.executor.Status;
import java.text.NumberFormat;
import java.util.HashMap;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
public class WebUtils {
public static final String X_FORWARDED_FOR_HEADER = "X-Forwarded-For";
private static final long ONE_KB = 1024;
private static final long ONE_MB = 1024 * ONE_KB;
private static final long ONE_GB = 1024 * ONE_MB;
private static final long ONE_TB = 1024 * ONE_GB;
public static String displayBytes(final long sizeBytes) {
final NumberFormat nf = NumberFormat.getInstance();
nf.setMaximumFractionDigits(2);
if (sizeBytes >= ONE_TB) {
return nf.format(sizeBytes / (double) ONE_TB) + " TB";
} else if (sizeBytes >= ONE_GB) {
return nf.format(sizeBytes / (double) ONE_GB) + " GB";
} else if (sizeBytes >= ONE_MB) {
return nf.format(sizeBytes / (double) ONE_MB) + " MB";
} else if (sizeBytes >= ONE_KB) {
return nf.format(sizeBytes / (double) ONE_KB) + " KB";
} else {
return sizeBytes + " B";
}
}
public static String formatStatus(final Status status) {
switch (status) {
case SUCCEEDED:
return "Success";
case FAILED:
return "Failed";
case RUNNING:
return "Running";
case DISABLED:
return "Disabled";
case KILLED:
return "Killed";
case FAILED_FINISHING:
return "Running w/Failure";
case PREPARING:
return "Preparing";
case READY:
return "Ready";
case PAUSED:
return "Paused";
case SKIPPED:
return "Skipped";
case KILLING:
return "Killing";
default:
return "Unknown";
}
}
/**
* Gets the actual client IP address on a best-effort basis, as the user could be
* sitting behind a proxy or VPN. The IP is taken from the X-Forwarded-For HTTP
* header when present, otherwise from the remote address of the low-level TCP
* connection from the client.
*
* If the X-Forwarded-For header contains multiple IP addresses, the first one
* (the first hop) is used.
*
* @param httpHeaders List of HTTP headers for the current request
* @param remoteAddr The client IP address and port from the current request's TCP connection
* @return The actual client IP address
*/
// TODO djaiswal83: Refactor this code and merge into single API
public static String getRealClientIpAddr(final Map<String, String> httpHeaders,
final String remoteAddr) {
// If some upstream device added an X-Forwarded-For header, use it for the
// client IP. This supports scenarios where load balancers or gateways front
// the Azkaban web server and a changing IP address would invalidate the session.
String clientIp = httpHeaders.getOrDefault(X_FORWARDED_FOR_HEADER, null);
if (clientIp == null) {
clientIp = remoteAddr;
} else {
// the header can contain a comma-separated list of upstream servers - take the first one
final String[] ips = clientIp.split(",");
clientIp = ips[0];
}
// Strip off port and only get IP address
final String[] parts = clientIp.split(":");
clientIp = parts[0];
return clientIp;
}
/**
* Gets the actual client IP address on a best-effort basis, as the user could be
* sitting behind a proxy or VPN. The IP is taken from the X-Forwarded-For HTTP
* header when present, otherwise from the remote address of the low-level TCP
* connection from the client.
*
* If the X-Forwarded-For header contains multiple IP addresses, the first one
* (the first hop) is used.
*
* @param req HttpServletRequest
* @return The actual client IP address
*/
public static String getRealClientIpAddr(final HttpServletRequest req) {
// If some upstream device added an X-Forwarded-For header, use it for the
// client IP. This supports scenarios where load balancers or gateways front
// the Azkaban web server and a changing IP address would invalidate the session.
final HashMap<String, String> headers = new HashMap<>();
headers.put(WebUtils.X_FORWARDED_FOR_HEADER,
req.getHeader(WebUtils.X_FORWARDED_FOR_HEADER.toLowerCase()));
return WebUtils.getRealClientIpAddr(headers, req.getRemoteAddr());
}
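// Hypothetical usage sketch (added for illustration; the header value and remote
// addresses below are invented): the first X-Forwarded-For hop wins over the TCP
// remote address, and any ":port" suffix is stripped.
public static void main(final String[] args) {
final Map<String, String> headers = new HashMap<>();
headers.put(X_FORWARDED_FOR_HEADER, "203.0.113.7, 10.0.0.2");
// Prints "203.0.113.7" - the first forwarded hop.
System.out.println(getRealClientIpAddr(headers, "10.0.0.1"));
// With no header present, the remote address is used as-is.
System.out.println(getRealClientIpAddr(new HashMap<>(), "198.51.100.4"));
}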
}
|
0
|
java-sources/ai/databand/dbnd-agent/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-agent/1.0.28.1/ai/databand/agent/ActiveJobTransformer.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022-2024
*/
package ai.databand.agent;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtConstructor;
import javassist.LoaderClassPath;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.lang.instrument.ClassFileTransformer;
import java.security.ProtectionDomain;
import org.slf4j.LoggerFactory;
import ai.databand.DbndAppLog;
public class ActiveJobTransformer implements ClassFileTransformer {
private static final DbndAppLog LOG = new DbndAppLog(LoggerFactory.getLogger(ActiveJobTransformer.class));
@Override
public byte[] transform(ClassLoader loader,
String className,
Class<?> classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) {
if (!"org/apache/spark/scheduler/ActiveJob".equalsIgnoreCase(className)) {
return classfileBuffer;
}
try (InputStream is = new ByteArrayInputStream(classfileBuffer)) {
LOG.info("Databand tracking of the Spark class 'ActiveJob'");
ClassPool cp = ClassPool.getDefault();
cp.appendClassPath(new LoaderClassPath(loader));
CtClass ct = cp.makeClass(is);
for (CtConstructor constructor : ct.getConstructors()) {
if (constructor.callsSuper()) {
constructor.insertAfter("{ ai.databand.spark.ActiveJobTracker.track(this); }");
}
}
return ct.toBytecode();
} catch (Throwable e) {
LOG.info("Databand tracking failed to modify the 'ActiveJob' class.");
e.printStackTrace();
return null;
}
}
}
|
0
|
java-sources/ai/databand/dbnd-agent/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-agent/1.0.28.1/ai/databand/agent/DbndAgent.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022-2024
*/
package ai.databand.agent;
import ai.databand.DbndAppLog;
import ai.databand.config.DbndAgentConfig;
import java.io.IOException;
import java.io.InputStream;
import java.lang.instrument.Instrumentation;
import java.lang.management.ManagementFactory;
import java.util.Properties;
public class DbndAgent {
public static void premain(String agentArgs, Instrumentation inst) {
Properties props = new Properties();
try (InputStream input = DbndAgent.class.getClassLoader().getResourceAsStream("application.properties")) {
props.load(input);
} catch (IOException e) {
// shouldn't occur
e.printStackTrace();
}
String jvmName = ManagementFactory.getRuntimeMXBean().getName();
DbndAppLog.printfln(org.slf4j.event.Level.INFO, "Successfully injected dbnd-agent (%s) on jvm process %s", props.getProperty("version"), jvmName);
// this is a workaround for the spark-submit case:
// for some reason CallSite is not loaded during the instrumentation phase, so we have to load it up front
try {
Class.forName("java.lang.invoke.CallSite");
} catch (Throwable e) {
e.printStackTrace();
}
DbndAgentConfig config = new DbndAgentConfig(agentArgs);
inst.addTransformer(new DbndTrackingTransformer(config));
if (config.sparkIoTrackingEnabled()) {
inst.addTransformer(new ActiveJobTransformer());
}
}
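// Illustrative attachment (hypothetical jar path and agent args): the agent is
// enabled with the standard -javaagent flag, so premain runs before any
// application class is loaded:
//   java -javaagent:/opt/dbnd/dbnd-agent-all.jar=<agentArgs> -jar my-pipeline.jar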
}
|
0
|
java-sources/ai/databand/dbnd-agent/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-agent/1.0.28.1/ai/databand/agent/DbndTrackingTransformer.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022-2024
*/
package ai.databand.agent;
import ai.databand.DbndAppLog;
import ai.databand.config.DbndAgentConfig;
import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtField;
import javassist.CtMethod;
import javassist.LoaderClassPath;
import javassist.NotFoundException;
import javassist.bytecode.AnnotationsAttribute;
import javassist.bytecode.DuplicateMemberException;
import javassist.bytecode.MethodInfo;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.lang.instrument.ClassFileTransformer;
import java.lang.management.ManagementFactory;
import java.security.ProtectionDomain;
import java.util.LinkedList;
import java.util.List;
import org.slf4j.LoggerFactory;
public class DbndTrackingTransformer implements ClassFileTransformer {
private static final String TASK_ANNOTATION = "ai.databand.annotations.Task";
private static final DbndAppLog LOG = new DbndAppLog(LoggerFactory.getLogger(DbndTrackingTransformer.class));
private final DbndAgentConfig config;
public DbndTrackingTransformer(DbndAgentConfig config) {
this.config = config;
}
public byte[] transform(ClassLoader loader,
String className,
Class classBeingRedefined,
ProtectionDomain protectionDomain,
byte[] classfileBuffer) {
ClassPool cp = ClassPool.getDefault();
cp.appendClassPath(new LoaderClassPath(loader));
try (InputStream is = new ByteArrayInputStream(classfileBuffer)) {
CtClass ct = cp.makeClass(is);
String jvmName = ManagementFactory.getRuntimeMXBean().getName();
List<CtMethod> annotatedMethods = getAnnotatedMethods(cp, ct, className, classfileBuffer);
if (annotatedMethods.isEmpty()) {
return null;
}
// add $dbnd variable
try {
ct.addField(CtField.make("static ai.databand.DbndWrapper $dbnd = ai.databand.DbndWrapper.instance();", ct));
} catch (DuplicateMemberException e) {
// do nothing
}
for (CtMethod method : annotatedMethods) {
// wrap methods annotated by @Task
MethodInfo methodInfo = method.getMethodInfo();
CtClass tr = cp.get("java.lang.Throwable");
LOG.verbose("Databand tracking of @Task annotated method '{}.{}()'", className, methodInfo.getName());
method.insertBefore("{ $dbnd.beforeTask(\"" + ct.getName() + "\", \"" + method.getLongName() + "\", $args); }");
method.insertAfter("{ $dbnd.afterTask(\"" + method.getLongName() + "\", (Object) ($w) $_); }");
method.addCatch("{ $dbnd.errorTask(\"" + method.getLongName() + "\", $e); throw $e; }", tr);
}
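// Net effect (sketch only, not the literal generated bytecode; "com.acme.Jobs"
// is an invented class name): a @Task method
//   int step(int x) { <body> }
// now behaves roughly like:
//   $dbnd.beforeTask("com.acme.Jobs", "com.acme.Jobs.step(int)", $args);
//   try { <body>; $dbnd.afterTask("com.acme.Jobs.step(int)", result); }
//   catch (Throwable t) { $dbnd.errorTask("com.acme.Jobs.step(int)", t); throw t; }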
LOG.info("Databand has succesfully detected and has started tracking of {} @Task annotated methods out of {} total methods declared directly inside the class '{}'", annotatedMethods.size(), ct.getDeclaredMethods().length, className);
return ct.toBytecode();
} catch (RuntimeException e) {
if (e.getMessage() != null && e.getMessage().contains("frozen")) {
return null;
}
} catch (Throwable e) {
LOG.error("Databand failed to add runtime tracking to class {}", className);
e.printStackTrace();
return null;
}
return classfileBuffer;
}
protected List<CtMethod> getAnnotatedMethods(ClassPool cp, CtClass ct, String className, byte[] classfileBuffer) {
List<CtMethod> annotatedMethods = new LinkedList<CtMethod>();
CtMethod[] declaredMethods = ct.getDeclaredMethods();
for (CtMethod method : declaredMethods) {
MethodInfo methodInfo = method.getMethodInfo();
AnnotationsAttribute attInfo = (AnnotationsAttribute) methodInfo.getAttribute(AnnotationsAttribute.visibleTag);
if (attInfo == null) {
continue;
}
if (attInfo.getAnnotation(TASK_ANNOTATION) != null) {
// check if scala object
if (!isScalaObject(cp, className)) {
return new LinkedList<CtMethod>();
}
annotatedMethods.add(method);
}
}
return annotatedMethods;
}
protected boolean isScalaObject(ClassPool cp, String className) {
if (className.contains("$")) {
// this is (probably) a Scala class
return true;
}
// this can be a Scala object; check whether a companion class with '$' exists
try {
cp.get(className + '$');
// a companion class exists on the classpath, so skip this plain class
return false;
} catch (NotFoundException e) {
return true;
}
}
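// Heuristic in practice (invented name): a Scala object "com.acme.Job" compiles to
// both Job and Job$. Instrumenting the plain Job class when a Job$ companion exists
// would double-track the methods, so only the '$'-suffixed class is accepted.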
}
|
0
|
java-sources/ai/databand/dbnd-api/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-api/1.0.28.1/ai/databand/annotations/Task.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.annotations;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* This annotation marks a method as a logical part of a pipeline. Every metric captured
* during the method's execution is reported to Databand. Nested methods that are not
* annotated with @Task are treated as part of the annotated method.
*/
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Task {
String value() default "";
}
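// Hypothetical usage sketch (class and method names are invented): annotated
// methods become tracked pipeline tasks; un-annotated helpers they call are
// folded into the calling task.
class TaskUsageExample {
@Task("prepare")
static int[] prepare(int[] raw) {
return normalize(raw); // normalize() is reported as part of "prepare"
}
@Task
static int sum(int[] values) {
int total = 0;
for (int v : values) {
total += v;
}
return total;
}
static int[] normalize(int[] raw) {
return raw.clone(); // no @Task -> not a separate task
}
}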
|
0
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand/deequ/DbndMetricsRepository.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.deequ;
import ai.databand.DbndWrapper;
import com.amazon.deequ.analyzers.runners.AnalyzerContext;
import com.amazon.deequ.repository.MetricsRepository;
import com.amazon.deequ.repository.MetricsRepositoryMultipleResultsLoader;
import com.amazon.deequ.repository.ResultKey;
import scala.Option;
import scala.collection.JavaConverters;
import java.util.Map;
/**
* Deequ metrics repository implementation. Reports all Deequ metrics to Databand.
*/
public class DbndMetricsRepository implements MetricsRepository {
private final DbndWrapper dbnd;
private final MetricsRepository origin;
public DbndMetricsRepository(DbndWrapper dbnd) {
this.dbnd = dbnd;
this.origin = new NoopMetricsRepository();
}
public DbndMetricsRepository(DbndWrapper dbnd, MetricsRepository originRepo) {
this.dbnd = dbnd;
this.origin = originRepo;
}
public DbndMetricsRepository() {
this.dbnd = DbndWrapper.instance();
this.origin = new NoopMetricsRepository();
}
public DbndMetricsRepository(MetricsRepository originRepo) {
this.dbnd = DbndWrapper.instance();
this.origin = originRepo;
}
@Override
public void save(ResultKey resultKey, AnalyzerContext analyzerContext) {
origin.save(resultKey, analyzerContext);
String dfName;
if (resultKey instanceof DbndResultKey) {
dfName = ((DbndResultKey) resultKey).dataSetName();
} else {
Map<String, String> tags = JavaConverters.mapAsJavaMapConverter(resultKey.tags()).asJava();
dfName = tags.getOrDefault("name", "data");
}
DeequToDbnd converted = new DeequToDbnd(dfName, analyzerContext);
dbnd.logMetrics(converted.metrics());
dbnd.logHistogram(converted.histograms());
}
@Override
public Option<AnalyzerContext> loadByKey(ResultKey resultKey) {
return origin.loadByKey(resultKey);
}
@Override
public MetricsRepositoryMultipleResultsLoader load() {
return origin.load();
}
}
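// Illustrative wiring (sketch; Deequ's builder API is Scala-first, shown here in a
// comment, and the repository path and result-key name are invented):
//   val repo = new DbndMetricsRepository(FileSystemMetricsRepository(spark, "/tmp/metrics.json"))
//   VerificationSuite().onData(df).addCheck(check).useRepository(repo)
//     .saveOrAppendResult(new DbndResultKey("orders")).run()
// Every metric Deequ computes is then persisted by the wrapped repository and
// mirrored to Databand by save().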
|
0
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand/deequ/DbndResultKey.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.deequ;
import com.amazon.deequ.repository.ResultKey;
import scala.collection.immutable.Map;
public class DbndResultKey extends ResultKey {
private final String dataSetName;
public DbndResultKey(long dataSetDate, Map<String, String> tags, String dataSetName) {
super(dataSetDate, tags);
this.dataSetName = dataSetName;
}
public DbndResultKey(String dataSetName) {
super(System.currentTimeMillis(), scala.collection.immutable.Map$.MODULE$.<String, String>empty());
this.dataSetName = dataSetName;
}
public DbndResultKey(long dataSetDate, Map<String, String> tags) {
super(dataSetDate, tags);
this.dataSetName = "dataSet";
}
public String dataSetName() {
return dataSetName;
}
}
|
0
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand/deequ/DeequToDbnd.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.deequ;
import com.amazon.deequ.analyzers.runners.AnalyzerContext;
import com.amazon.deequ.metrics.Distribution;
import com.amazon.deequ.metrics.DistributionValue;
import com.amazon.deequ.metrics.Metric;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import scala.collection.JavaConverters;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
public class DeequToDbnd {
private static final Logger LOG = LoggerFactory.getLogger(DeequToDbnd.class);
private final String dfName;
private final List<Metric<?>> deequMetrics;
private static final Map<String, String> DEEQU_TO_SPARK = new HashMap<>();
static {
DEEQU_TO_SPARK.put("ApproxCountDistinct", "distinct");
DEEQU_TO_SPARK.put("Minimum", "min");
DEEQU_TO_SPARK.put("Maximum", "max");
DEEQU_TO_SPARK.put("Mean", "mean");
DEEQU_TO_SPARK.put("StandardDeviation", "stddev");
DEEQU_TO_SPARK.put("Histogram", "histogram");
}
private static final Map<String, String> DEEQU_TO_DBND = new HashMap<>();
static {
DEEQU_TO_DBND.put("Boolean", "boolean");
DEEQU_TO_DBND.put("Fractional", "double");
DEEQU_TO_DBND.put("Integral", "integer");
DEEQU_TO_DBND.put("String", "string");
DEEQU_TO_DBND.put("Unknown", "string");
}
public DeequToDbnd(String dfName, AnalyzerContext analyzerContext) {
this.dfName = dfName;
deequMetrics = JavaConverters.seqAsJavaListConverter(analyzerContext.metricMap().values().toSeq()).asJava();
}
public Map<String, Object> metrics() {
Map<String, Object> metrics = new HashMap<>(1);
for (Metric<?> m : deequMetrics) {
// skip histogram metrics
if (m.value().isSuccess() && m.value().get() instanceof Distribution) {
continue;
}
String metricKey = String.format("deequ.%s.%s.%s", dfName, m.instance(), m.name());
if (m.value().isFailure()) {
LOG.error("Deequ calculation failed for key [{}]. Reason: {}", metricKey, m.value().failed().get().getMessage());
}
Object value = m.value().isSuccess() ? m.value().get() : "Failure";
metrics.put(metricKey, value);
}
return metrics;
}
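// Resulting shape (invented column and dataframe names): an ApproxCountDistinct
// result on column "user_id" of dataframe "orders" is reported under the key
// "deequ.orders.user_id.ApproxCountDistinct"; failed analyzers log an error and
// report the literal value "Failure" instead.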
public Map<String, Object> histograms() {
return buildHistograms(dfName, deequMetrics);
}
public Map<String, Object> buildHistograms(String dfName, List<Metric<?>> deequMetrics) {
Set<String> histogrammedCols = deequMetrics.stream()
.filter(m -> "Histogram".equalsIgnoreCase(m.name()))
.map(Metric::instance)
.collect(Collectors.toSet());
if (histogrammedCols.isEmpty()) {
return Collections.emptyMap();
}
Map<String, Object> result = new HashMap<>(1);
Map<String, Object> histograms = new HashMap<>(1);
Map<String, Map<String, Object>> stats = histogrammedCols.stream().collect(Collectors.toMap((m) -> m, (m) -> new HashMap<>(1)));
for (Metric<?> m : deequMetrics) {
String col = m.instance();
if (!histogrammedCols.contains(col)) {
continue;
}
String sparkMetric = DEEQU_TO_SPARK.get(m.name());
if (sparkMetric == null) {
continue;
}
if ("histogram".equalsIgnoreCase(sparkMetric)) {
// check whether this is a real value histogram
Distribution distribution = (Distribution) m.value().get();
Map<String, DistributionValue> binsAndValues = JavaConverters.mapAsJavaMapConverter(distribution.values()).asJava();
if (isDistributionHistogram(binsAndValues)) {
// this is a data-type distribution rather than a value histogram; infer the column type from it
String type = guessColumnTypeByDistribution(binsAndValues);
stats.get(col).put("type", type);
continue;
}
Object[][] histogram = distributionToHistogram(binsAndValues);
histograms.put(col, histogram);
// let's guess column type
// TODO: optimize
if (!stats.get(col).containsKey("type")) {
Object[] bins = histogram[1];
if (allBooleans(bins)) {
stats.get(col).put("type", "boolean");
} else if (allIntegers(bins)) {
stats.get(col).put("type", "integer");
} else if (allDoubles(bins)) {
stats.get(col).put("type", "double");
} else {
stats.get(col).put("type", "string");
}
}
continue;
}
stats.get(col).put(sparkMetric, m.value().get());
result.put(String.format("%s.%s.%s", dfName, col, sparkMetric), m.value().get());
}
result.put(String.format("%s.stats", dfName), stats);
if (!histograms.isEmpty()) {
result.put(String.format("%s.histograms", dfName), histograms);
}
return result;
}
public Object[][] distributionToHistogram(Map<String, DistributionValue> binsAndValues) {
Object[] bins = new Object[binsAndValues.size()];
Object[] values = new Object[binsAndValues.size()];
int i = 0;
for (Map.Entry<String, DistributionValue> entry : binsAndValues.entrySet()) {
bins[i] = entry.getKey();
values[i] = entry.getValue().absolute();
i++;
}
return new Object[][]{values, bins};
}
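// Shape sketch (invented values): a distribution {"a" -> 2, "b" -> 5} becomes
// new Object[][]{{2L, 5L}, {"a", "b"}} - absolute counts first, bin labels second
// (iteration order follows the map, so the two arrays stay aligned).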
protected boolean isDistributionHistogram(Map<String, DistributionValue> binsAndValues) {
for (String type : DEEQU_TO_DBND.keySet()) {
if (!binsAndValues.containsKey(type)) {
return false;
}
}
return true;
}
protected String guessColumnTypeByDistribution(Map<String, DistributionValue> binsAndValues) {
for (Map.Entry<String, String> entry : DEEQU_TO_DBND.entrySet()) {
if (binsAndValues.get(entry.getKey()).absolute() > 1) {
return entry.getValue();
}
}
return "string";
}
protected boolean allBooleans(Object[] values) {
for (Object o : values) {
if (o != null && !"true".equalsIgnoreCase(o.toString()) && !"false".equalsIgnoreCase(o.toString())) {
return false;
}
}
return true;
}
protected boolean allIntegers(Object[] values) {
for (Object o : values) {
if (o != null && o.toString().contains(".")) {
return false;
}
if (o != null) {
try {
Integer.parseInt(o.toString());
} catch (NumberFormatException e) {
return false;
}
}
}
return true;
}
protected boolean allDoubles(Object[] values) {
for (Object o : values) {
if (o != null) {
try {
Double.parseDouble(o.toString());
} catch (NumberFormatException e) {
return false;
}
}
}
return true;
}
}
|
0
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-api-deequ/1.0.28.1/ai/databand/deequ/NoopMetricsRepository.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.deequ;
import com.amazon.deequ.analyzers.runners.AnalyzerContext;
import com.amazon.deequ.repository.MetricsRepository;
import com.amazon.deequ.repository.MetricsRepositoryMultipleResultsLoader;
import com.amazon.deequ.repository.ResultKey;
import scala.Option;
/**
* Default noop deequ metrics repository.
*/
public class NoopMetricsRepository implements MetricsRepository {
@Override
public void save(ResultKey resultKey, AnalyzerContext analyzerContext) {
}
@Override
public Option<AnalyzerContext> loadByKey(ResultKey resultKey) {
return Option.empty();
}
@Override
public MetricsRepositoryMultipleResultsLoader load() {
return null;
}
}
|
0
|
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/AgentAzkabanFlow.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.azkaban;
import ai.databand.azkaban.events.FlowRunnerContext;
import ai.databand.config.DbndConfig;
import ai.databand.id.Uuid5;
import ai.databand.log.TruncatedLog;
import ai.databand.schema.Pair;
import ai.databand.schema.TaskRun;
import azkaban.execapp.FlowRunner;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableNode;
import azkaban.executor.Status;
import azkaban.flow.Edge;
import azkaban.flow.Flow;
import azkaban.flow.Node;
import azkaban.utils.Props;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class AgentAzkabanFlow extends AzkabanFlow {
private final FlowRunnerContext ctx;
private final Map<String, String> flowProps;
private final DbndConfig config;
private final FlowRunner flowRunner;
private final Flow flow;
private final ExecutableFlow executableFlow;
public AgentAzkabanFlow(DbndConfig config, FlowRunnerContext ctx) {
super(ctx.links(), ctx.taskContext());
this.ctx = ctx;
this.config = config;
this.flowRunner = ctx.flowRunner();
this.executableFlow = ctx.executableFlow();
this.flow = ctx.flowDef();
this.flowProps = executableFlow.getInputProps().getFlattened();
}
@Override
public boolean isTrack() {
AzkabanDbndConfig azConfig = new AzkabanDbndConfig(config);
return azConfig.isTrackingEnabled(ctx.taskContext());
}
@Override
public String user() {
return executableFlow.getSubmitUser();
}
@Override
public String pipelineName() {
return ctx.pipelineName();
}
@Override
public Map<String, String> flowProps() {
return flowProps;
}
@Override
public String envName() {
return ctx.envName();
}
@Override
public String log() {
File logFile = flowRunner.getFlowLogFile();
return new TruncatedLog(config, logFile).toString();
}
@Override
public String state() {
if (Status.KILLED == executableFlow.getStatus() || Status.CANCELLED == executableFlow.getStatus()) {
return "CANCELLED";
}
return Status.SUCCEEDED == executableFlow.getStatus() ? "SUCCESS" : "FAILED";
}
@Override
public ZonedDateTime startDate() {
return Instant.ofEpochMilli(Long.parseLong(ctx.startTime())).atZone(ZoneOffset.UTC);
}
@Override
public List<Pair<String, Map<String, String>>> jobs() {
List<Pair<String, Map<String, String>>> result = new ArrayList<>(1);
try {
Method method = flowRunner.getClass().getDeclaredMethod("loadJobProps", ExecutableNode.class);
method.setAccessible(true);
for (Node node : flow.getNodes()) {
Props props = (Props) method.invoke(flowRunner, executableFlow.getExecutableNode(node.getId()));
result.add(new Pair<>(node.getId(), props.getFlattened()));
}
} catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) {
e.printStackTrace();
}
return result;
}
@Override
protected List<List<String>> buildJobUpstreamsMap(String jobId, TaskRun taskRun) {
Set<Edge> inEdges = flow.getInEdges(jobId);
List<List<String>> upstreamsMap = new ArrayList<>(1);
if (inEdges != null && !inEdges.isEmpty()) {
for (Edge edge : inEdges) {
String upstreamTaskRunUid = new Uuid5("TASK_RUN_UID", edge.getSourceId() + azCtx.flowUuid()).toString();
upstreamsMap.add(Arrays.asList(taskRun.getTaskRunUid(), upstreamTaskRunUid));
}
}
return upstreamsMap;
}
}
|
0
|
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/AgentAzkabanJob.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.azkaban;
import ai.databand.config.DbndConfig;
import ai.databand.log.TruncatedLog;
import azkaban.execapp.JobRunner;
import azkaban.executor.Status;
import java.io.File;
import java.time.Instant;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
public class AgentAzkabanJob extends AzkabanJob {
private final DbndConfig config;
private final JobRunner jobRunner;
private final boolean isFailed;
private final ZonedDateTime startDate;
public AgentAzkabanJob(DbndConfig config,
JobRunner jobRunner,
String startTime) {
this.config = config;
this.jobRunner = jobRunner;
this.isFailed = Status.SUCCEEDED != jobRunner.getNode().getStatus();
this.startDate = Instant.ofEpochMilli(Long.parseLong(startTime)).atZone(ZoneOffset.UTC);
}
@Override
public String state() {
Status status = jobRunner.getNode().getStatus();
if (Status.KILLED == status || Status.CANCELLED == status) {
return "CANCELLED";
}
return Status.SUCCEEDED == status ? "SUCCESS" : "FAILED";
}
@Override
public String log() {
File logFile = jobRunner.getLogFile();
return new TruncatedLog(config, logFile).toString();
}
@Override
public boolean isFailed() {
return isFailed;
}
@Override
public ZonedDateTime startDate() {
return startDate;
}
}
|
0
|
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand
|
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/AgentAzkabanLinks.java
|
/*
* © Copyright Databand.ai, an IBM Company 2022
*/
package ai.databand.azkaban;
import ai.databand.azkaban.links.AzkabanLinks;
import ai.databand.azkaban.links.DefaultAzkabanLinks;
import azkaban.executor.ExecutableFlow;
import azkaban.server.AzkabanServer;
import azkaban.utils.Props;
import java.util.Map;
public class AgentAzkabanLinks implements AzkabanLinks {
private final AzkabanLinks origin;
public AgentAzkabanLinks(ExecutableFlow execFlow) {
Props systemProps = AzkabanServer.getAzkabanProperties();
String hostName = systemProps.get("server.hostname");
String port = systemProps.get("server.port");
String protocol = Boolean.TRUE.toString().equalsIgnoreCase(systemProps.get("jetty.use.ssl")) ? "https" : "http";
this.origin = new DefaultAzkabanLinks(
execFlow.getProjectName(),
execFlow.getId(),
String.valueOf(execFlow.getExecutionId()),
protocol,
hostName,
port
);
}
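// Illustrative result (invented values): with server.hostname=azkaban.local,
// server.port=8443 and jetty.use.ssl=true, flow and job links are rooted at
// https://azkaban.local:8443 for the flow's project/flow-id/execution-id triple.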
public Map<String, String> flowLinks() {
return origin.flowLinks();
}
public Map<String, String> jobLinks(String jobId) {
return origin.jobLinks(jobId);
}
}
|