index
int64
repo_id
string
file_path
string
content
string
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/DisplayName.java
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jmx; import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import javax.management.DescriptorKey; /** * DisplayName - This annotation allows to supply a display name for a method in the MBean * interface. */ @Documented @Target(ElementType.METHOD) @Retention(RetentionPolicy.RUNTIME) public @interface DisplayName { @DescriptorKey("displayName") String value(); }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxExecutionController.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

import azkaban.executor.ExecutionController;
import java.util.ArrayList;
import java.util.List;

/**
 * JMX MBean that surfaces execution-monitoring state of an {@link ExecutionController}.
 * Every operation is a thin, read-only delegation to the wrapped controller.
 */
public class JmxExecutionController implements JmxExecutionControllerMBean {

  private final ExecutionController controller;

  public JmxExecutionController(final ExecutionController controller) {
    this.controller = controller;
  }

  /** @return the number of flows currently reported as running. */
  @Override
  public int getNumRunningFlows() {
    final int runningCount = this.controller.getRunningFlows().size();
    return runningCount;
  }

  /** @return a defensive copy of the primary executor host:port list. */
  @Override
  public List<String> getPrimaryExecutorHostPorts() {
    final List<String> hostPorts = new ArrayList<>(this.controller.getPrimaryServerHosts());
    return hostPorts;
  }

  /** @return the running flow ids rendered into a single display string. */
  @Override
  public String getRunningFlows() {
    final Object runningIds = this.controller.getRunningFlowIds();
    return runningIds.toString();
  }

  /** @return the queued flow ids rendered into a single display string. */
  @Override
  public String getQueuedFlows() {
    final Object queuedIds = this.controller.getQueuedFlowIds();
    return queuedIds.toString();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxExecutionControllerMBean.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

import java.util.List;

/**
 * JMX API for execution controller to monitor executions.
 *
 * <p>Interface methods are implicitly public, so the redundant {@code public} modifiers have
 * been dropped. The {@code @DisplayName} values feed the "displayName" descriptor field shown
 * by JMX consoles.
 */
public interface JmxExecutionControllerMBean {

  /** @return number of flows currently running. */
  @DisplayName("OPERATION: getNumRunningFlows")
  int getNumRunningFlows();

  /** @return running flow ids rendered as a single string. */
  @DisplayName("OPERATION: getRunningFlows")
  String getRunningFlows();

  /** @return host:port entries of the primary executor servers. */
  @DisplayName("OPERATION: getPrimaryExecutorHostPorts")
  List<String> getPrimaryExecutorHostPorts();

  /** @return queued flow ids rendered as a single string. */
  @DisplayName("OPERATION: getQueuedFlows")
  String getQueuedFlows();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxExecutorManager.java
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

import azkaban.executor.ExecutorManager;
import java.util.ArrayList;
import java.util.List;

/**
 * JMX MBean implementation exposing {@link ExecutorManager} runtime state: running/queued
 * flows, manager- and queue-processor-thread health, and executor selection configuration.
 * Every operation is a read-only delegation to the wrapped manager.
 */
public class JmxExecutorManager implements JmxExecutorManagerMBean {

  private final ExecutorManager manager;

  public JmxExecutorManager(final ExecutorManager manager) {
    this.manager = manager;
  }

  // Count of flows the manager currently reports as running.
  @Override
  public int getNumRunningFlows() {
    return this.manager.getRunningFlows().size();
  }

  @Override
  public String getExecutorThreadState() {
    return this.manager.getExecutorManagerThreadState().toString();
  }

  @Override
  public String getExecutorThreadStage() {
    return this.manager.getExecutorThreadStage();
  }

  @Override
  public boolean isThreadActive() {
    return this.manager.isExecutorManagerThreadActive();
  }

  @Override
  public Long getLastThreadCheckTime() {
    return this.manager.getLastExecutorManagerThreadCheckTime();
  }

  // Defensive copy so JMX clients cannot mutate the manager's internal collection.
  @Override
  public List<String> getPrimaryExecutorHostPorts() {
    return new ArrayList<>(this.manager.getPrimaryServerHosts());
  }

  @Override
  public String getRunningFlows() {
    return this.manager.getRunningFlowIds();
  }

  @Override
  public boolean isQueueProcessorActive() {
    return this.manager.isQueueProcessorThreadActive();
  }

  @Override
  public String getQueuedFlows() {
    return this.manager.getQueuedFlowIds();
  }

  @Override
  public String getQueueProcessorThreadState() {
    return this.manager.getQueueProcessorThreadState().toString();
  }

  // Defensive copy, as above.
  @Override
  public List<String> getAvailableExecutorComparatorNames() {
    return new ArrayList<>(this.manager.getAvailableExecutorComparatorNames());
  }

  // Defensive copy, as above.
  @Override
  public List<String> getAvailableExecutorFilterNames() {
    return new ArrayList<>(this.manager.getAvailableExecutorFilterNames());
  }

  @Override
  public long getLastSuccessfulExecutorInfoRefresh() {
    return this.manager.getLastSuccessfulExecutorInfoRefresh();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxExecutorManagerMBean.java
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

import java.util.List;

/**
 * JMX API for {@code ExecutorManager} monitoring: running/queued flow counts, manager and
 * queue-processor thread health, and executor selection configuration. The {@code @DisplayName}
 * values populate the "displayName" descriptor field shown by JMX consoles.
 */
public interface JmxExecutorManagerMBean {

  @DisplayName("OPERATION: getNumRunningFlows")
  public int getNumRunningFlows();

  @DisplayName("OPERATION: getRunningFlows")
  public String getRunningFlows();

  @DisplayName("OPERATION: getExecutorThreadState")
  public String getExecutorThreadState();

  @DisplayName("OPERATION: getExecutorThreadStage")
  public String getExecutorThreadStage();

  @DisplayName("OPERATION: isThreadActive")
  public boolean isThreadActive();

  // Boxed Long: implementations may conceivably return null — TODO confirm against callers.
  @DisplayName("OPERATION: getLastThreadCheckTime")
  public Long getLastThreadCheckTime();

  @DisplayName("OPERATION: getPrimaryExecutorHostPorts")
  public List<String> getPrimaryExecutorHostPorts();

  @DisplayName("OPERATION: isQueueProcessorActive")
  public boolean isQueueProcessorActive();

  @DisplayName("OPERATION: getQueuedFlows")
  public String getQueuedFlows();

  @DisplayName("OPERATION: getQueueProcessorThreadState")
  public String getQueueProcessorThreadState();

  @DisplayName("OPERATION: getAvailableExecutorComparatorNames")
  List<String> getAvailableExecutorComparatorNames();

  @DisplayName("OPERATION: getAvailableExecutorFilterNames")
  List<String> getAvailableExecutorFilterNames();

  @DisplayName("OPERATION: getLastSuccessfulExecutorInfoRefresh")
  long getLastSuccessfulExecutorInfoRefresh();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxJettyServer.java
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

import org.mortbay.jetty.Connector;
import org.mortbay.jetty.Server;

/**
 * JMX MBean exposing lifecycle, thread-pool, and connector statistics of an embedded Jetty
 * {@link Server}. All operations delegate to the server or to its first connector.
 */
public class JmxJettyServer implements JmxJettyServerMBean {

  private final Server server;
  private final Connector connector;

  public JmxJettyServer(final Server server) {
    this.server = server;
    // NOTE(review): assumes the server has at least one connector; getConnectors()[0] throws
    // otherwise — TODO confirm this invariant holds at all call sites.
    this.connector = server.getConnectors()[0];
  }

  // --- Server lifecycle state ---

  @Override
  public boolean isRunning() {
    return this.server.isRunning();
  }

  @Override
  public boolean isFailed() {
    return this.server.isFailed();
  }

  @Override
  public boolean isStopped() {
    return this.server.isStopped();
  }

  // --- Thread pool ---

  @Override
  public int getNumThreads() {
    return this.server.getThreadPool().getThreads();
  }

  @Override
  public int getNumIdleThreads() {
    return this.server.getThreadPool().getIdleThreads();
  }

  // --- Connector addressing ---

  @Override
  public String getHost() {
    return this.connector.getHost();
  }

  @Override
  public int getPort() {
    return this.connector.getPort();
  }

  @Override
  public int getConfidentialPort() {
    return this.connector.getConfidentialPort();
  }

  // --- Connector statistics (only populated while stats collection is on) ---

  @Override
  public int getConnections() {
    return this.connector.getConnections();
  }

  @Override
  public int getConnectionsOpen() {
    return this.connector.getConnectionsOpen();
  }

  @Override
  public int getConnectionsOpenMax() {
    return this.connector.getConnectionsOpenMax();
  }

  @Override
  public int getConnectionsOpenMin() {
    return this.connector.getConnectionsOpenMin();
  }

  @Override
  public long getConnectionsDurationAve() {
    return this.connector.getConnectionsDurationAve();
  }

  @Override
  public long getConnectionsDurationMax() {
    return this.connector.getConnectionsDurationMax();
  }

  @Override
  public long getConnectionsDurationMin() {
    return this.connector.getConnectionsDurationMin();
  }

  @Override
  public long getConnectionsDurationTotal() {
    return this.connector.getConnectionsDurationTotal();
  }

  // Note the singular/plural name mismatch: the MBean operation is getConnectionsRequestAve
  // but the Jetty connector method is getConnectionsRequestsAve (and likewise Max/Min below).
  @Override
  public long getConnectionsRequestAve() {
    return this.connector.getConnectionsRequestsAve();
  }

  @Override
  public long getConnectionsRequestMax() {
    return this.connector.getConnectionsRequestsMax();
  }

  @Override
  public long getConnectionsRequestMin() {
    return this.connector.getConnectionsRequestsMin();
  }

  // --- Stats collection control ---

  @Override
  public void turnStatsOn() {
    this.connector.setStatsOn(true);
  }

  @Override
  public void turnStatsOff() {
    this.connector.setStatsOn(false);
  }

  @Override
  public void resetStats() {
    this.connector.statsReset();
  }

  @Override
  public boolean isStatsOn() {
    return this.connector.getStatsOn();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxJettyServerMBean.java
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

/**
 * JMX API exposing lifecycle, thread-pool, and connector statistics of the embedded Jetty
 * server, plus operations to toggle/reset Jetty's connector stats collection. The
 * {@code @DisplayName} values populate the "displayName" descriptor field shown by JMX consoles.
 */
public interface JmxJettyServerMBean {

  // Server lifecycle state.
  @DisplayName("OPERATION: isRunning")
  public boolean isRunning();

  @DisplayName("OPERATION: isFailed")
  public boolean isFailed();

  @DisplayName("OPERATION: isStopped")
  public boolean isStopped();

  // Thread pool occupancy.
  @DisplayName("OPERATION: getNumThreads")
  public int getNumThreads();

  @DisplayName("OPERATION: getNumIdleThreads")
  public int getNumIdleThreads();

  // Connector addressing.
  @DisplayName("OPERATION: getHost")
  public String getHost();

  @DisplayName("OPERATION: getPort")
  public int getPort();

  @DisplayName("OPERATION: getConfidentialPort")
  public int getConfidentialPort();

  // Connection statistics (meaningful only while stats collection is on).
  @DisplayName("OPERATION: getConnections")
  public int getConnections();

  @DisplayName("OPERATION: getConnectionsOpen")
  public int getConnectionsOpen();

  @DisplayName("OPERATION: getConnectionsOpenMax")
  public int getConnectionsOpenMax();

  @DisplayName("OPERATION: getConnectionsOpenMin")
  public int getConnectionsOpenMin();

  @DisplayName("OPERATION: getConnectionsDurationAve")
  public long getConnectionsDurationAve();

  @DisplayName("OPERATION: getConnectionsDurationMax")
  public long getConnectionsDurationMax();

  @DisplayName("OPERATION: getConnectionsDurationMin")
  public long getConnectionsDurationMin();

  @DisplayName("OPERATION: getConnectionsDurationTotal")
  public long getConnectionsDurationTotal();

  @DisplayName("OPERATION: getConnectionsRequestAve")
  public long getConnectionsRequestAve();

  @DisplayName("OPERATION: getConnectionsRequestMax")
  public long getConnectionsRequestMax();

  @DisplayName("OPERATION: getConnectionsRequestMin")
  public long getConnectionsRequestMin();

  // Stats collection control.
  @DisplayName("OPERATION: turnStatsOn")
  public void turnStatsOn();

  @DisplayName("OPERATION: turnStatsOff")
  public void turnStatsOff();

  @DisplayName("OPERATION: resetStats")
  public void resetStats();

  @DisplayName("OPERATION: isStatsOn")
  public boolean isStatsOn();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxTriggerManager.java
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jmx; import azkaban.trigger.TriggerManagerAdapter; import azkaban.trigger.TriggerManagerAdapter.TriggerJMX; public class JmxTriggerManager implements JmxTriggerManagerMBean { private final TriggerJMX jmxStats; public JmxTriggerManager(final TriggerManagerAdapter manager) { this.jmxStats = manager.getJMX(); } @Override public long getLastRunnerThreadCheckTime() { return this.jmxStats.getLastRunnerThreadCheckTime(); } @Override public boolean isRunnerThreadActive() { return this.jmxStats.isRunnerThreadActive(); } @Override public String getPrimaryTriggerHostPort() { return this.jmxStats.getPrimaryServerHost(); } // @Override // public List<String> getAllTriggerHostPorts() { // return new ArrayList<String>(manager.getAllActiveTriggerServerHosts()); // } @Override public int getNumTriggers() { return this.jmxStats.getNumTriggers(); } @Override public String getTriggerSources() { return this.jmxStats.getTriggerSources(); } @Override public String getTriggerIds() { return this.jmxStats.getTriggerIds(); } @Override public long getScannerIdleTime() { return this.jmxStats.getScannerIdleTime(); } @Override public String getScannerThreadStage() { return this.jmxStats.getScannerThreadStage(); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxTriggerManagerMBean.java
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jmx; public interface JmxTriggerManagerMBean { @DisplayName("OPERATION: getLastThreadCheckTime") public long getLastRunnerThreadCheckTime(); @DisplayName("OPERATION: isThreadActive") public boolean isRunnerThreadActive(); @DisplayName("OPERATION: getPrimaryTriggerHostPort") public String getPrimaryTriggerHostPort(); // @DisplayName("OPERATION: getAllTriggerHostPorts") // public List<String> getAllTriggerHostPorts(); @DisplayName("OPERATION: getNumTriggers") public int getNumTriggers(); @DisplayName("OPERATION: getTriggerSources") public String getTriggerSources(); @DisplayName("OPERATION: getTriggerIds") public String getTriggerIds(); @DisplayName("OPERATION: getScannerIdleTime") public long getScannerIdleTime(); @DisplayName("OPERATION: getScannerThreadStage") public String getScannerThreadStage(); }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/JmxTriggerRunnerManagerMBean.java
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jmx;

/**
 * JMX API for monitoring the trigger runner: runner-thread liveness, trigger registry
 * contents, and scanner idle time. The {@code @DisplayName} values populate the "displayName"
 * descriptor field shown by JMX consoles.
 */
public interface JmxTriggerRunnerManagerMBean {

  // Timestamp of the last runner-thread liveness check.
  @DisplayName("OPERATION: getLastRunnerThreadCheckTime")
  public long getLastRunnerThreadCheckTime();

  // Number of registered triggers.
  @DisplayName("OPERATION: getNumTriggers")
  public int getNumTriggers();

  // Whether the runner thread is currently alive.
  @DisplayName("OPERATION: isRunnerThreadActive")
  public boolean isRunnerThreadActive();

  // Trigger sources rendered as a single string.
  @DisplayName("OPERATION: getTriggerSources")
  public String getTriggerSources();

  // Trigger ids rendered as a single string.
  @DisplayName("OPERATION: getTriggerIds")
  public String getTriggerIds();

  // How long the trigger scanner has been idle.
  @DisplayName("OPERATION: getScannerIdleTime")
  public long getScannerIdleTime();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jmx/ParameterName.java
/* * Copyright 2011 Adconion, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jmx; /** * ParameterName - This annotation allows to supply a parameter name for a method in the MBean * interface. */ import java.lang.annotation.Documented; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import javax.management.DescriptorKey; @Documented @Target(ElementType.PARAMETER) @Retention(RetentionPolicy.RUNTIME) public @interface ParameterName { @DescriptorKey("parameterName") String value(); }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/AbstractJob.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobExecutor; import azkaban.utils.Props; import org.apache.log4j.Logger; /** * Base Job */ public abstract class AbstractJob implements Job { public static final String JOB_TYPE = "type"; public static final String JOB_CLASS = "job.class"; public static final String JOB_PATH = "job.path"; public static final String JOB_FULLPATH = "job.fullpath"; public static final String JOB_ID = "job.id"; private final String _id; private final Logger _log; private volatile double _progress; protected AbstractJob(final String id, final Logger log) { this._id = id; this._log = log; this._progress = 0.0; } @Override public String getId() { return this._id; } @Override public double getProgress() { return this._progress; } public void setProgress(final double progress) { this._progress = progress; } @Override public void cancel() throws Exception { throw new RuntimeException("Job " + this._id + " does not support cancellation!"); } @Override public Props getJobGeneratedProperties() { return new Props(); } @Override public abstract void run() throws Exception; @Override public boolean isCanceled() { return false; } public Logger getLog() { return this._log; } public void debug(final String message) { this._log.debug(message); } public void debug(final String message, final Throwable t) { this._log.debug(message, t); } public void info(final String message) { this._log.info(message); } public void 
info(final String message, final Throwable t) { this._log.info(message, t); } public void warn(final String message) { this._log.warn(message); } public void warn(final String message, final Throwable t) { this._log.warn(message, t); } public void error(final String message) { this._log.error(message); } public void error(final String message, final Throwable t) { this._log.error(message, t); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/AbstractProcessJob.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.jobExecutor;

import azkaban.utils.JSONUtils;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Utils;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;
import org.apache.commons.fileupload.util.Streams;
import org.apache.commons.io.IOUtils;
import org.apache.log4j.Logger;

/**
 * A revised process-based job: manages the job/system/private property sets, materializes them
 * as temp files for the child process, and loads generated output properties back in.
 */
public abstract class AbstractProcessJob extends AbstractJob {

  public static final String ENV_PREFIX = "env.";
  public static final String WORKING_DIR = "working.dir";
  public static final String JOB_PROP_ENV = "JOB_PROP_FILE";
  public static final String JOBTYPE_PRIVATE_PROP_ENV = "JOBTYPE_PRIVATE_PROP_FILE";
  public static final String JOB_NAME_ENV = "JOB_NAME";
  public static final String JOB_OUTPUT_PROP_FILE = "JOB_OUTPUT_PROP_FILE";
  private static final String SENSITIVE_JOB_PROP_NAME_SUFFIX = "_X";
  private static final String SENSITIVE_JOB_PROP_VALUE_PLACEHOLDER = "[MASKED]";
  private static final String JOB_DUMP_PROPERTIES_IN_LOG = "job.dump.properties";

  // Notes: These variables will be accessed directly through inherited classes,
  // which are out of this package. Please keep them protected.
  protected final String _jobPath;
  protected String _cwd;
  protected volatile Props jobProps;
  protected volatile Props sysProps;
  protected volatile Props privateProps;
  private volatile Props generatedProperties;

  protected AbstractProcessJob(final String jobId, final Props sysProps,
      final Props jobProps, final Logger log) {
    super(jobId, log);
    this.jobProps = jobProps;
    this.sysProps = sysProps;
    this.privateProps = null;
    this._cwd = getWorkingDirectory();
    this._jobPath = this._cwd;
  }

  protected AbstractProcessJob(final String jobId, final Props sysProps,
      final Props jobProps, final Props privateProps, final Logger log) {
    super(jobId, log);
    this.jobProps = jobProps;
    this.sysProps = sysProps;
    this.privateProps = privateProps;
    this._cwd = getWorkingDirectory();
    this._jobPath = this._cwd;
  }

  /**
   * This public function will be deprecated since it tends to be a Utility Function
   * Please use azkaban.utils.FileIOUtils.createOutputPropsFile(String, String, String) instead.
   *
   * @param id job id used as the temp-file prefix
   * @param workingDir directory the temp file is created in
   * @return the created temp file into which the child process writes output properties
   */
  @Deprecated
  public File createOutputPropsFile(final String id, final String workingDir) {
    this.info("cwd=" + workingDir);
    try {
      final File directory = new File(workingDir);
      final File tempFile = File.createTempFile(id + "_output_", "_tmp", directory);
      return tempFile;
    } catch (final IOException e) {
      this.error("Failed to create temp output property file :", e);
      throw new RuntimeException("Failed to create temp output property file ", e);
    }
  }

  public Props getJobProps() {
    return this.jobProps;
  }

  public Props getSysProps() {
    return this.sysProps;
  }

  /**
   * Re-configure Job Props
   *
   * @param props new props
   */
  public void setJobProps(Props props) {
    this.jobProps = props;
  }

  /**
   * Re-configure System Props
   *
   * @param props props
   */
  public void setSysProps(Props props) {
    this.sysProps = props;
  }

  /** @return job props and sys props merged (sys props win), plus any extra props. */
  public Props getAllProps() {
    Props props = new Props();
    props.putAll(jobProps);
    props.putAll(sysProps);
    return appendExtraProps(props);
  }

  /** Hook for subclasses to append extra props; default is a no-op. */
  public Props appendExtraProps(Props props) {
    return props;
  }

  public String getJobPath() {
    return this._jobPath;
  }

  /** Resolves variable references inside the job props in place. */
  protected void resolveProps() {
    this.jobProps = PropsUtils.resolveProps(this.jobProps);
  }

  /**
   * prints the current Job props to the Job log. Only active when the job opts in via the
   * "job.dump.properties" flag; values of sensitive props (name ending in "_X") are masked.
   * Failures here are logged and swallowed on purpose — logging must not fail the job.
   */
  protected void logJobProperties() {
    if (this.jobProps != null &&
        this.jobProps.getBoolean(JOB_DUMP_PROPERTIES_IN_LOG, false)) {
      try {
        final Map<String, String> flattenedProps = this.jobProps.getFlattened();
        this.info("****** Job properties ******");
        this.info(String.format("- Note : value is masked if property name ends with '%s'.",
            SENSITIVE_JOB_PROP_NAME_SUFFIX));
        for (final Map.Entry<String, String> entry : flattenedProps.entrySet()) {
          final String key = entry.getKey();
          final String value = key.endsWith(SENSITIVE_JOB_PROP_NAME_SUFFIX) ?
              SENSITIVE_JOB_PROP_VALUE_PLACEHOLDER :
              entry.getValue();
          this.info(String.format("%s=%s", key, value));
        }
        this.info("****** End Job properties ******");
      } catch (final Exception ex) {
        this.error("failed to log job properties ", ex);
      }
    }
  }

  @Override
  public Props getJobGeneratedProperties() {
    return this.generatedProperties;
  }

  /**
   * initialize temporary and final property file
   *
   * @return {tmpPropFile, outputPropFile, privatePropFile} — the third entry stays null when
   *     there are no private props
   */
  public File[] initPropsFiles() {
    // Create job properties file with additionally all input generated properties.
    final File[] files = new File[3];
    files[0] = createFlattenedPropsFile(this.jobProps, this._cwd, "_job_props_");
    this.jobProps.put(ENV_PREFIX + JOB_PROP_ENV, files[0].getAbsolutePath());
    this.jobProps.put(ENV_PREFIX + JOB_NAME_ENV, getId());

    files[1] = createOutputPropsFile(getId(), this._cwd);
    this.jobProps.put(ENV_PREFIX + JOB_OUTPUT_PROP_FILE, files[1].getAbsolutePath());

    // Create job's private properties file.
    if (this.privateProps != null) {
      files[2] = createFlattenedPropsFile(this.privateProps, this._cwd, "_job_private_props_");
      this.jobProps.put(ENV_PREFIX + JOBTYPE_PRIVATE_PROP_ENV, files[2].getAbsolutePath());
    }
    return files;
  }

  public String getCwd() {
    return this._cwd;
  }

  /**
   * Get Environment Variables from the Job Properties Table
   *
   * @return All Job Properties with "env." prefix
   */
  public Map<String, String> getEnvironmentVariables() {
    final Props props = getJobProps();
    final Map<String, String> envMap = props.getMapByPrefix(ENV_PREFIX);
    return envMap;
  }

  /**
   * Get Working Directory from Job Properties when it is presented. Otherwise, the working
   * directory is the jobPath
   *
   * @return working directory property
   */
  public String getWorkingDirectory() {
    final String workingDir = getJobProps().getString(WORKING_DIR, this._jobPath);
    return Utils.ifNull(workingDir, "");
  }

  /**
   * This public function will be deprecated since it tends to be a Utility function
   * Please use azkaban.utils.FileIOUtils.loadOutputFileProps(String file) instead.
   *
   * <p>Returns empty Props (rather than failing) when the file is missing or unparsable; both
   * cases are logged. This best-effort behavior is deliberate.
   */
  @Deprecated
  public Props loadOutputFileProps(final File outputPropertiesFile) {
    InputStream reader = null;
    try {
      this.info("output properties file=" + outputPropertiesFile.getAbsolutePath());
      reader = new BufferedInputStream(new FileInputStream(outputPropertiesFile));
      final Props outputProps = new Props();
      final String content = Streams.asString(reader).trim();
      if (!content.isEmpty()) {
        final Map<String, Object> propMap =
            (Map<String, Object>) JSONUtils.parseJSONFromString(content);
        for (final Map.Entry<String, Object> entry : propMap.entrySet()) {
          outputProps.put(entry.getKey(), entry.getValue().toString());
        }
      }
      return outputProps;
    } catch (final FileNotFoundException e) {
      this.info(
          String.format("File[%s] wasn't found, returning empty props.", outputPropertiesFile));
      return new Props();
    } catch (final Exception e) {
      this.error(
          "Exception thrown when trying to load output file props. Returning empty Props instead of failing. Is this really the best thing to do?",
          e);
      return new Props();
    } finally {
      IOUtils.closeQuietly(reader);
    }
  }

  /**
   * This public function will be deprecated since it tends to be a Utility function
   * Please use azkaban.utils.FileIOUtils.createOutputPropsFile(String, String, String) instead.
   */
  @Deprecated
  private File createFlattenedPropsFile(final Props props, final String workingDir,
      String propsName) {
    try {
      final File directory = new File(workingDir);
      // The temp file prefix must be at least 3 characters.
      final File tempFile = File.createTempFile(getId() + propsName, "_tmp", directory);
      props.storeFlattened(tempFile);
      return tempFile;
    } catch (final IOException e) {
      // FIX: chain the IOException as the cause instead of dropping it, so the real reason
      // (permissions, disk full, missing dir) survives into the wrapped exception.
      throw new RuntimeException(
          "Failed to create temp property file. workingDir = " + workingDir, e);
    }
  }

  /**
   * Generate properties from output file and set to props tables
   *
   * @param outputFile file the child process wrote its output properties to
   */
  protected void generateProperties(final File outputFile) {
    this.generatedProperties = loadOutputFileProps(outputFile);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/JavaProcessJob.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobExecutor; import azkaban.server.AzkabanServer; import azkaban.utils.MemConfValue; import azkaban.utils.Pair; import azkaban.utils.Props; import azkaban.utils.Utils; import java.io.File; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.apache.log4j.Logger; public class JavaProcessJob extends ProcessJob { public static final String CLASSPATH = "classpath"; public static final String GLOBAL_CLASSPATH = "global.classpaths"; public static final String JAVA_CLASS = "java.class"; public static final String INITIAL_MEMORY_SIZE = "Xms"; public static final String MAX_MEMORY_SIZE = "Xmx"; public static final String MAIN_ARGS = "main.args"; public static final String JVM_PARAMS = "jvm.args"; public static final String GLOBAL_JVM_PARAMS = "global.jvm.args"; public static final String DEPENDENCY_CLS_RAMP_PROP_PREFIX = "azkaban.ramp.jar:"; public static final String DEPENDENCY_REG_RAMP_PROP_PREFIX = "azkaban.ramp.reg:"; public static final String DEPENDENCY_CFG_RAMP_PROP_PREFIX = "azkaban.ramp.cfg:"; public static final String DEFAULT_INITIAL_MEMORY_SIZE = "64M"; public static final String DEFAULT_MAX_MEMORY_SIZE = "256M"; public static String JAVA_COMMAND = "java"; public JavaProcessJob(final String jobid, final Props sysProps, final Props jobProps, final Logger logger) { super(jobid, sysProps, jobProps, logger); } 
public JavaProcessJob(final String jobid, final Props sysProps, final Props jobProps, final Props privateProps, final Logger logger) { super(jobid, sysProps, jobProps, privateProps, logger); } @Override protected List<String> getCommandList() { final ArrayList<String> list = new ArrayList<>(); list.add(createCommandLine()); return list; } protected String createCommandLine() { String command = JAVA_COMMAND + " "; command += getJVMArguments() + " "; command += "-Xms" + getInitialMemorySize() + " "; command += "-Xmx" + getMaxMemorySize() + " "; command += getClassPathParam(); command += getJavaClass() + " "; command += getMainArguments(); return command; } protected String getJavaClass() { return getJobProps().getString(JAVA_CLASS); } protected String getClassPathParam() { final List<String> classPath = getClassPaths(); if (classPath == null || classPath.size() == 0) { throw new IllegalArgumentException( "No classpath defined and no .jar files found in job directory. Can't run java command."); } return "-cp " + createArguments(classPath, ":") + " "; } protected List<String> getClassPaths() { final List<String> classPaths = getJobProps().getStringList(CLASSPATH, null, ","); final ArrayList<String> classpathList = new ArrayList<>(); // Adding global properties used system wide. if (getJobProps().containsKey(GLOBAL_CLASSPATH)) { final List<String> globalClasspath = getJobProps().getStringList(GLOBAL_CLASSPATH); for (final String global : globalClasspath) { getLog().info("Adding to global classpath:" + global); classpathList.add(global); } } if (classPaths == null || classPaths.isEmpty()) { final File path = new File(getPath()); getLog().info( "No classpath specified. 
Trying to load classes from " + path); if (path != null) { for (final File file : path.listFiles()) { if (file.getName().endsWith(".jar")) { classpathList.add(file.getName()); } } } } else { classpathList.addAll(classPaths); } return classpathList; } protected Map<String, String> getRampItems(String prefix) { Map<String, String> rampItems = getJobProps() .getKeySet() .stream() .filter(propKey -> propKey.startsWith(prefix)) .collect(Collectors.toMap( key -> key, key -> getJobProps().get(key) )); if (!rampItems.isEmpty()) { getLog().info(String.format("[Ramp Items] : %s", rampItems.toString())); } return rampItems; } protected List<String> mergeSysTypeClassPaths(List<String> classPath) { Utils.mergeTypeClassPaths(classPath, getSysProps().getStringList("jobtype.classpath", null, ","), getSysProps().get("plugin.dir")); Utils.mergeStringList(classPath, getSysProps().getStringList("jobtype.global.classpath", null, ",")); return classPath; } protected String getInitialMemorySize() { return getJobProps().getString(INITIAL_MEMORY_SIZE, DEFAULT_INITIAL_MEMORY_SIZE); } protected String getMaxMemorySize() { return getJobProps().getString(MAX_MEMORY_SIZE, DEFAULT_MAX_MEMORY_SIZE); } protected String getMainArguments() { return getJobProps().getString(MAIN_ARGS, ""); } protected String getJVMArguments() { final String globalJVMArgs = getJobProps().getString(GLOBAL_JVM_PARAMS, null); if (globalJVMArgs == null) { return getJobProps().getString(JVM_PARAMS, ""); } return globalJVMArgs + " " + getJobProps().getString(JVM_PARAMS, ""); } protected String createArguments(final List<String> arguments, final String separator) { if (arguments != null && arguments.size() > 0) { String param = ""; for (final String arg : arguments) { param += arg + separator; } return param.substring(0, param.length() - 1); } return ""; } @Override protected Pair<Long, Long> getProcMemoryRequirement() throws Exception { final String strXms = getInitialMemorySize(); final String strXmx = getMaxMemorySize(); 
final long xms = Utils.parseMemString(strXms); final long xmx = Utils.parseMemString(strXmx); final Props azkabanProperties = AzkabanServer.getAzkabanProperties(); if (azkabanProperties != null) { final MemConfValue maxXms = MemConfValue.parseMaxXms(azkabanProperties); final MemConfValue maxXmx = MemConfValue.parseMaxXmx(azkabanProperties); if (xms > maxXms.getSize()) { throw new Exception( String.format("%s: Xms value has exceeded the allowed limit (max Xms = %s)", getId(), maxXms.getString())); } if (xmx > maxXmx.getSize()) { throw new Exception( String.format("%s: Xmx value has exceeded the allowed limit (max Xmx = %s)", getId(), maxXmx.getString())); } } return new Pair<>(xms, xmx); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/Job.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobExecutor;

import azkaban.utils.Props;

/**
 * Raw job interface.
 *
 * A job is a unit of work to perform. Implementations are required to have a
 * constructor of the form Job(String jobId, Props props).
 */
public interface Job {

  /**
   * Returns a unique (should be checked in xml) string name/id for the Job.
   */
  String getId();

  /**
   * Runs the job. In general this method can only be run once. Must either
   * succeed or throw an exception.
   */
  void run() throws Exception;

  /**
   * Best-effort attempt to cancel the job.
   *
   * @throws Exception If cancel fails
   */
  void cancel() throws Exception;

  /**
   * Returns a progress report between [0 - 1.0] to indicate the percentage
   * complete.
   *
   * @throws Exception If getting progress fails
   */
  double getProgress() throws Exception;

  /**
   * Gets the properties generated by this job, to be passed downstream.
   */
  Props getJobGeneratedProperties();

  /**
   * Determines whether the job was cancelled.
   */
  boolean isCanceled();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/NoopJob.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobExecutor; import azkaban.utils.Props; import org.apache.log4j.Logger; /** * A no-op job. */ public class NoopJob implements Job { private final String jobId; public NoopJob(final String jobid, final Props props, final Props jobProps, final Logger log) { this.jobId = jobid; } @Override public String getId() { return this.jobId; } @Override public void run() throws Exception { } @Override public void cancel() throws Exception { } @Override public double getProgress() throws Exception { return 0; } @Override public Props getJobGeneratedProperties() { return new Props(); } @Override public boolean isCanceled() { return false; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/ProcessJob.java
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobExecutor; import static azkaban.Constants.ConfigurationKeys.AZKABAN_SERVER_GROUP_NAME; import static azkaban.Constants.ConfigurationKeys.AZKABAN_SERVER_NATIVE_LIB_FOLDER; import static azkaban.ServiceProvider.SERVICE_PROVIDER; import azkaban.Constants; import azkaban.Constants.JobProperties; import azkaban.flow.CommonJobProperties; import azkaban.jobExecutor.utils.process.AzkabanProcess; import azkaban.jobExecutor.utils.process.AzkabanProcessBuilder; import azkaban.metrics.CommonMetrics; import azkaban.utils.ExecuteAsUser; import azkaban.utils.Pair; import azkaban.utils.Props; import azkaban.utils.SystemMemoryInfo; import azkaban.utils.Utils; import com.google.common.annotations.VisibleForTesting; import java.io.File; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; import org.apache.log4j.Logger; /** * A job that runs a simple unix command */ public class ProcessJob extends AbstractProcessJob { public static final String COMMAND = "command"; public static final String AZKABAN_MEMORY_CHECK = "azkaban.memory.check"; // Use azkaban.Constants.ConfigurationKeys.AZKABAN_SERVER_NATIVE_LIB_FOLDER instead @Deprecated public static final String NATIVE_LIB_FOLDER = "azkaban.native.lib"; public static 
final String EXECUTE_AS_USER = "execute.as.user"; public static final String KRB5CCNAME = "KRB5CCNAME"; private static final Duration KILL_TIME = Duration.ofSeconds(30); private static final String MEMCHECK_ENABLED = "memCheck.enabled"; private static final String CHOWN = "/bin/chown"; private static final String CREATE_FILE = "touch"; private static final int SUCCESSFUL_EXECUTION = 0; private static final String TEMP_FILE_NAME = "user_can_write"; private final CommonMetrics commonMetrics; private volatile AzkabanProcess process; private volatile boolean killed = false; // For testing only. True if the job process exits successfully. private volatile boolean success; public ProcessJob(final String jobId, final Props sysProps, final Props jobProps, final Logger log) { super(jobId, sysProps, jobProps, log); // TODO: reallocf fully guicify CommonMetrics through ProcessJob dependents this.commonMetrics = SERVICE_PROVIDER.getInstance(CommonMetrics.class); } public ProcessJob(final String jobId, final Props sysProps, final Props jobProps, final Props privateProps, final Logger log) { super(jobId, sysProps, jobProps, privateProps, log); // TODO: reallocf fully guicify CommonMetrics through ProcessJob dependents this.commonMetrics = SERVICE_PROVIDER.getInstance(CommonMetrics.class); } /** * Splits the command into a unix like command line structure. Quotes and single quotes are * treated as nested strings. 
*/ public static String[] partitionCommandLine(final String command) { final ArrayList<String> commands = new ArrayList<>(); int index = 0; StringBuffer buffer = new StringBuffer(command.length()); boolean isApos = false; boolean isQuote = false; while (index < command.length()) { final char c = command.charAt(index); switch (c) { case ' ': if (!isQuote && !isApos) { final String arg = buffer.toString(); buffer = new StringBuffer(command.length() - index); if (arg.length() > 0) { commands.add(arg); } } else { buffer.append(c); } break; case '\'': if (!isQuote) { isApos = !isApos; } else { buffer.append(c); } break; case '"': if (!isApos) { isQuote = !isQuote; } else { buffer.append(c); } break; default: buffer.append(c); } index++; } if (buffer.length() > 0) { final String arg = buffer.toString(); commands.add(arg); } return commands.toArray(new String[commands.size()]); } @Override public void run() throws Exception { try { resolveProps(); } catch (final Exception e) { handleError("Bad property definition! " + e.getMessage(), e); } if (this.getSysProps().getBoolean(MEMCHECK_ENABLED, true) && this.getJobProps().getBoolean(AZKABAN_MEMORY_CHECK, true)) { final Pair<Long, Long> memPair = getProcMemoryRequirement(); final long xms = memPair.getFirst(); final long xmx = memPair.getSecond(); // retry backoff in ms final String oomMsg = String .format("Cannot request memory (Xms %d kb, Xmx %d kb) from system for job %s", xms, xmx, getId()); int attempt; boolean isMemGranted = true; //todo HappyRay: move to proper Guice after this class is refactored. 
final SystemMemoryInfo memInfo = SERVICE_PROVIDER.getInstance(SystemMemoryInfo.class); for (attempt = 1; attempt <= Constants.MEMORY_CHECK_RETRY_LIMIT; attempt++) { isMemGranted = memInfo.canSystemGrantMemory(xmx); if (isMemGranted) { info(String.format("Memory granted for job %s", getId())); if (attempt > 1) { this.commonMetrics.decrementOOMJobWaitCount(); } break; } if (attempt < Constants.MEMORY_CHECK_RETRY_LIMIT) { info(String.format(oomMsg + ", sleep for %s secs and retry, attempt %s of %s", TimeUnit.MILLISECONDS.toSeconds( Constants.MEMORY_CHECK_INTERVAL_MS), attempt, Constants.MEMORY_CHECK_RETRY_LIMIT)); if (attempt == 1) { this.commonMetrics.incrementOOMJobWaitCount(); } synchronized (this) { try { this.wait(Constants.MEMORY_CHECK_INTERVAL_MS); } catch (final InterruptedException e) { info(String .format("Job %s interrupted while waiting for memory check retry", getId())); } } if (this.killed) { this.commonMetrics.decrementOOMJobWaitCount(); info(String.format("Job %s was killed while waiting for memory check retry", getId())); return; } } } if (!isMemGranted) { this.commonMetrics.decrementOOMJobWaitCount(); handleError(oomMsg, null); } } List<String> commands = null; try { commands = getCommandList(); } catch (final Exception e) { handleError("Job set up failed: " + e.getMessage(), e); } final long startMs = System.currentTimeMillis(); if (commands == null) { handleError("There are no commands to execute", null); } info(commands.size() + " commands to execute."); final File[] propFiles = initPropsFiles(); // change krb5ccname env var so that each job execution gets its own cache final Map<String, String> envVars = getEnvironmentVariables(); envVars.put(KRB5CCNAME, getKrb5ccname(this.getJobProps())); // determine whether to run as Azkaban or run as effectiveUser, // by default, run as effectiveUser String executeAsUserBinaryPath = null; String effectiveUser = null; final boolean isExecuteAsUser = this.getSysProps().getBoolean(EXECUTE_AS_USER, true); //Get 
list of users we never execute flows as. (ie: root, azkaban) final Set<String> blackListedUsers = new HashSet<>( Arrays.asList( this.getSysProps() .getString(Constants.ConfigurationKeys.BLACK_LISTED_USERS, "root,azkaban") .split(",") ) ); // nativeLibFolder specifies the path for execute-as-user file, // which will change user from Azkaban to effectiveUser if (isExecuteAsUser) { final String nativeLibFolder = this.getSysProps().getString(AZKABAN_SERVER_NATIVE_LIB_FOLDER); executeAsUserBinaryPath = String.format("%s/%s", nativeLibFolder, "execute-as-user"); effectiveUser = getEffectiveUser(this.getJobProps()); // Throw exception if Azkaban tries to run flow as a prohibited user if (blackListedUsers.contains(effectiveUser)) { throw new RuntimeException( String.format("Not permitted to proxy as '%s' through Azkaban", effectiveUser) ); } // Set parent directory permissions to <uid>:azkaban so user can write in their execution directory // if the directory is not permissioned correctly already (should happen once per execution) if (!canWriteInCurrentWorkingDirectory(effectiveUser)) { info("Changing current working directory ownership"); assignUserFileOwnership(effectiveUser, getWorkingDirectory()); } // Set property file permissions to <uid>:azkaban so user can write to their prop files // in order to pass properties from one job to another, except the last one for (int i = 0; i < 2; i++) { info("Changing properties files ownership"); assignUserFileOwnership(effectiveUser, propFiles[i].getAbsolutePath()); } } for (String command : commands) { AzkabanProcessBuilder builder = null; if (isExecuteAsUser) { command = String.format("%s %s %s", executeAsUserBinaryPath, effectiveUser, command); info("Command: " + command); builder = new AzkabanProcessBuilder(partitionCommandLine(command)) .setEnv(envVars).setWorkingDir(getCwd()).setLogger(getLog()) .enableExecuteAsUser().setExecuteAsUserBinaryPath(executeAsUserBinaryPath) .setEffectiveUser(effectiveUser); } else { 
info("Command: " + command); builder = new AzkabanProcessBuilder(partitionCommandLine(command)) .setEnv(envVars).setWorkingDir(getCwd()).setLogger(getLog()); } if (builder.getEnv().size() > 0) { info("Environment variables: " + builder.getEnv()); } info("Working directory: " + builder.getWorkingDir()); // print out the Job properties to the job log. this.logJobProperties(); synchronized (this) { // Make sure that checking if the process job is killed and creating an AzkabanProcess // object are atomic. The cancel method relies on this to make sure that if this.process is // not null, this block of code which includes checking if the job is killed has not been // executed yet. if (this.killed) { info("The job is killed. Abort. No job process created."); return; } this.process = builder.build(); } try { this.process.run(); this.success = true; } catch (final Throwable e) { for (final File file : propFiles) { if (file != null && file.exists()) { file.delete(); } } throw new RuntimeException(e); } finally { info("Process with id " + this.process.getProcessId() + " completed " + (this.success ? "successfully" : "unsuccessfully") + " in " + ((System.currentTimeMillis() - startMs) / 1000) + " seconds."); } } // Get the output properties from this job. generateProperties(propFiles[1]); } /** * <pre> * This method extracts the kerberos ticket cache file name from the jobprops. * This method will ensure that each job execution will have its own kerberos ticket cache file * Given that the code only sets an environmental variable, the number of files created * corresponds * to the number of processes that are doing kinit in their flow, which should not be an * inordinately * high number. 
* </pre> * * @return file name: the kerberos ticket cache file to use */ private String getKrb5ccname(final Props jobProps) { final String effectiveUser = getEffectiveUser(jobProps); final String projectName = jobProps.getString(CommonJobProperties.PROJECT_NAME).replace(" ", "_"); final String flowId = jobProps.getString(CommonJobProperties.FLOW_ID).replace(" ", "_"); final String jobId = jobProps.getString(CommonJobProperties.JOB_ID).replace(" ", "_"); // execId should be an int and should not have space in it, ever final String execId = jobProps.getString(CommonJobProperties.EXEC_ID); final String krb5ccname = String.format("/tmp/krb5cc__%s__%s__%s__%s__%s", projectName, flowId, jobId, execId, effectiveUser); return krb5ccname; } /** * <pre> * Determines what user id should the process job run as, in the following order of precedence: * 1. USER_TO_PROXY * 2. SUBMIT_USER * </pre> * * @return the user that Azkaban is going to execute as */ private String getEffectiveUser(final Props jobProps) { String effectiveUser = null; if (jobProps.containsKey(JobProperties.USER_TO_PROXY)) { effectiveUser = jobProps.getString(JobProperties.USER_TO_PROXY); } else if (jobProps.containsKey(CommonJobProperties.SUBMIT_USER)) { effectiveUser = jobProps.getString(CommonJobProperties.SUBMIT_USER); } else { throw new RuntimeException( "Internal Error: No user.to.proxy or submit.user in the jobProps"); } info("effective user is: " + effectiveUser); return effectiveUser; } /** * Checks to see if user has write access to current working directory which many users need for * their jobs to store temporary data/jars on the executor. * * Accomplishes this by using execute-as-user to try to create an empty file in the cwd. 
* * @param effectiveUser user/proxy user running the job * @return true if user has write permissions in current working directory otherwise false */ private boolean canWriteInCurrentWorkingDirectory(final String effectiveUser) throws IOException { final ExecuteAsUser executeAsUser = new ExecuteAsUser( this.getSysProps().getString(AZKABAN_SERVER_NATIVE_LIB_FOLDER)); final List<String> checkIfUserCanWriteCommand = Arrays .asList(CREATE_FILE, getWorkingDirectory() + "/" + TEMP_FILE_NAME); final int result = executeAsUser.execute(effectiveUser, checkIfUserCanWriteCommand); return result == SUCCESSFUL_EXECUTION; } /** * Changes permissions on file/directory so that the file/directory is owned by the user and the * group remains the azkaban service account name. * * Leverages execute-as-user with "root" as the user to run the command. * * @param effectiveUser user/proxy user running the job * @param fileName the name of the file whose permissions will be changed */ private void assignUserFileOwnership(final String effectiveUser, final String fileName) throws Exception { final ExecuteAsUser executeAsUser = new ExecuteAsUser( this.getSysProps().getString(AZKABAN_SERVER_NATIVE_LIB_FOLDER)); final String groupName = this.getSysProps().getString(AZKABAN_SERVER_GROUP_NAME, "azkaban"); final List<String> changeOwnershipCommand = Arrays .asList(CHOWN, effectiveUser + ":" + groupName, fileName); info("Change ownership of " + fileName + " to " + effectiveUser + ":" + groupName + "."); final int result = executeAsUser.execute("root", changeOwnershipCommand); if (result != 0) { handleError("Failed to change current working directory ownership. Error code: " + Integer .toString(result), null); } } /** * This is used to get the min/max memory size requirement by processes. SystemMemoryInfo can use * the info to determine if the memory request can be fulfilled. For Java process, this should be * Xms/Xmx setting. 
* * @return pair of min/max memory size */ protected Pair<Long, Long> getProcMemoryRequirement() throws Exception { return new Pair<>(0L, 0L); } protected void handleError(final String errorMsg, final Exception e) throws Exception { error(errorMsg); if (e != null) { throw new Exception(errorMsg, e); } else { throw new Exception(errorMsg); } } protected List<String> getCommandList() { final List<String> commands = new ArrayList<>(); commands.add(this.getJobProps().getString(COMMAND)); for (int i = 1; this.getJobProps().containsKey(COMMAND + "." + i); i++) { commands.add(this.getJobProps().getString(COMMAND + "." + i)); } return commands; } @Override public void cancel() throws InterruptedException { // in case the job is waiting synchronized (this) { this.killed = true; this.notify(); if (this.process == null) { // The job thread has not checked if the job is killed yet. // setting the killed flag should be enough to abort the job. // There is no job process to kill. return; } } this.process.awaitStartup(); final boolean processkilled = this.process .softKill(KILL_TIME.toMillis(), TimeUnit.MILLISECONDS); if (!processkilled) { warn("Kill with signal TERM failed. Killing with KILL signal."); this.process.hardKill(); } } @Override public double getProgress() { return this.process != null && this.process.isComplete() ? 1.0 : 0.0; } public int getProcessId() { return this.process.getProcessId(); } @VisibleForTesting boolean isSuccess() { return this.success; } @VisibleForTesting AzkabanProcess getProcess() { return this.process; } public String getPath() { return Utils.ifNull(this.getJobPath(), ""); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils/JobExecutionException.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobExecutor.utils;

/**
 * Unchecked exception signalling that a job failed during setup or execution.
 */
public class JobExecutionException extends RuntimeException {

  private static final long serialVersionUID = 1;

  public JobExecutionException(final String message) {
    super(message);
  }

  public JobExecutionException(final Throwable cause) {
    super(cause);
  }

  public JobExecutionException(final String message, final Throwable cause) {
    super(message, cause);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils/process/AzkabanProcess.java
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.jobExecutor.utils.process; import azkaban.utils.LogGobbler; import com.google.common.base.Joiner; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.lang.reflect.Field; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import org.apache.commons.io.IOUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; /** * An improved version of java.lang.Process. * * Output is read by separate threads to avoid deadlock and logged to log4j loggers. 
*/ public class AzkabanProcess { public static String KILL_COMMAND = "kill"; private final String workingDir; private final List<String> cmd; private final Map<String, String> env; private final Logger logger; private final CountDownLatch startupLatch; private final CountDownLatch completeLatch; private volatile int processId; private volatile Process process; private boolean isExecuteAsUser = false; private String executeAsUserBinary = null; private String effectiveUser = null; public AzkabanProcess(final List<String> cmd, final Map<String, String> env, final String workingDir, final Logger logger) { this.cmd = cmd; this.env = env; this.workingDir = workingDir; this.processId = -1; this.startupLatch = new CountDownLatch(1); this.completeLatch = new CountDownLatch(1); this.logger = logger; } public AzkabanProcess(final List<String> cmd, final Map<String, String> env, final String workingDir, final Logger logger, final String executeAsUserBinary, final String effectiveUser) { this(cmd, env, workingDir, logger); this.isExecuteAsUser = true; this.executeAsUserBinary = executeAsUserBinary; this.effectiveUser = effectiveUser; } /** * Execute this process, blocking until it has completed. 
*/ public void run() throws IOException { if (this.isStarted() || this.isComplete()) { throw new IllegalStateException("The process can only be used once."); } final ProcessBuilder builder = new ProcessBuilder(this.cmd); builder.directory(new File(this.workingDir)); builder.environment().putAll(this.env); builder.redirectErrorStream(true); this.process = builder.start(); try { this.processId = processId(this.process); if (this.processId == 0) { this.logger.info("Spawned process with unknown process id"); } else { this.logger.info("Spawned process with id " + this.processId); } this.startupLatch.countDown(); final LogGobbler outputGobbler = new LogGobbler( new InputStreamReader(this.process.getInputStream(), StandardCharsets.UTF_8), this.logger, Level.INFO, 30); final LogGobbler errorGobbler = new LogGobbler( new InputStreamReader(this.process.getErrorStream(), StandardCharsets.UTF_8), this.logger, Level.ERROR, 30); outputGobbler.start(); errorGobbler.start(); int exitCode = -1; try { exitCode = this.process.waitFor(); } catch (final InterruptedException e) { this.logger.info("Process interrupted. Exit code is " + exitCode, e); } this.completeLatch.countDown(); // try to wait for everything to get logged out before exiting outputGobbler.awaitCompletion(5000); errorGobbler.awaitCompletion(5000); if (exitCode != 0) { throw new ProcessFailureException(exitCode); } } finally { IOUtils.closeQuietly(this.process.getInputStream()); IOUtils.closeQuietly(this.process.getOutputStream()); IOUtils.closeQuietly(this.process.getErrorStream()); } } /** * Await the completion of this process * * @throws InterruptedException if the thread is interrupted while waiting. */ public void awaitCompletion() throws InterruptedException { this.completeLatch.await(); } /** * Await the start of this process * * When this method returns, the job process has been created and a this.processId has been set. * * @throws InterruptedException if the thread is interrupted while waiting. 
*/ public void awaitStartup() throws InterruptedException { this.startupLatch.await(); } /** * Get the process id for this process, if it has started. * * @return The process id or -1 if it cannot be fetched */ public int getProcessId() { checkStarted(); return this.processId; } /** * Attempt to kill the process, waiting up to the given time for it to die * * @param time The amount of time to wait * @param unit The time unit * @return true iff this soft kill kills the process in the given wait time. */ public boolean softKill(final long time, final TimeUnit unit) throws InterruptedException { checkStarted(); if (this.processId != 0 && isStarted()) { try { if (this.isExecuteAsUser) { final String cmd = String.format("%s %s %s %d", this.executeAsUserBinary, this.effectiveUser, KILL_COMMAND, this.processId); Runtime.getRuntime().exec(cmd); } else { final String cmd = String.format("%s %d", KILL_COMMAND, this.processId); Runtime.getRuntime().exec(cmd); } return this.completeLatch.await(time, unit); } catch (final IOException e) { this.logger.error("Kill attempt failed.", e); } return false; } return false; } /** * Force kill this process */ public void hardKill() { checkStarted(); if (isRunning()) { if (this.processId != 0) { try { if (this.isExecuteAsUser) { final String cmd = String.format("%s %s %s -9 %d", this.executeAsUserBinary, this.effectiveUser, KILL_COMMAND, this.processId); Runtime.getRuntime().exec(cmd); } else { final String cmd = String.format("%s -9 %d", KILL_COMMAND, this.processId); Runtime.getRuntime().exec(cmd); } } catch (final IOException e) { this.logger.error("Kill attempt failed.", e); } } this.process.destroy(); } } /** * Attempt to get the process id for this process * * @param process The process to get the id from * @return The id of the process */ private int processId(final java.lang.Process process) { int processId = 0; try { final Field f = process.getClass().getDeclaredField("pid"); f.setAccessible(true); processId = f.getInt(process); 
} catch (final Throwable e) { e.printStackTrace(); } return processId; } /** * @return true iff the process has been started */ public boolean isStarted() { return this.startupLatch.getCount() == 0L; } /** * @return true iff the process has completed */ public boolean isComplete() { return this.completeLatch.getCount() == 0L; } /** * @return true iff the process is currently running */ public boolean isRunning() { return isStarted() && !isComplete(); } public void checkStarted() { if (!isStarted()) { throw new IllegalStateException("Process has not yet started."); } } @Override public String toString() { return "Process(cmd = " + Joiner.on(" ").join(this.cmd) + ", env = " + this.env + ", cwd = " + this.workingDir + ")"; } public boolean isExecuteAsUser() { return this.isExecuteAsUser; } public String getEffectiveUser() { return this.effectiveUser; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils/process/AzkabanProcessBuilder.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobExecutor.utils.process;

import com.google.common.base.Joiner;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;

/**
 * Helper code for building a process.
 *
 * <p>Accumulates the command line, environment, working directory, logging and
 * execute-as-user settings, then {@link #build()} produces an {@link AzkabanProcess}.
 */
public class AzkabanProcessBuilder {

  private final List<String> cmd = new ArrayList<>();
  private Map<String, String> env = new HashMap<>();
  private String workingDir = System.getProperty("user.dir");
  private Logger logger = Logger.getLogger(AzkabanProcess.class);
  private boolean isExecuteAsUser = false;
  private String executeAsUserBinaryPath = null;
  private String effectiveUser = null;
  // NOTE(review): snippet sizes presumably bound how much stderr/stdout is echoed by the
  // consumer of this builder — confirm against the callers; defaults kept at 30.
  private int stdErrSnippetSize = 30;
  private int stdOutSnippetSize = 30;

  public AzkabanProcessBuilder(final String... command) {
    addArg(command);
  }

  /**
   * Append one or more arguments to the command line.
   *
   * @param command the argument(s) to append, in order
   * @return this builder, for chaining
   */
  public AzkabanProcessBuilder addArg(final String... command) {
    // Idiomatic bulk add instead of a manual element-by-element loop.
    Collections.addAll(this.cmd, command);
    return this;
  }

  /** Sets the working directory the process will be launched in. */
  public AzkabanProcessBuilder setWorkingDir(final String dir) {
    this.workingDir = dir;
    return this;
  }

  public String getWorkingDir() {
    return this.workingDir;
  }

  public AzkabanProcessBuilder setWorkingDir(final File f) {
    return setWorkingDir(f.getAbsolutePath());
  }

  /** Adds a single environment variable for the child process. */
  public AzkabanProcessBuilder addEnv(final String variable, final String value) {
    this.env.put(variable, value);
    return this;
  }

  public Map<String, String> getEnv() {
    return this.env;
  }

  /**
   * Replaces the environment map entirely.
   *
   * <p>NOTE(review): the map is stored by reference (not copied), so later external
   * mutation of {@code m} is visible here — confirm callers rely on this before changing.
   */
  public AzkabanProcessBuilder setEnv(final Map<String, String> m) {
    this.env = m;
    return this;
  }

  public int getStdErrorSnippetSize() {
    return this.stdErrSnippetSize;
  }

  public AzkabanProcessBuilder setStdErrorSnippetSize(final int size) {
    this.stdErrSnippetSize = size;
    return this;
  }

  public int getStdOutSnippetSize() {
    return this.stdOutSnippetSize;
  }

  public AzkabanProcessBuilder setStdOutSnippetSize(final int size) {
    this.stdOutSnippetSize = size;
    return this;
  }

  public AzkabanProcessBuilder setLogger(final Logger logger) {
    this.logger = logger;
    return this;
  }

  /**
   * Builds the process wrapper. Uses the execute-as-user constructor variant only when
   * {@link #enableExecuteAsUser()} was called.
   */
  public AzkabanProcess build() {
    if (this.isExecuteAsUser) {
      return new AzkabanProcess(this.cmd, this.env, this.workingDir, this.logger,
          this.executeAsUserBinaryPath, this.effectiveUser);
    } else {
      return new AzkabanProcess(this.cmd, this.env, this.workingDir, this.logger);
    }
  }

  public List<String> getCommand() {
    return this.cmd;
  }

  /** @return the command line joined with single spaces, for display/logging */
  public String getCommandString() {
    return Joiner.on(" ").join(getCommand());
  }

  @Override
  public String toString() {
    return "ProcessBuilder(cmd = " + Joiner.on(" ").join(this.cmd) + ", env = "
        + this.env + ", cwd = " + this.workingDir + ")";
  }

  /** Marks the built process to be run through the execute-as-user wrapper binary. */
  public AzkabanProcessBuilder enableExecuteAsUser() {
    this.isExecuteAsUser = true;
    return this;
  }

  public AzkabanProcessBuilder setExecuteAsUserBinaryPath(final String executeAsUserBinaryPath) {
    this.executeAsUserBinaryPath = executeAsUserBinaryPath;
    return this;
  }

  public AzkabanProcessBuilder setEffectiveUser(final String effectiveUser) {
    this.effectiveUser = effectiveUser;
    return this;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobExecutor/utils/process/ProcessFailureException.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobExecutor.utils.process;

/**
 * Thrown when a launched process terminates with a non-zero exit code.
 *
 * <p>The message is derived from the exit code on demand rather than stored up front.
 */
public class ProcessFailureException extends RuntimeException {

  private static final long serialVersionUID = 1L;

  /** Exit code reported by the failed process. */
  private final int exitCode;

  public ProcessFailureException(final int exitCode) {
    this.exitCode = exitCode;
  }

  /** @return the process's exit code */
  public int getExitCode() {
    return this.exitCode;
  }

  @Override
  public String getMessage() {
    return "Process exited with code " + this.exitCode;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobcallback/JobCallbackConstants.java
package azkaban.jobcallback;

/**
 * Property keys, defaults, token placeholders and delimiters shared by the
 * job-callback feature.
 *
 * <p>NOTE(review): this is a "constant interface"; it is kept as an interface (rather
 * than a final class) for source compatibility with any existing implementers. Interface
 * fields are implicitly {@code public static final}, so the redundant modifiers are omitted.
 */
public interface JobCallbackConstants {

  // Placeholders substituted into the property-name templates below.
  String STATUS_TOKEN = "status";
  String SEQUENCE_TOKEN = "sequence";

  // Supported HTTP request methods.
  String HTTP_GET = "GET";
  String HTTP_POST = "POST";

  // Server-side configuration keys and their defaults.
  String MAX_POST_BODY_LENGTH_PROPERTY_KEY = "jobcallback.max.body.length";
  int DEFAULT_POST_BODY_LENGTH = 4096;
  String MAX_CALLBACK_COUNT_PROPERTY_KEY = "jobcallback.max_count";
  int DEFAULT_MAX_CALLBACK_COUNT = 3;

  // Job-property name templates; STATUS_TOKEN / SEQUENCE_TOKEN get replaced per callback.
  String FIRST_JOB_CALLBACK_URL_TEMPLATE =
      "job.notification." + STATUS_TOKEN + ".1.url";
  String JOB_CALLBACK_URL_TEMPLATE =
      "job.notification." + STATUS_TOKEN + "." + SEQUENCE_TOKEN + ".url";
  String JOB_CALLBACK_REQUEST_METHOD_TEMPLATE =
      "job.notification." + STATUS_TOKEN + "." + SEQUENCE_TOKEN + ".method";
  String JOB_CALLBACK_REQUEST_HEADERS_TEMPLATE =
      "job.notification." + STATUS_TOKEN + "." + SEQUENCE_TOKEN + ".headers";
  String JOB_CALLBACK_BODY_TEMPLATE =
      "job.notification." + STATUS_TOKEN + "." + SEQUENCE_TOKEN + ".body";

  // Context tokens expanded inside callback URLs/bodies at execution time.
  String CONTEXT_SERVER_TOKEN = "?{server}";
  String CONTEXT_PROJECT_TOKEN = "?{project}";
  String CONTEXT_FLOW_TOKEN = "?{flow}";
  String CONTEXT_EXECUTION_ID_TOKEN = "?{executionId}";
  String CONTEXT_JOB_TOKEN = "?{job}";
  String CONTEXT_JOB_STATUS_TOKEN = "?{status}";

  // Delimiters for the header-list property format ("Name:Value\r\nName:Value").
  String HEADER_ELEMENT_DELIMITER = "\r\n";
  String HEADER_NAME_VALUE_DELIMITER = ":";
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobcallback/JobCallbackStatusEnum.java
package azkaban.jobcallback;

/**
 * Job statuses for which an HTTP callback may be configured (see the
 * {@code job.notification.<status>.*} property templates in JobCallbackConstants).
 *
 * <p>NOTE(review): the names suggest start / success / failure / any-completion phases,
 * but the exact trigger semantics are defined by the callback dispatcher — confirm there.
 */
public enum JobCallbackStatusEnum {
  STARTED, SUCCESS, FAILURE, COMPLETED
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobcallback/JobCallbackValidator.java
package azkaban.jobcallback;

import static azkaban.jobcallback.JobCallbackConstants.DEFAULT_POST_BODY_LENGTH;
import static azkaban.jobcallback.JobCallbackConstants.HTTP_GET;
import static azkaban.jobcallback.JobCallbackConstants.HTTP_POST;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_BODY_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_REQUEST_METHOD_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.JOB_CALLBACK_URL_TEMPLATE;
import static azkaban.jobcallback.JobCallbackConstants.MAX_POST_BODY_LENGTH_PROPERTY_KEY;
import static azkaban.jobcallback.JobCallbackConstants.SEQUENCE_TOKEN;
import static azkaban.jobcallback.JobCallbackConstants.STATUS_TOKEN;

import azkaban.utils.Props;
import java.util.Collection;
import org.apache.log4j.Logger;

/**
 * Responsible for validating the job callback related properties at project upload time.
 *
 * @author hluu
 */
public class JobCallbackValidator {

  private static final Logger logger = Logger
      .getLogger(JobCallbackValidator.class);

  /** Static utility class; not meant to be instantiated. */
  private JobCallbackValidator() {
  }

  /**
   * Make sure all the job callback related properties are valid.
   *
   * @param jobName name of the job being validated (used for logging only)
   * @param serverProps server-level properties providing the callback-count and body-length limits
   * @param jobProps the job's properties containing the callback definitions
   * @param errors collector for human-readable validation errors
   * @return number of valid job callback properties. Mainly for testing purpose.
   */
  public static int validate(final String jobName, final Props serverProps,
      final Props jobProps, final Collection<String> errors) {
    final int maxNumCallback = serverProps.getInt(
        JobCallbackConstants.MAX_CALLBACK_COUNT_PROPERTY_KEY,
        JobCallbackConstants.DEFAULT_MAX_CALLBACK_COUNT);
    final int maxPostBodyLength = serverProps.getInt(
        MAX_POST_BODY_LENGTH_PROPERTY_KEY, DEFAULT_POST_BODY_LENGTH);

    int totalCallbackCount = 0;
    // Each job status (STARTED, SUCCESS, ...) carries its own independent callback list.
    for (final JobCallbackStatusEnum jobStatus : JobCallbackStatusEnum.values()) {
      totalCallbackCount +=
          validateBasedOnStatus(jobProps, errors, jobStatus, maxNumCallback, maxPostBodyLength);
    }

    if (logger.isDebugEnabled()) {
      logger.debug("Found " + totalCallbackCount + " job callbacks for job " + jobName);
    }
    return totalCallbackCount;
  }

  /**
   * Validates the callbacks configured for one job status.
   *
   * @return the number of valid callbacks found for this status
   */
  private static int validateBasedOnStatus(final Props jobProps,
      final Collection<String> errors, final JobCallbackStatusEnum jobStatus,
      final int maxNumCallback, final int maxPostBodyLength) {

    int callbackCount = 0;
    // replace property templates with status
    final String jobCallBackUrl =
        JOB_CALLBACK_URL_TEMPLATE.replaceFirst(STATUS_TOKEN, jobStatus.name().toLowerCase());
    final String requestMethod =
        JOB_CALLBACK_REQUEST_METHOD_TEMPLATE.replaceFirst(STATUS_TOKEN,
            jobStatus.name().toLowerCase());
    final String httpBody =
        JOB_CALLBACK_BODY_TEMPLATE.replaceFirst(STATUS_TOKEN, jobStatus.name().toLowerCase());

    for (int i = 0; i <= maxNumCallback; i++) {
      // callback url
      final String callbackUrlKey =
          jobCallBackUrl.replaceFirst(SEQUENCE_TOKEN, Integer.toString(i));
      final String callbackUrlValue = jobProps.get(callbackUrlKey);

      // sequence number should start at 1, this is to check for sequence
      // number that starts a 0
      if (i == 0) {
        if (callbackUrlValue != null) {
          errors.add("Sequence number starts at 1, not 0");
        }
        continue;
      }

      // A missing URL ends the (contiguous) sequence for this status.
      if (callbackUrlValue == null || callbackUrlValue.isEmpty()) {
        break;
      } else {
        final String requestMethodKey =
            requestMethod.replaceFirst(SEQUENCE_TOKEN, Integer.toString(i));
        final String methodValue = jobProps.getString(requestMethodKey, HTTP_GET);

        if (HTTP_POST.equals(methodValue)) {
          // now try to get the post body
          final String postBodyKey = httpBody.replaceFirst(SEQUENCE_TOKEN, Integer.toString(i));
          final String postBodyValue = jobProps.get(postBodyKey);
          if (postBodyValue == null || postBodyValue.isEmpty()) {
            errors.add("No POST body was specified for job callback '"
                + callbackUrlValue + "'");
          } else if (postBodyValue.length() > maxPostBodyLength) {
            errors.add("POST body length is : " + postBodyValue.length()
                + " which is larger than supported length of " + maxPostBodyLength);
          } else {
            callbackCount++;
          }
        } else if (HTTP_GET.equals(methodValue)) {
          // that's cool
          callbackCount++;
        } else {
          errors.add("Unsupported request method: " + methodValue
              + " Only POST and GET are supported");
        }
      }
    }

    return callbackCount;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobtype/JobTypeManager.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobtype;

import azkaban.Constants;
import azkaban.jobExecutor.JavaProcessJob;
import azkaban.jobExecutor.Job;
import azkaban.jobExecutor.NoopJob;
import azkaban.jobExecutor.ProcessJob;
import azkaban.jobExecutor.utils.JobExecutionException;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Utils;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;

/**
 * Loads jobtype plugins (default built-ins plus external plugin directories) and builds
 * {@link Job} executors from them.
 */
public class JobTypeManager {

  private static final Logger LOGGER = Logger.getLogger(JobTypeManager.class);

  private final String jobTypePluginDir; // the dir for jobtype plugins
  private final ClassLoader parentLoader;
  private final Props globalProperties;
  private JobTypePluginSet pluginSet;

  public JobTypeManager(final String jobtypePluginDir, final Props globalProperties,
      final ClassLoader parentClassLoader) {
    this.jobTypePluginDir = jobtypePluginDir;
    this.parentLoader = parentClassLoader;
    this.globalProperties = globalProperties;
    loadPlugins();
  }

  /**
   * (Re)loads all plugins into a fresh plugin set and atomically swaps it in.
   *
   * @throws JobTypeManagerException if any external plugin fails to load
   */
  public void loadPlugins() throws JobTypeManagerException {
    final JobTypePluginSet plugins = new JobTypePluginSet();

    loadDefaultTypes(plugins);
    if (this.jobTypePluginDir != null) {
      final File pluginDir = new File(this.jobTypePluginDir);
      if (pluginDir.exists()) {
        LOGGER.info("Job type plugin directory set. Loading extra job types from " + pluginDir);
        try {
          loadPluginJobTypes(plugins);
        } catch (final Exception e) {
          LOGGER.info("Plugin jobtypes failed to load. " + e.getCause(), e);
          throw new JobTypeManagerException(e);
        }
      }
    }

    // Swap the plugin set. If exception is thrown, then plugin isn't swapped.
    synchronized (this) {
      this.pluginSet = plugins;
    }
  }

  /** Registers the built-in job types that are always available. */
  private void loadDefaultTypes(final JobTypePluginSet plugins)
      throws JobTypeManagerException {
    LOGGER.info("Loading plugin default job types");
    plugins.addPluginClass("command", ProcessJob.class);
    plugins.addPluginClass("javaprocess", JavaProcessJob.class);
    plugins.addPluginClass("noop", NoopJob.class);
  }

  // load Job Types from jobtype plugin dir
  private void loadPluginJobTypes(final JobTypePluginSet plugins)
      throws JobTypeManagerException {
    final File jobPluginsDir = new File(this.jobTypePluginDir);

    if (!jobPluginsDir.exists()) {
      LOGGER.error("Job type plugin dir " + this.jobTypePluginDir
          + " doesn't exist. Will not load any external plugins.");
      return;
    } else if (!jobPluginsDir.isDirectory()) {
      throw new JobTypeManagerException("Job type plugin dir " + this.jobTypePluginDir
          + " is not a directory!");
    } else if (!jobPluginsDir.canRead()) {
      throw new JobTypeManagerException("Job type plugin dir " + this.jobTypePluginDir
          + " is not readable!");
    }

    // Load the common properties used by all jobs that are run
    Props commonPluginJobProps = null;
    final File commonJobPropsFile =
        new File(jobPluginsDir, Constants.PluginManager.COMMONCONFFILE);
    if (commonJobPropsFile.exists()) {
      LOGGER.info("Common plugin job props file " + commonJobPropsFile
          + " found. Attempt to load.");
      try {
        commonPluginJobProps = new Props(this.globalProperties, commonJobPropsFile);
      } catch (final IOException e) {
        // Fixed: propagate the IOException as the cause instead of flattening it into the
        // message with getCause(), which discarded the stack trace.
        throw new JobTypeManagerException(
            "Failed to load common plugin job properties", e);
      }
    } else {
      LOGGER.info("Common plugin job props file " + commonJobPropsFile
          + " not found. Using only globals props");
      commonPluginJobProps = new Props(this.globalProperties);
    }

    // Loads the common properties used by all plugins when loading
    Props commonPluginLoadProps = null;
    final File commonLoadPropsFile =
        new File(jobPluginsDir, Constants.PluginManager.COMMONSYSCONFFILE);
    if (commonLoadPropsFile.exists()) {
      LOGGER.info("Common plugin load props file " + commonLoadPropsFile
          + " found. Attempt to load.");
      try {
        commonPluginLoadProps = new Props(null, commonLoadPropsFile);
      } catch (final IOException e) {
        // Fixed: preserve the cause (same issue as above).
        throw new JobTypeManagerException(
            "Failed to load common plugin loader properties", e);
      }
    } else {
      LOGGER.info("Common plugin load props file " + commonLoadPropsFile
          + " not found. Using empty props.");
      commonPluginLoadProps = new Props();
    }

    plugins.setCommonPluginJobProps(commonPluginJobProps);
    plugins.setCommonPluginLoadProps(commonPluginLoadProps);

    // Loading job types
    // listFiles() can return null on an I/O error even after the checks above.
    final File[] pluginDirs = jobPluginsDir.listFiles();
    if (pluginDirs == null) {
      LOGGER.error("Unable to list job type plugin dir " + this.jobTypePluginDir);
      return;
    }
    for (final File dir : pluginDirs) {
      if (dir.isDirectory() && dir.canRead()) {
        try {
          loadJobTypes(dir, plugins);
        } catch (final Exception e) {
          LOGGER.error("Failed to load jobtype " + dir.getName() + e.getMessage(), e);
          throw new JobTypeManagerException(e);
        }
      }
    }
  }

  /** Loads a single jobtype plugin from its directory into the plugin set. */
  private void loadJobTypes(final File pluginDir, final JobTypePluginSet plugins)
      throws JobTypeManagerException {
    // Directory is the jobtypeName
    final String jobTypeName = pluginDir.getName();
    LOGGER.info("Loading plugin " + jobTypeName);

    Props pluginJobProps = null;
    Props pluginLoadProps = null;
    Props pluginPrivateProps = null;

    final File pluginJobPropsFile = new File(pluginDir, Constants.PluginManager.CONFFILE);
    final File pluginLoadPropsFile = new File(pluginDir, Constants.PluginManager.SYSCONFFILE);

    if (!pluginLoadPropsFile.exists()) {
      LOGGER.info("Plugin load props file " + pluginLoadPropsFile + " not found.");
      return;
    }

    try {
      final Props commonPluginJobProps = plugins.getCommonPluginJobProps();
      final Props commonPluginLoadProps = plugins.getCommonPluginLoadProps();
      if (pluginJobPropsFile.exists()) {
        pluginJobProps = new Props(commonPluginJobProps, pluginJobPropsFile);
      } else {
        pluginJobProps = new Props(commonPluginJobProps);
      }
      // Set the private props.
      pluginPrivateProps = new Props(null, pluginLoadPropsFile);
      pluginPrivateProps.put("plugin.dir", pluginDir.getAbsolutePath());
      plugins.addPluginPrivateProps(jobTypeName, pluginPrivateProps);

      pluginLoadProps = new Props(commonPluginLoadProps, pluginPrivateProps);

      // Adding "plugin.dir" to allow plugin.properties file could read this property. Also, user
      // code could leverage this property as well.
      pluginJobProps.put("plugin.dir", pluginDir.getAbsolutePath());
      pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
    } catch (final Exception e) {
      LOGGER.error("pluginLoadProps to help with debugging: " + pluginLoadProps);
      throw new JobTypeManagerException("Failed to get jobtype properties" + e.getMessage(), e);
    }
    // Add properties into the plugin set
    plugins.addPluginLoadProps(jobTypeName, pluginLoadProps);
    if (pluginJobProps != null) {
      plugins.addPluginJobProps(jobTypeName, pluginJobProps);
    }

    final ClassLoader jobTypeLoader =
        loadJobTypeClassLoader(pluginDir, jobTypeName, plugins);
    final String jobtypeClass = pluginLoadProps.get("jobtype.class");
    if (jobtypeClass == null) {
      // Fail fast with a clear message instead of an NPE from loadClass(null).
      throw new JobTypeManagerException(
          "jobtype.class is not defined for jobtype " + jobTypeName);
    }

    final Class<? extends Job> clazz;
    try {
      // The cast is unchecked because the class is loaded by name; the constructor test
      // below fails fast if the configured class is not actually a Job.
      @SuppressWarnings("unchecked")
      final Class<? extends Job> loaded =
          (Class<? extends Job>) jobTypeLoader.loadClass(jobtypeClass);
      clazz = loaded;
      plugins.addPluginClass(jobTypeName, clazz);
    } catch (final ClassNotFoundException e) {
      throw new JobTypeManagerException(e);
    }

    LOGGER.info("Verifying job plugin " + jobTypeName);
    try {
      // Instantiate once with dummy props to verify the plugin is constructible.
      final Props fakeSysProps = new Props(pluginLoadProps);
      final Props fakeJobProps = new Props(pluginJobProps);
      final Job job =
          (Job) Utils.callConstructor(clazz, "dummy", fakeSysProps, fakeJobProps, LOGGER);
    } catch (final Throwable t) {
      LOGGER.info("Jobtype " + jobTypeName + " failed test!", t);
      throw new JobExecutionException(t);
    }

    LOGGER.info("Loaded jobtype " + jobTypeName + " " + jobtypeClass);
  }

  /**
   * Creates and loads all plugin resources (jars) into a ClassLoader
   */
  private ClassLoader loadJobTypeClassLoader(final File pluginDir,
      final String jobTypeName, final JobTypePluginSet plugins) {
    // sysconf says what jars/confs to load
    final List<URL> resources = new ArrayList<>();
    final Props pluginLoadProps = plugins.getPluginLoaderProps(jobTypeName);

    try {
      // first global classpath
      LOGGER.info("Adding global resources for " + jobTypeName);
      final List<String> typeGlobalClassPath =
          pluginLoadProps.getStringList("jobtype.global.classpath", null, ",");
      if (typeGlobalClassPath != null) {
        for (final String jar : typeGlobalClassPath) {
          final URL cpItem = new File(jar).toURI().toURL();
          if (!resources.contains(cpItem)) {
            LOGGER.info("adding to classpath " + cpItem);
            resources.add(cpItem);
          }
        }
      }

      // type specific classpath
      LOGGER.info("Adding type resources.");
      final List<String> typeClassPath =
          pluginLoadProps.getStringList("jobtype.classpath", null, ",");
      if (typeClassPath != null) {
        for (final String jar : typeClassPath) {
          final URL cpItem = new File(jar).toURI().toURL();
          if (!resources.contains(cpItem)) {
            LOGGER.info("adding to classpath " + cpItem);
            resources.add(cpItem);
          }
        }
      }
      final List<String> jobtypeLibDirs =
          pluginLoadProps.getStringList("jobtype.lib.dir", null, ",");
      if (jobtypeLibDirs != null) {
        for (final String libDir : jobtypeLibDirs) {
          for (final File f : new File(libDir).listFiles()) {
            if (f.getName().endsWith(".jar")) {
              resources.add(f.toURI().toURL());
              LOGGER.info("adding to classpath " + f.toURI().toURL());
            }
          }
        }
      }

      LOGGER.info("Adding type override resources.");
      for (final File f : pluginDir.listFiles()) {
        if (f.getName().endsWith(".jar")) {
          resources.add(f.toURI().toURL());
          LOGGER.info("adding to classpath " + f.toURI().toURL());
        }
      }
    } catch (final MalformedURLException e) {
      throw new JobTypeManagerException(e);
    }

    // each job type can have a different class loader
    LOGGER.info(String.format("Classpath for plugin[dir: %s, JobType: %s]: %s", pluginDir,
        jobTypeName, resources));
    final ClassLoader jobTypeLoader =
        new URLClassLoader(resources.toArray(new URL[resources.size()]), this.parentLoader);
    return jobTypeLoader;
  }

  /**
   * Builds a Job executor for the given job id from its resolved props.
   *
   * @throws JobTypeManagerException if the type is missing/unknown or construction fails
   */
  public Job buildJobExecutor(final String jobId, Props jobProps, final Logger logger)
      throws JobTypeManagerException {
    // This is final because during build phase, you should never need to swap
    // the pluginSet for safety reasons
    final JobTypePluginSet pluginSet = getJobTypePluginSet();

    Job job = null;
    try {
      final String jobType = jobProps.getString("type");
      if (jobType == null || jobType.length() == 0) {
        /* throw an exception when job name is null or empty */
        throw new JobExecutionException(String.format(
            "The 'type' parameter for job[%s] is null or empty", jobProps));
      }

      logger.info("Building " + jobType + " job executor. ");

      final Class<? extends Object> executorClass = pluginSet.getPluginClass(jobType);
      if (executorClass == null) {
        throw new JobExecutionException(String.format("Job type '" + jobType
            + "' is unrecognized. Could not construct job[%s] of type[%s].", jobProps, jobType));
      }

      Props pluginJobProps = pluginSet.getPluginJobProps(jobType);
      // For default jobtypes, even though they don't have pluginJobProps configured,
      // they still need to load properties from common.properties file if it's present
      // because common.properties file is global to all jobtypes.
      if (pluginJobProps == null) {
        pluginJobProps = pluginSet.getCommonPluginJobProps();
      }
      if (pluginJobProps != null) {
        for (final String k : pluginJobProps.getKeySet()) {
          if (!jobProps.containsKey(k)) {
            jobProps.put(k, pluginJobProps.get(k));
          }
        }
      }
      jobProps = PropsUtils.resolveProps(jobProps);

      Props pluginLoadProps = pluginSet.getPluginLoaderProps(jobType);
      if (pluginLoadProps != null) {
        pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);
      } else {
        // pluginSet.getCommonPluginLoadProps() will return null if there is no plugins directory.
        // hence assigning default Props() if that's the case
        pluginLoadProps = pluginSet.getCommonPluginLoadProps();
        if (pluginLoadProps == null) {
          pluginLoadProps = new Props();
        }
      }

      try {
        // Prefer the 5-arg constructor (with private props); fall back to the 4-arg one.
        job = (Job) Utils.callConstructor(executorClass, jobId, pluginLoadProps,
            jobProps, pluginSet.getPluginPrivateProps(jobType), logger);
      } catch (final Exception e) {
        logger.info("Failed with 5 inputs with exception e = " + e.getMessage());
        job = (Job) Utils.callConstructor(executorClass, jobId, pluginLoadProps,
            jobProps, logger);
      }
    } catch (final Throwable t) {
      // Fixed: the former catch(Exception)/catch(Throwable) pair was duplicated, and the
      // Exception branch logged only getMessage() without the stack trace. One handler
      // now logs the throwable and rethrows with the cause preserved.
      logger.error("Failed to build job executor for job " + jobId, t);
      throw new JobTypeManagerException("Failed to build job executor for job " + jobId, t);
    }

    return job;
  }

  /**
   * Public for test reasons. Will need to move tests to the same package
   */
  public synchronized JobTypePluginSet getJobTypePluginSet() {
    return this.pluginSet;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobtype/JobTypeManagerException.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobtype;

/**
 * Unchecked exception signalling a failure while loading jobtype plugins or while
 * constructing a job executor from a jobtype.
 */
public class JobTypeManagerException extends RuntimeException {

  private static final long serialVersionUID = 1L;

  public JobTypeManagerException(final String message) {
    super(message);
  }

  public JobTypeManagerException(final Throwable cause) {
    super(cause);
  }

  public JobTypeManagerException(final String message, final Throwable cause) {
    super(message, cause);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/jobtype/JobTypePluginSet.java
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.jobtype;

import azkaban.jobExecutor.Job;
import azkaban.utils.Props;
import java.util.HashMap;
import java.util.Map;

/**
 * Container for job type plugins
 *
 * This contains the jobClass objects, the properties for loading plugins, and the properties given
 * by default to the plugin.
 *
 * This class is not thread safe, so adding to this class should only be populated and controlled by
 * the JobTypeManager
 */
public class JobTypePluginSet {

  // jobtype name -> Job implementation class loaded for that type
  private final Map<String, Class<? extends Job>> jobToClass;
  // jobtype name -> default runtime (job) properties
  private final Map<String, Props> pluginJobPropsMap;
  // jobtype name -> properties used when loading the plugin (classpath etc.)
  private final Map<String, Props> pluginLoadPropsMap;
  // jobtype name -> plugin-private properties (from the plugin's own sysconf file)
  private final Map<String, Props> pluginPrivatePropsMap;
  private Props commonJobProps;
  private Props commonLoadProps;

  /**
   * Base constructor
   */
  public JobTypePluginSet() {
    this.jobToClass = new HashMap<>();
    this.pluginJobPropsMap = new HashMap<>();
    this.pluginLoadPropsMap = new HashMap<>();
    this.pluginPrivatePropsMap = new HashMap<>();
  }

  /**
   * Copy constructor
   *
   * Note: the maps are shallow-copied — the contained Props/class objects are shared
   * with the clone, only the map structures themselves are new.
   *
   * @param clone the plugin set to copy
   */
  public JobTypePluginSet(final JobTypePluginSet clone) {
    this.jobToClass = new HashMap<>(clone.jobToClass);
    this.pluginJobPropsMap = new HashMap<>(clone.pluginJobPropsMap);
    this.pluginLoadPropsMap = new HashMap<>(clone.pluginLoadPropsMap);
    this.pluginPrivatePropsMap = new HashMap<>(clone.pluginPrivatePropsMap);
    this.commonJobProps = clone.commonJobProps;
    this.commonLoadProps = clone.commonLoadProps;
  }

  /**
   * Gets common properties for every jobtype
   *
   * @return the shared job props, or null if never set
   */
  public Props getCommonPluginJobProps() {
    return this.commonJobProps;
  }

  /**
   * Sets the common properties shared in every jobtype
   */
  public void setCommonPluginJobProps(final Props commonJobProps) {
    this.commonJobProps = commonJobProps;
  }

  /**
   * Gets the common properties used to load a plugin
   *
   * @return the shared load props, or null if never set
   */
  public Props getCommonPluginLoadProps() {
    return this.commonLoadProps;
  }

  /**
   * Sets the common properties used to load every plugin
   */
  public void setCommonPluginLoadProps(final Props commonLoadProps) {
    this.commonLoadProps = commonLoadProps;
  }

  /**
   * Get the properties for a jobtype used to setup and load a plugin
   *
   * @param jobTypeName name of the jobtype
   * @return the load props, or null if the jobtype is unknown
   */
  public Props getPluginLoaderProps(final String jobTypeName) {
    return this.pluginLoadPropsMap.get(jobTypeName);
  }

  /**
   * Get the plugin private properties for the jobtype
   *
   * @param jobTypeName name of the jobtype
   * @return the private props, or null if the jobtype is unknown
   */
  public Props getPluginPrivateProps(final String jobTypeName) {
    return this.pluginPrivatePropsMap.get(jobTypeName);
  }

  /**
   * Get the properties that will be given to the plugin as default job properties.
   *
   * @param jobTypeName name of the jobtype
   * @return the default job props, or null if the jobtype is unknown
   */
  public Props getPluginJobProps(final String jobTypeName) {
    return this.pluginJobPropsMap.get(jobTypeName);
  }

  /**
   * Gets the plugin job runner class
   *
   * @param jobTypeName name of the jobtype
   * @return the Job implementation class, or null if the jobtype is unknown
   */
  public Class<? extends Job> getPluginClass(final String jobTypeName) {
    return this.jobToClass.get(jobTypeName);
  }

  /**
   * Adds plugin jobtype class
   */
  public void addPluginClass(final String jobTypeName,
      final Class<? extends Job> jobTypeClass) {
    this.jobToClass.put(jobTypeName, jobTypeClass);
  }

  /**
   * Adds plugin job properties used as default runtime properties
   */
  public void addPluginJobProps(final String jobTypeName, final Props props) {
    this.pluginJobPropsMap.put(jobTypeName, props);
  }

  /**
   * Adds plugin load properties used to load the plugin
   */
  public void addPluginLoadProps(final String jobTypeName, final Props props) {
    this.pluginLoadPropsMap.put(jobTypeName, props);
  }

  /**
   * Adds plugins private properties used by the plugin
   */
  public void addPluginPrivateProps(final String jobTypeName, final Props props) {
    this.pluginPrivatePropsMap.put(jobTypeName, props);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/AbstractMetric.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric;

import org.apache.log4j.Logger;

/**
 * Abstract class for Metric
 *
 * @param <T> Type of Value of a given metric
 */
public abstract class AbstractMetric<T> implements IMetric<T>, Cloneable {

  // NOTE(review): this logger is registered under MetricReportManager's name, not
  // AbstractMetric's — looks like a copy/paste slip; confirm before changing the category.
  protected static final Logger logger = Logger.getLogger(MetricReportManager.class);
  protected String name;
  protected T value;
  protected String type;
  protected MetricReportManager metricManager;

  /**
   * @param metricName Name of metric
   * @param metricType Metric type. For display purposes.
   * @param initialValue Initial Value of a metric
   * @param manager Metric Manager whom a metric will report to
   */
  protected AbstractMetric(final String metricName, final String metricType,
      final T initialValue, final MetricReportManager manager) {
    this.name = metricName;
    this.type = metricType;
    this.value = initialValue;
    this.metricManager = manager;
  }

  /**
   * {@inheritDoc}
   *
   * @see azkaban.metric.IMetric#getName()
   */
  @Override
  public String getName() {
    return this.name;
  }

  /**
   * {@inheritDoc}
   *
   * @see azkaban.metric.IMetric#getValueType()
   */
  @Override
  public String getValueType() {
    return this.type;
  }

  /**
   * {@inheritDoc}
   *
   * @see azkaban.metric.IMetric#updateMetricManager(azkaban.metric.MetricReportManager)
   */
  @Override
  public void updateMetricManager(final MetricReportManager manager) {
    this.metricManager = manager;
  }

  /**
   * Returns a shallow clone of this metric as its snapshot.
   *
   * {@inheritDoc}
   *
   * @see azkaban.metric.IMetric#getSnapshot()
   */
  @Override
  public IMetric<T> getSnapshot() throws CloneNotSupportedException {
    return (IMetric<T>) this.clone();
  }

  /**
   * {@inheritDoc}
   *
   * @see azkaban.metric.IMetric#getValue()
   */
  @Override
  public T getValue() {
    return this.value;
  }

  /**
   * Method used to notify manager for a tracking event. Metric is free to call this method as per
   * implementation. Timer based or Azkaban events are the most common implementation {@inheritDoc}
   *
   * @see azkaban.metric.IMetric#notifyManager()
   */
  @Override
  public void notifyManager() {
    logger.debug(String.format("Notifying Manager for %s", this.getClass().getName()));
    try {
      this.metricManager.reportMetric(this);
    } catch (final Throwable ex) {
      // NOTE(review): per the message below this broad catch guards against an unset
      // (null) manager, but it also swallows any failure thrown by reportMetric itself.
      logger.error(
          String.format("Metric Manager is not set for %s metric", this.getClass().getName()),
          ex);
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/GangliaMetricEmitter.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric;

import azkaban.utils.Props;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

/**
 * MetricEmitter implementation to report metric to a ganglia gmetric process.
 *
 * Fixes over the previous version:
 * - the original exception cause is preserved (it was dropped, making failures undiagnosable);
 * - the interrupt status is restored when the wait for gmetric is interrupted;
 * - the command is run via {@link ProcessBuilder} with an argument list instead of
 *   {@code Runtime.exec(String)}, whose naive whitespace tokenization would split metric
 *   values containing spaces into multiple arguments.
 */
public class GangliaMetricEmitter implements IMetricEmitter {

  private static final String GANGLIA_METRIC_REPORTER_PATH = "azkaban.metric.ganglia.path";
  // Path to the gmetric binary; null if the property was never configured.
  private final String gmetricPath;

  /**
   * @param azkProps Azkaban Properties
   */
  public GangliaMetricEmitter(final Props azkProps) {
    this.gmetricPath = azkProps.get(GANGLIA_METRIC_REPORTER_PATH);
  }

  /**
   * Builds the gmetric argument list. Synchronized on the metric so that its type, name and
   * value are read as one consistent snapshot.
   */
  private List<String> buildCommand(final IMetric<?> metric) {
    synchronized (metric) {
      return Arrays.asList(this.gmetricPath, "-t", metric.getValueType(),
          "-n", metric.getName(), "-v", metric.getValue().toString());
    }
  }

  /**
   * Report metric by executing command line interface of gmetrics {@inheritDoc}
   *
   * @throws MetricException if gmetric cannot be launched, exits with a non-zero code,
   *         the wait is interrupted, or the gmetric path is not configured
   * @see azkaban.metric.IMetricEmitter#reportMetric(azkaban.metric.IMetric)
   */
  @Override
  public void reportMetric(final IMetric<?> metric) throws MetricException {
    if (this.gmetricPath == null) {
      // azkaban.metric.ganglia.path was never set; we cannot invoke gmetric.
      throw new MetricException("Failed to build ganglia Command");
    }
    final List<String> gangliaCommand = buildCommand(metric);
    try {
      // executes shell command to report metric to ganglia dashboard
      final Process emission = new ProcessBuilder(gangliaCommand).start();
      final int exitCode = emission.waitFor();
      if (exitCode != 0) {
        throw new MetricException("Failed to report metric using gmetric");
      }
    } catch (final InterruptedException e) {
      // Restore the interrupt status (it was silently swallowed before).
      Thread.currentThread().interrupt();
      throw new MetricException("Failed to report metric using gmetric", e);
    } catch (final IOException e) {
      // Keep the cause so the underlying launch failure is visible to callers/logs.
      throw new MetricException("Failed to report metric using gmetric", e);
    }
  }

  @Override
  public void purgeAllData() throws MetricException {
    // Nothing buffered locally; gmetric emission is fire-and-forget.
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/IMetric.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric;

/**
 * Interface of any Metric
 *
 * @param <T> Type of Value of a given metric
 */
public interface IMetric<T> {

  /**
   * @return the unique name of this metric (used by the manager for lookup)
   */
  String getName();

  /**
   * @return the metric's type as a display string
   */
  String getValueType();

  /**
   * Attach or replace the {@link MetricReportManager} this metric reports to.
   *
   * @param manager manager to notify on tracking events
   */
  void updateMetricManager(final MetricReportManager manager);

  /**
   * Notify the attached manager that this metric has a value to report.
   */
  void notifyManager();

  /**
   * @return the metric's current value
   */
  T getValue();

  /**
   * @return a point-in-time copy of this metric, safe to hand to emitters
   * @throws CloneNotSupportedException if the implementation cannot be cloned
   */
  IMetric<T> getSnapshot() throws CloneNotSupportedException;
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/IMetricEmitter.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric;

/**
 * Interface for metric emitters
 */
public interface IMetricEmitter {

  /**
   * Report a metric snapshot to this emitter's destination.
   *
   * @param metric snapshot to report
   * @throws MetricException if the metric could not be reported
   */
  void reportMetric(final IMetric<?> metric) throws MetricException;

  /**
   * Discard any data this emitter has accumulated (called when metric tracking is disabled).
   *
   * @throws MetricException if the purge fails
   */
  void purgeAllData() throws MetricException;
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/MetricException.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.metric; /** * Exception for Azkaban's Metric Component */ public class MetricException extends Exception { private static final long serialVersionUID = 1L; public MetricException(final String message) { super(message); } public MetricException(final Throwable cause) { super(cause); } public MetricException(final String message, final Throwable cause) { super(message, cause); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/MetricReportManager.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric;

import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import org.apache.log4j.Logger;

/**
 * Manager for access or updating metric related functionality of Azkaban MetricManager is
 * responsible all handling all action requests from statsServlet in Exec server <p> Metric Manager
 * 'has a' relationship with :- <ul> <li>all the metric Azkaban is tracking</li> <li>all the
 * emitters Azkaban is supposed to report metrics</li> </ul></p>
 */
public class MetricReportManager {

  /**
   * Maximum number of metrics reporting threads
   */
  private static final int MAX_EMITTER_THREADS = 4;
  private static final Logger logger = Logger.getLogger(MetricReportManager.class);

  // Singleton state; 'instance' is volatile for safe double-checked locking in getInstance().
  private static volatile MetricReportManager instance = null;
  private static volatile boolean isManagerEnabled;

  /**
   * List of all the metrics that Azkaban is tracking. Manager is not concerned with the type of
   * metric as long as it honors IMetric contracts.
   *
   * CopyOnWriteArrayList: metrics are registered from servlet/startup threads while
   * getMetricFromName()/getAllMetrics() iterate concurrently; the previous plain ArrayList
   * risked ConcurrentModificationException.
   */
  private final List<IMetric<?>> metrics;

  /**
   * List of all the emitters listening to all the metrics. Manager is not concerned with how an
   * emitter reports a value; it only notifies all emitters whenever an IMetric wants to be
   * notified.
   *
   * CopyOnWriteArrayList: emitters can be added/removed at runtime (stats servlet) while
   * reportMetric() iterates the list on caller threads.
   */
  private final List<IMetricEmitter> metricEmitters;

  // Pool used to run emitter reporting off the caller's thread.
  private final ExecutorService executorService;

  private MetricReportManager() {
    logger.debug("Instantiating Metric Manager");
    this.executorService = Executors.newFixedThreadPool(MAX_EMITTER_THREADS);
    this.metrics = new CopyOnWriteArrayList<>();
    this.metricEmitters = new CopyOnWriteArrayList<>();
    enableManager();
  }

  /**
   * @return true, if we have Instantiated and enabled metric manager from Azkaban exec server
   */
  public static boolean isAvailable() {
    return isInstantiated() && isManagerEnabled;
  }

  /**
   * @return true, if we have Instantiated metric manager from Azkaban exec server
   */
  public static boolean isInstantiated() {
    return instance != null;
  }

  /**
   * Get a singleton object for Metric Manager (lazy, double-checked locking).
   */
  public static MetricReportManager getInstance() {
    if (instance == null) {
      synchronized (MetricReportManager.class) {
        if (instance == null) {
          logger.info("Instantiating MetricReportManager");
          instance = new MetricReportManager();
        }
      }
    }
    return instance;
  }

  /***
   * each element of metrics List is responsible to call this method and report metrics
   * @param metric
   */
  @SuppressWarnings("FutureReturnValueIgnored")
  public void reportMetric(final IMetric<?> metric) {
    if (metric != null && isAvailable()) {
      try {
        final IMetric<?> metricSnapshot;
        // take snapshot; lock on the metric so value/name are read consistently
        synchronized (metric) {
          metricSnapshot = metric.getSnapshot();
        }
        logger.debug(String
            .format("Submitting %s metric for metric emission pool", metricSnapshot.getName()));
        // report to all emitters
        for (final IMetricEmitter metricEmitter : this.metricEmitters) {
          this.executorService.submit(() -> {
            try {
              metricEmitter.reportMetric(metricSnapshot);
            } catch (final Exception ex) {
              logger.error(
                  String.format("Failed to report %s metric due to ", metricSnapshot.getName()),
                  ex);
            }
          });
        }
      } catch (final CloneNotSupportedException ex) {
        logger.error(
            String.format("Failed to take snapshot for %s metric", metric.getClass().getName()),
            ex);
      }
    }
  }

  /**
   * Add a metric emitter to report metric
   */
  public void addMetricEmitter(final IMetricEmitter emitter) {
    this.metricEmitters.add(emitter);
  }

  /**
   * remove a metric emitter
   */
  public void removeMetricEmitter(final IMetricEmitter emitter) {
    this.metricEmitters.remove(emitter);
  }

  /**
   * Get all the metric emitters
   */
  public List<IMetricEmitter> getMetricEmitters() {
    return this.metricEmitters;
  }

  /**
   * Add a metric to be managed by Metric Manager. Null metrics are rejected and duplicates
   * (by name) are logged and ignored.
   */
  public void addMetric(final IMetric<?> metric) {
    if (metric == null) {
      throw new IllegalArgumentException("Cannot add a null metric");
    }
    if (getMetricFromName(metric.getName()) == null) {
      logger.debug(String.format("Adding %s metric in Metric Manager", metric.getName()));
      this.metrics.add(metric);
      metric.updateMetricManager(this);
    } else {
      logger.error(String.format("Failed to add metric: %s is already tracked",
          metric.getName()));
    }
  }

  /**
   * Get metric object for a given metric name
   *
   * @param name metricName
   * @return metric Object, if found. Otherwise null.
   */
  public IMetric<?> getMetricFromName(final String name) {
    IMetric<?> metric = null;
    if (name != null) {
      for (final IMetric<?> currentMetric : this.metrics) {
        if (currentMetric.getName().equals(name)) {
          metric = currentMetric;
          break;
        }
      }
    }
    return metric;
  }

  /**
   * Get all the metrics tracked by this manager
   */
  public List<IMetric<?>> getAllMetrics() {
    return this.metrics;
  }

  public void enableManager() {
    logger.info("Enabling Metric Manager");
    isManagerEnabled = true;
  }

  /**
   * Disable Metric Manager and ask all emitters to purge all available data.
   */
  public void disableManager() {
    logger.info("Disabling Metric Manager");
    if (isManagerEnabled) {
      isManagerEnabled = false;
      for (final IMetricEmitter emitter : this.metricEmitters) {
        try {
          emitter.purgeAllData();
        } catch (final MetricException ex) {
          logger.error("Failed to purge data ", ex);
        }
      }
    }
  }

  /**
   * Shut down the metric emission thread pool. Call this explicitly when the server stops
   * reporting metrics; finalize() is deprecated and not guaranteed to run.
   */
  public void shutdown() {
    this.executorService.shutdown();
  }

  /**
   * Shutdown execution service {@inheritDoc}
   *
   * NOTE(review): kept only as a safety net; prefer calling {@link #shutdown()} directly.
   *
   * @see java.lang.Object#finalize()
   */
  @Override
  protected void finalize() {
    shutdown();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/TimeBasedReportingMetric.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric;

import java.util.Timer;
import java.util.TimerTask;

/**
 * A metric that reports itself to the manager on a fixed schedule, driven by a
 * {@link java.util.Timer}. Subclasses hook into the reporting cycle via
 * {@link #preTrackingEventMethod()} and {@link #postTrackingEventMethod()}.
 *
 * @param <T> Type of Value of a given metric
 */
public abstract class TimeBasedReportingMetric<T> extends AbstractMetric<T> {

  // Allowed scheduling bounds, in milliseconds (1 hour max, 3 seconds min).
  protected long MAX_MILLISEC_INTERVAL = 60 * 60 * 1000;
  protected long MIN_MILLISEC_INTERVAL = 3 * 1000;
  // Current timer; replaced wholesale whenever the interval changes.
  private Timer timer;

  /**
   * @param metricName Name of metric
   * @param metricType Metric type. For display purposes.
   * @param initialValue Initial Value of a metric
   * @param manager Metric Manager whom a metric will report to
   * @param interval Time interval for metric tracking
   * @throws MetricException if the interval is outside the allowed bounds
   */
  public TimeBasedReportingMetric(final String metricName, final String metricType,
      final T initialValue, final MetricReportManager manager, final long interval)
      throws MetricException {
    super(metricName, metricType, initialValue, manager);
    if (!isValidInterval(interval)) {
      throw new MetricException("Invalid interval: Cannot instantiate timer");
    }
    this.timer = newScheduledTimer(interval);
  }

  /**
   * Creates a fresh timer with a reporting task scheduled at the given fixed interval.
   */
  private Timer newScheduledTimer(final long interval) {
    final Timer scheduled = new Timer();
    scheduled.schedule(makeReportingTask(), interval, interval);
    return scheduled;
  }

  /**
   * Builds the periodic task: under this metric's lock, run the pre-hook, notify the
   * manager, then run the post-hook.
   */
  private TimerTask makeReportingTask() {
    return new TimerTask() {
      @Override
      public void run() {
        synchronized (TimeBasedReportingMetric.this) {
          preTrackingEventMethod();
          notifyManager();
          postTrackingEventMethod();
        }
      }
    };
  }

  /**
   * Change the tracking interval by cancelling the current timer and scheduling a new one.
   *
   * @throws MetricException if the new interval is outside the allowed bounds
   */
  public void updateInterval(final long interval) throws MetricException {
    if (!isValidInterval(interval)) {
      throw new MetricException("Invalid interval: Cannot update timer");
    }
    logger.debug(String
        .format("Updating tracking interval to %d milisecond for %s metric", interval, getName()));
    this.timer.cancel();
    this.timer = newScheduledTimer(interval);
  }

  private boolean isValidInterval(final long interval) {
    return interval >= this.MIN_MILLISEC_INTERVAL && interval <= this.MAX_MILLISEC_INTERVAL;
  }

  /**
   * This method is responsible for making any last minute update to value, if any
   */
  protected abstract void preTrackingEventMethod();

  /**
   * This method is responsible for making any post processing after tracking
   */
  protected abstract void postTrackingEventMethod();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/inmemoryemitter/InMemoryHistoryNode.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric.inmemoryemitter;

import java.util.Date;

/**
 * A snapshot of metric's value, stamped with the time of construction.
 */
public class InMemoryHistoryNode {

  private final Object value;
  private final Date date;

  /**
   * Takes snapshot of the metric with a given value
   */
  public InMemoryHistoryNode(final Object val) {
    this.value = val;
    this.date = new Date();
  }

  public Object getValue() {
    return this.value;
  }

  /**
   * @return the snapshot time. Returns a defensive copy: Date is mutable, and the previous
   * version leaked the internal instance, letting callers corrupt the timestamp ordering
   * that InMemoryMetricEmitter's cleanup relies on.
   */
  public Date getTimestamp() {
    return new Date(this.date.getTime());
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metric/inmemoryemitter/InMemoryMetricEmitter.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metric.inmemoryemitter;

import azkaban.metric.IMetric;
import azkaban.metric.IMetricEmitter;
import azkaban.metric.MetricException;
import azkaban.utils.Props;
import java.util.Date;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.LinkedBlockingDeque;
import org.apache.commons.math3.stat.descriptive.DescriptiveStatistics;
import org.apache.log4j.Logger;

/**
 * Metric Emitter which maintains in memory snapshots of the metrics This is also the default metric
 * emitter and used by /stats servlet
 */
public class InMemoryMetricEmitter implements IMetricEmitter {

  protected static final Logger logger = Logger.getLogger(InMemoryMetricEmitter.class);
  private static final String INMEMORY_METRIC_REPORTER_WINDOW = "azkaban.metric.inmemory.interval";
  private static final String INMEMORY_METRIC_NUM_INSTANCES =
      "azkaban.metric.inmemory.maxinstances";
  private static final String INMEMORY_METRIC_STANDARDDEVIATION_FACTOR =
      "azkaban.metric.inmemory.standardDeviationFactor";

  private final double standardDeviationFactor;

  /**
   * Data structure to keep track of snapshots: metric name -> deque of history nodes,
   * ordered oldest-first. Per-deque access is guarded by synchronizing on the deque.
   */
  protected Map<String, LinkedBlockingDeque<InMemoryHistoryNode>> historyListMapping;

  /**
   * Interval (in millisecond) from today for which we should maintain the in memory snapshots.
   * Reads/writes are coordinated via synchronized blocks (see setReportingInterval /
   * cleanUsingTime).
   */
  private long timeWindow;

  /**
   * Maximum number of snapshots that should be displayed on /stats servlet
   */
  private long numInstances;

  /**
   * @param azkProps Azkaban Properties
   */
  public InMemoryMetricEmitter(final Props azkProps) {
    this.historyListMapping = new ConcurrentHashMap<>();
    // Default window: 7 days in milliseconds (604,800,000 — fits in int arithmetic).
    this.timeWindow = azkProps.getLong(INMEMORY_METRIC_REPORTER_WINDOW, 60 * 60 * 24 * 7 * 1000);
    this.numInstances = azkProps.getLong(INMEMORY_METRIC_NUM_INSTANCES, 50);
    this.standardDeviationFactor = azkProps.getDouble(INMEMORY_METRIC_STANDARDDEVIATION_FACTOR, 2);
  }

  /**
   * Update reporting interval
   *
   * @param val interval in milliseconds
   */
  public synchronized void setReportingInterval(final long val) {
    this.timeWindow = val;
  }

  /**
   * Set number of /stats servlet display points
   */
  public void setReportingInstances(final long num) {
    this.numInstances = num;
  }

  /**
   * Ingest metric in snapshot data structure while maintaining interval {@inheritDoc}
   *
   * Uses computeIfAbsent so the deque for a new metric is created atomically — the previous
   * containsKey/put sequence could lose one thread's deque (and its first sample) when two
   * threads reported a brand-new metric concurrently.
   *
   * @see azkaban.metric.IMetricEmitter#reportMetric(azkaban.metric.IMetric)
   */
  @Override
  public void reportMetric(final IMetric<?> metric) throws MetricException {
    final String metricName = metric.getName();
    final LinkedBlockingDeque<InMemoryHistoryNode> deque =
        this.historyListMapping.computeIfAbsent(metricName, name -> {
          logger.info("First time capturing metric: " + name);
          return new LinkedBlockingDeque<>();
        });
    synchronized (deque) {
      logger.debug("Ingesting metric: " + metricName);
      deque.add(new InMemoryHistoryNode(metric.getValue()));
      // peekLast is non-null: the add above happened under the same lock that guards removal.
      cleanUsingTime(metricName, deque.peekLast().getTimestamp());
    }
  }

  /**
   * Get snapshots for a given metric at a given time
   *
   * @param metricName name of the metric
   * @param from Start date
   * @param to end date
   * @param useStats get statistically significant points only
   * @return List of snapshots
   */
  public List<InMemoryHistoryNode> getMetrics(final String metricName, final Date from,
      final Date to, final Boolean useStats) throws ClassCastException {
    final LinkedList<InMemoryHistoryNode> selectedLists = new LinkedList<>();
    // Single get() instead of containsKey()+get(): purgeAllData() can clear the map between
    // the two calls, which previously risked an NPE.
    final LinkedBlockingDeque<InMemoryHistoryNode> deque =
        this.historyListMapping.get(metricName);
    if (deque != null) {
      logger.debug("selecting snapshots within time frame");
      synchronized (deque) {
        for (final InMemoryHistoryNode node : deque) {
          if (node.getTimestamp().after(from) && node.getTimestamp().before(to)) {
            selectedLists.add(node);
          }
          if (node.getTimestamp().after(to)) {
            // Deque is ordered oldest-first; everything past 'to' can be skipped.
            break;
          }
        }
      }
      // selecting nodes if num of nodes > numInstances
      if (useStats) {
        statBasedSelectMetricHistory(selectedLists);
      } else {
        generalSelectMetricHistory(selectedLists);
      }
    }
    cleanUsingTime(metricName, new Date());
    return selectedLists;
  }

  /**
   * filter snapshots using statistically significant points only
   *
   * @param selectedLists list of snapshots
   */
  private void statBasedSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists)
      throws ClassCastException {
    logger.debug("selecting snapshots which are far away from mean value");
    final DescriptiveStatistics descStats = getDescriptiveStatistics(selectedLists);
    final Double mean = descStats.getMean();
    final Double std = descStats.getStandardDeviation();

    final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
    while (ite.hasNext()) {
      final InMemoryHistoryNode currentNode = ite.next();
      final double value = ((Number) currentNode.getValue()).doubleValue();
      // remove all elements which lie within standardDeviationFactor std-devs of the mean
      if (value < mean + this.standardDeviationFactor * std
          && value > mean - this.standardDeviationFactor * std) {
        ite.remove();
      }
    }
  }

  private DescriptiveStatistics getDescriptiveStatistics(
      final LinkedList<InMemoryHistoryNode> selectedLists) throws ClassCastException {
    final DescriptiveStatistics descStats = new DescriptiveStatistics();
    for (final InMemoryHistoryNode node : selectedLists) {
      descStats.addValue(((Number) node.getValue()).doubleValue());
    }
    return descStats;
  }

  /**
   * filter snapshots by evenly selecting points across the interval
   *
   * @param selectedLists list of snapshots
   */
  private void generalSelectMetricHistory(final LinkedList<InMemoryHistoryNode> selectedLists) {
    logger.debug("selecting snapshots evenly from across the time interval");
    if (selectedLists.size() > this.numInstances) {
      final double step = (double) selectedLists.size() / this.numInstances;
      long nextIndex = 0, currentIndex = 0, numSelectedInstances = 1;
      final Iterator<InMemoryHistoryNode> ite = selectedLists.iterator();
      while (ite.hasNext()) {
        ite.next();
        if (currentIndex == nextIndex) {
          // keep this node and compute the next index to keep (round-half-up)
          nextIndex = (long) Math.floor(numSelectedInstances * step + 0.5);
          numSelectedInstances++;
        } else {
          ite.remove();
        }
        currentIndex++;
      }
    }
  }

  /**
   * Remove snapshots to maintain reporting interval
   *
   * @param metricName Name of the metric
   * @param firstAllowedDate End date of the interval
   */
  private void cleanUsingTime(final String metricName, final Date firstAllowedDate) {
    final LinkedBlockingDeque<InMemoryHistoryNode> deque =
        this.historyListMapping.get(metricName);
    if (deque != null) {
      synchronized (deque) {
        InMemoryHistoryNode firstNode = deque.peekFirst();
        long localCopyOfTimeWindow = 0;

        // go ahead with clean up using the latest possible value of the interval;
        // an interval change mid-cleanup will not affect the ongoing pass
        synchronized (this) {
          localCopyOfTimeWindow = this.timeWindow;
        }

        // removing objects older than the interval, measured back from firstAllowedDate
        // (the previous TimeUnit.MILLISECONDS.toMillis(...) wrapper was a no-op and is gone)
        while (firstNode != null
            && firstAllowedDate.getTime() - firstNode.getTimestamp().getTime()
            > localCopyOfTimeWindow) {
          deque.removeFirst();
          firstNode = deque.peekFirst();
        }
      }
    }
  }

  /**
   * Clear snapshot data structure {@inheritDoc}
   *
   * @see azkaban.metric.IMetricEmitter#purgeAllData()
   */
  @Override
  public void purgeAllData() throws MetricException {
    this.historyListMapping.clear();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/metrics/CommonMetrics.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.metrics;

import com.codahale.metrics.Counter;
import com.codahale.metrics.Meter;
import javax.inject.Inject;
import javax.inject.Singleton;

/**
 * This singleton class CommonMetrics is in charge of collecting varieties of metrics which are
 * accessed in both web and exec modules. That said, these metrics will be exposed in both Web
 * server and executor.
 */
@Singleton
public class CommonMetrics {

  public static final String FLOW_FAIL_METER_NAME = "flow-fail-meter";
  public static final String DISPATCH_FAIL_METER_NAME = "dispatch-fail-meter";
  public static final String DISPATCH_SUCCESS_METER_NAME = "dispatch-success-meter";
  public static final String SEND_EMAIL_FAIL_METER_NAME = "send-email-fail-meter";
  public static final String SEND_EMAIL_SUCCESS_METER_NAME = "send-email-success-meter";
  public static final String SUBMIT_FLOW_SUCCESS_METER_NAME = "submit-flow-success-meter";
  public static final String SUBMIT_FLOW_FAIL_METER_NAME = "submit-flow-fail-meter";
  public static final String SUBMIT_FLOW_SKIP_METER_NAME = "submit-flow-skip-meter";
  public static final String OOM_WAITING_JOB_COUNT_NAME = "OOM-waiting-job-count";
  public static final String UPLOAD_FAT_PROJECT_METER_NAME = "upload-fat-project-meter";
  public static final String UPLOAD_THIN_PROJECT_METER_NAME = "upload-thin-project-meter";

  // Counter (not a meter): tracks the number of jobs currently waiting due to OOM;
  // incremented/decremented as jobs enter/leave the waiting state.
  private Counter OOMWaitingJobCount;
  private final MetricsManager metricsManager;
  // Meters are created in setupAllMetrics() (called from the constructor), so they
  // cannot be declared final.
  private Meter flowFailMeter;
  private Meter dispatchFailMeter;
  private Meter dispatchSuccessMeter;
  private Meter sendEmailFailMeter;
  private Meter sendEmailSuccessMeter;
  private Meter submitFlowSuccessMeter;
  private Meter submitFlowFailMeter;
  private Meter submitFlowSkipMeter;
  private Meter uploadFatProjectMeter;
  private Meter uploadThinProjectMeter;

  @Inject
  public CommonMetrics(final MetricsManager metricsManager) {
    this.metricsManager = metricsManager;
    setupAllMetrics();
  }

  // Registers every meter/counter with the MetricsManager under its public name constant.
  private void setupAllMetrics() {
    this.flowFailMeter = this.metricsManager.addMeter(FLOW_FAIL_METER_NAME);
    this.dispatchFailMeter = this.metricsManager.addMeter(DISPATCH_FAIL_METER_NAME);
    this.dispatchSuccessMeter = this.metricsManager.addMeter(DISPATCH_SUCCESS_METER_NAME);
    this.sendEmailFailMeter = this.metricsManager.addMeter(SEND_EMAIL_FAIL_METER_NAME);
    this.sendEmailSuccessMeter = this.metricsManager.addMeter(SEND_EMAIL_SUCCESS_METER_NAME);
    this.submitFlowSuccessMeter = this.metricsManager.addMeter(SUBMIT_FLOW_SUCCESS_METER_NAME);
    this.submitFlowFailMeter = this.metricsManager.addMeter(SUBMIT_FLOW_FAIL_METER_NAME);
    this.submitFlowSkipMeter = this.metricsManager.addMeter(SUBMIT_FLOW_SKIP_METER_NAME);
    this.OOMWaitingJobCount = this.metricsManager.addCounter(OOM_WAITING_JOB_COUNT_NAME);
    this.uploadFatProjectMeter = this.metricsManager.addMeter(UPLOAD_FAT_PROJECT_METER_NAME);
    this.uploadThinProjectMeter = this.metricsManager.addMeter(UPLOAD_THIN_PROJECT_METER_NAME);
  }

  /**
   * Mark flowFailMeter when a flow is considered as FAILED. This method could be called by Web
   * Server or Executor, as they both detect flow failure.
   */
  public void markFlowFail() {
    this.flowFailMeter.mark();
  }

  /**
   * Mark dispatchFailMeter when web server fails to dispatch a flow to executor.
   */
  public void markDispatchFail() {
    this.dispatchFailMeter.mark();
  }

  /**
   * Mark dispatchSuccessMeter when web server successfully dispatches a flow to executor.
   */
  public void markDispatchSuccess() {
    this.dispatchSuccessMeter.mark();
  }

  /**
   * Mark sendEmailFailMeter when an email fails to be sent out.
   */
  public void markSendEmailFail() {
    this.sendEmailFailMeter.mark();
  }

  /**
   * Mark sendEmailSuccessMeter when an email is sent out successfully.
   */
  public void markSendEmailSuccess() {
    this.sendEmailSuccessMeter.mark();
  }

  /**
   * Mark submitFlowSuccessMeter when a flow is submitted for execution successfully.
   */
  public void markSubmitFlowSuccess() {
    this.submitFlowSuccessMeter.mark();
  }

  /**
   * Mark submitFlowSkipMeter when a flow submitted for execution is skipped.
   * (Fixed doc: previously misnamed submitFlowFailMeter — copy-paste from the method below.)
   */
  public void markSubmitFlowSkip() {
    this.submitFlowSkipMeter.mark();
  }

  /**
   * Mark submitFlowFailMeter when a flow fails to be submitted for execution.
   */
  public void markSubmitFlowFail() {
    this.submitFlowFailMeter.mark();
  }

  /**
   * Mark uploadFatProjectMeter when a fat project zip is uploaded to the web server.
   */
  public void markUploadFatProject() {
    this.uploadFatProjectMeter.mark();
  }

  /**
   * Mark uploadThinProjectMeter when a thin project zip is uploaded to the web server.
   */
  public void markUploadThinProject() {
    this.uploadThinProjectMeter.mark();
  }

  /**
   * Mark the occurrence of a job waiting event due to OOM
   */
  public void incrementOOMJobWaitCount() {
    this.OOMWaitingJobCount.inc();
  }

  /**
   * Unmark the occurrence of a job waiting event due to OOM
   */
  public void decrementOOMJobWaitCount() {
    this.OOMWaitingJobCount.dec();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ArchiveUnthinner.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.project;

import azkaban.project.validator.ValidationReport;
import azkaban.project.validator.ValidationStatus;
import azkaban.spi.Dependency;
import azkaban.spi.DependencyFile;
import azkaban.spi.FileValidationStatus;
import azkaban.utils.DependencyTransferException;
import azkaban.utils.DependencyTransferManager;
import azkaban.utils.FileIOUtils;
import azkaban.utils.InvalidHashException;
import azkaban.utils.Props;
import azkaban.utils.ValidatorUtils;
import com.google.common.collect.Sets;
import java.io.File;
import java.io.IOException;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
import java.util.stream.Collectors;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static azkaban.utils.ThinArchiveUtils.*;

/**
 * Handles processing of uploaded Thin Archives on the web server.
 *
 * This class exposes one public method, validateThinProject(), which provides all the meat of
 * the processing for Thin Archives. In summary it will:
 *
 * 1. Parse the startup-dependencies.json file.
 * 2. Generate a validation key from the project validators to use for querying the database.
 *    For any two projects that produce the same validation key, the validator is GUARANTEED to
 *    produce the same result for any given unique JAR that is in both projects. In other words,
 *    if mylib-1.0.0.jar is present in ProjectA and THE EXACT SAME FILE (mylib-1.0.0.jar) is also
 *    present in ProjectB and generating a validation key for each project results in an IDENTICAL
 *    key, the validation results for both JARs will ALSO be IDENTICAL. If the validation keys
 *    differ, the validation results for the JARs (despite being the same JAR) may or may not be
 *    identical — there is no guarantee in that case.
 * 3. Query the database to determine which dependencies have already been validated for the
 *    given validation key.
 * 4. Download NEW dependencies (not listed in the database) from the REMOTE origin.
 * 5. Validate the whole project with the NEW dependencies included.
 * 6. If the project failed validation with ValidationStatus.ERROR, return the report and stop
 *    there. Otherwise...
 * 7. Update the database to cache the results of the validator, specifically: the newly
 *    downloaded JARs the validator removed (REMOVED), and the newly downloaded JARs that were
 *    unmodified (VALID).
 * 8. Keep any files modified by the validator in the project archive.
 * 9. Remove entries from the startup-dependencies.json file for JARs that were modified during
 *    validation OR removed during validation OR cached as ValidationStatus.REMOVED in the
 *    database (from the query in step 3).
 * 10. Return the validation reports, including an additional report specifically for actions
 *     taken based on cached validation results (i.e. removing JARs cached as REMOVED).
 * 11. Celebrate, we're done! :)
 */
@Singleton
public class ArchiveUnthinner {

  private static final Logger log = LoggerFactory.getLogger(ArchiveUnthinner.class);

  public static final String CACHED_VALIDATOR_REPORT_NAME = "Cached Validator Actions";

  private final JdbcDependencyManager jdbcDependencyManager;
  private final DependencyTransferManager dependencyTransferManager;
  private final ValidatorUtils validatorUtils;
  // True only when the dependency transfer manager is enabled; thin archives are rejected
  // otherwise.
  private final boolean isEnabled;

  @Inject
  public ArchiveUnthinner(final ValidatorUtils validatorUtils,
      final JdbcDependencyManager jdbcDependencyManager,
      final DependencyTransferManager dependencyTransferManager) {
    this.validatorUtils = validatorUtils;
    this.jdbcDependencyManager = jdbcDependencyManager;
    this.dependencyTransferManager = dependencyTransferManager;
    // The dependency transfer manager must be enabled for thin archive to be supported
    this.isEnabled = dependencyTransferManager.isEnabled();
  }

  /**
   * Validates a thin project archive, downloading and validating any new dependencies and
   * reconciling the startup-dependencies.json file with the validator's actions.
   *
   * @param project current project
   * @param projectFolder current project folder
   * @param startupDependenciesFile startup-dependencies.json file for this project
   * @param additionalValidatorProps additional props for the validator
   * @return Map of Report Name -> Validation Report
   * @throws ProjectManagerException if thin archive support is disabled, the dependency spec
   *         cannot be parsed, the DB cannot be reached, or a download fails
   */
  public Map<String, ValidationReport> validateThinProject(final Project project,
      final File projectFolder, final File startupDependenciesFile,
      final Props additionalValidatorProps) {
    if (!isEnabled) {
      throw new ProjectManagerException("Thin archive support is not yet enabled on this cluster.");
    }

    Set<Dependency> dependencies = getDependenciesFromSpec(startupDependenciesFile);

    String validationKey =
        this.validatorUtils.getCacheKey(project, projectFolder, additionalValidatorProps);

    // Find the cached validation status (or NEW if the dep isn't cached) for each dependency.
    Map<Dependency, FileValidationStatus> depsToValidationStatus =
        getValidationStatuses(dependencies, validationKey);
    // removedCachedDeps: dependencies that have been processed before and are blacklisted (so
    // should be removed)
    Set<Dependency> removedCachedDeps =
        filterValidationStatus(depsToValidationStatus, FileValidationStatus.REMOVED);
    // validCachedDeps: dependencies that are in storage and already verified to be valid
    Set<Dependency> validCachedDeps =
        filterValidationStatus(depsToValidationStatus, FileValidationStatus.VALID);
    // newDeps: dependencies that are not in storage and need to be verified
    Set<Dependency> newDeps =
        filterValidationStatus(depsToValidationStatus, FileValidationStatus.NEW);

    // Download the new dependencies
    final Set<DependencyFile> downloadedDeps = downloadDependencyFiles(projectFolder, newDeps);

    // Validate the project
    Map<String, ValidationReport> reports =
        this.validatorUtils.validateProject(project, projectFolder, additionalValidatorProps);
    if (reports.values().stream().anyMatch(r -> r.getStatus() == ValidationStatus.ERROR)) {
      // No point continuing, this project has been rejected, so just return the validation report
      // and don't waste any more time.
      return reports;
    }

    // Find which dependencies were removed, modified or untouched by the validator
    // pathToDownloadedDeps is created for performance reasons to allow getDepsFromReports to run
    // in O(n) time instead of O(n^2).
    Map<String, DependencyFile> pathToDownloadedDeps = getPathToDepFileMap(downloadedDeps);
    Set<DependencyFile> removedDownloadedDeps =
        getDepsFromReports(reports, pathToDownloadedDeps, ValidationReport::getRemovedFiles);
    Set<DependencyFile> modifiedDownloadedDeps =
        getDepsFromReports(reports, pathToDownloadedDeps, ValidationReport::getModifiedFiles);
    Set<DependencyFile> untouchedDownloadedDeps =
        Sets.difference(downloadedDeps, Sets.union(removedDownloadedDeps, modifiedDownloadedDeps));

    updateValidationStatuses(untouchedDownloadedDeps, removedDownloadedDeps, validationKey);

    // Create a new report that will include details of actions taken based on previous cached
    // validation actions.
    ValidationReport cacheReport = new ValidationReport();
    // Add warnings for files removed due to a cached validation status of REMOVED. Note that we
    // don't have to manually add warnings for removedDownloadedDeps because the validator should
    // already create warnings for them during validation. The dependencies that have a cached
    // validation status of REMOVED are not actually downloaded and thus are not passed through
    // the validator so no warnings will generated for them - so we have to add our own.
    cacheReport.addWarningMsgs(getWarningsFromRemovedDeps(removedCachedDeps));

    // See if any downloaded deps were modified/removed OR if there are any cached removed
    // dependencies
    if (untouchedDownloadedDeps.size() < downloadedDeps.size() || removedCachedDeps.size() > 0) {
      // Either one or more of the dependencies we downloaded was removed/modified during
      // validation OR there are cached removed dependencies. Either way we need to remove them
      // from the startup-dependencies.json file.

      // Indicate in the cacheReport that we modified startup-dependencies.json
      cacheReport.addModifiedFile(startupDependenciesFile);

      // Get the final list of startup dependencies that will be downloadable from storage and
      // update the startup-dependencies.json file to include only them. Any dependencies
      // originally listed in the startup-dependencies.json file that will be removed during the
      // update must have either been removed by the validator - or will be included in the zip
      // itself.
      Set<Dependency> finalDeps = Sets.union(validCachedDeps, untouchedDownloadedDeps);
      rewriteStartupDependencies(startupDependenciesFile, finalDeps);
    }

    // Delete from the project untouched downloaded dependencies
    untouchedDownloadedDeps.forEach(d -> d.getFile().delete());

    // Add the cacheReport to the list of reports
    reports.put(CACHED_VALIDATOR_REPORT_NAME, cacheReport);

    return reports;
  }

  /** Overwrites the startup-dependencies.json file with the given final dependency set. */
  private void rewriteStartupDependencies(final File startupDependenciesFile,
      final Set<Dependency> finalDependencies) {
    // Write this list back to the startup-dependencies.json file
    try {
      writeStartupDependencies(startupDependenciesFile, finalDependencies);
    } catch (IOException e) {
      throw new ProjectManagerException("Error while writing new startup-dependencies.json", e);
    }
  }

  /** Parses the startup-dependencies.json spec into a set of dependencies. */
  private Set<Dependency> getDependenciesFromSpec(final File startupDependenciesFile) {
    try {
      return parseStartupDependencies(startupDependenciesFile);
    } catch (IOException e) {
      throw new ProjectManagerException("Unable to open or parse startup-dependencies.json. Please ensure "
          + "that the JSON contained is valid and properly conforms to the " + "spec format.", e);
    } catch (InvalidHashException e) {
      throw new ProjectManagerException("One or more of the SHA1 hashes in startup-dependencies.json was invalid", e);
    }
  }

  /** Looks up the cached validation status of each dependency for the given validation key. */
  private Map<Dependency, FileValidationStatus> getValidationStatuses(final Set<Dependency> deps,
      final String validationKey) {
    try {
      return this.jdbcDependencyManager.getValidationStatuses(deps, validationKey);
    } catch (SQLException e) {
      // Chain the SQLException so the underlying DB failure is not lost.
      throw new ProjectManagerException(
          String.format("Unable to query DB for validation statuses "
              + "for project with validationKey %s", validationKey), e);
    }
  }

  /** Persists the validator's verdict (VALID / REMOVED) for newly validated dependencies. */
  private void updateValidationStatuses(final Set<? extends Dependency> untouchedDeps,
      final Set<? extends Dependency> removedDeps, final String validationKey) {
    // untouchedDeps are new dependencies that we have just validated and found to be VALID.
    // removedDeps are new dependencies that we have just validated and found to be REMOVED.
    Map<Dependency, FileValidationStatus> depValidationStatuses = new HashMap<>();
    // NOTE: .map(Dependency::makeCopy) is to ensure our map keys are actually of type Dependency
    // not DependencyFile
    untouchedDeps.stream().map(Dependency::copy)
        .forEach(d -> depValidationStatuses.put(d, FileValidationStatus.VALID));
    removedDeps.stream().map(Dependency::copy)
        .forEach(d -> depValidationStatuses.put(d, FileValidationStatus.REMOVED));
    try {
      this.jdbcDependencyManager.updateValidationStatuses(depValidationStatuses, validationKey);
    } catch (SQLException e) {
      // Chain the SQLException so the underlying DB failure is not lost.
      throw new ProjectManagerException(
          String.format("Unable to update DB for validation statuses "
              + "for project with validationKey %s", validationKey), e);
    }
  }

  /** Downloads each dependency into the project folder at its declared destination path. */
  private Set<DependencyFile> downloadDependencyFiles(final File projectFolder,
      final Set<Dependency> toDownload) {
    final Set<DependencyFile> depFiles = toDownload.stream().map(d -> {
      File downloadedJar = new File(projectFolder, d.getDestination() + File.separator + d.getFileName());
      return d.makeDependencyFile(downloadedJar);
    }).collect(Collectors.toSet());
    try {
      this.dependencyTransferManager.downloadAllDependencies(depFiles);
    } catch (DependencyTransferException e) {
      throw new ProjectManagerException(e.getMessage(), e.getCause());
    }
    return depFiles;
  }

  /** Maps files extracted from reports back to the downloaded dependencies, in O(n). */
  private Set<DependencyFile> getDepsFromReports(final Map<String, ValidationReport> reports,
      final Map<String, DependencyFile> pathToDep, final Function<ValidationReport, Set<File>> fn) {
    return reports.values()
        .stream()
        .map(fn)
        .flatMap(Collection::stream)
        .map(f -> pathToDep.get(FileIOUtils.getCanonicalPath(f)))
        .filter(Objects::nonNull) // Some modified/removed files will not be a dependency (i.e. snapshot jar)
        .collect(Collectors.toSet());
  }

  /** Indexes dependency files by canonical path for fast lookup. */
  private Map<String, DependencyFile> getPathToDepFileMap(final Set<DependencyFile> depFiles) {
    return depFiles
        .stream()
        .collect(Collectors.toMap(d -> FileIOUtils.getCanonicalPath(d.getFile()), e -> e));
  }

  /** Returns the dependencies whose cached validation status equals {@code status}. */
  private Set<Dependency> filterValidationStatus(
      final Map<Dependency, FileValidationStatus> validationStatuses,
      final FileValidationStatus status) {
    return validationStatuses
        .keySet()
        .stream()
        .filter(d -> validationStatuses.get(d) == status)
        .collect(Collectors.toSet());
  }

  /** Builds user-facing warnings for dependencies removed due to a cached REMOVED status. */
  private Set<String> getWarningsFromRemovedDeps(final Set<? extends Dependency> removedDeps) {
    return removedDeps
        .stream()
        .map(d -> String.format("Removed blacklisted file %s", d.getFileName()))
        .collect(Collectors.toSet());
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/AzkabanFlow.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package azkaban.project;

import azkaban.Constants;
import azkaban.utils.Props;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * Flow-level definition of the DAG, introduced with the Flow 2.0 design.
 *
 * An AzkabanFlow is an {@link AzkabanNode} of type {@code Constants.FLOW_NODE_TYPE} that holds
 * its child nodes (keyed by node name), an optional {@link FlowTrigger}, and the flow-level
 * properties. Instances are created exclusively through {@link AzkabanFlowBuilder}.
 */
public class AzkabanFlow extends AzkabanNode {

  // Child nodes of this flow, keyed by node name (immutable once built).
  private final Map<String, AzkabanNode> nodes;
  private final FlowTrigger flowTrigger;

  private AzkabanFlow(final String name, final Props props, final String condition,
      final Map<String, AzkabanNode> nodes, final List<String> dependsOn,
      final FlowTrigger flowTrigger) {
    super(name, Constants.FLOW_NODE_TYPE, props, condition, dependsOn);
    this.nodes = nodes;
    this.flowTrigger = flowTrigger;
  }

  /** Returns the child node with the given name, or null if no such node exists. */
  public AzkabanNode getNode(final String name) {
    return this.nodes.get(name);
  }

  /** Returns all child nodes of this flow, keyed by node name. */
  public Map<String, AzkabanNode> getNodes() {
    return this.nodes;
  }

  /** Returns the trigger associated with this flow, or null if none was set. */
  public FlowTrigger getFlowTrigger() {
    return this.flowTrigger;
  }

  /** Fluent builder for {@link AzkabanFlow}. */
  public static class AzkabanFlowBuilder {

    private String name;
    private Props props;
    private String condition;
    private List<String> dependsOn;
    private Map<String, AzkabanNode> nodes;
    private FlowTrigger flowTrigger;

    public AzkabanFlowBuilder name(final String name) {
      this.name = name;
      return this;
    }

    public AzkabanFlowBuilder props(final Props props) {
      this.props = props;
      return this;
    }

    public AzkabanFlowBuilder condition(final String condition) {
      this.condition = condition;
      return this;
    }

    public AzkabanFlowBuilder dependsOn(final List<String> dependsOn) {
      // A flow may legitimately have no dependencies; normalize null to an empty list.
      this.dependsOn =
          dependsOn == null ? Collections.emptyList() : ImmutableList.copyOf(dependsOn);
      return this;
    }

    public AzkabanFlowBuilder nodes(final Collection<? extends AzkabanNode> azkabanNodes) {
      // Index nodes by name; a later node with a duplicate name silently replaces the earlier
      // one, matching the original HashMap-based behavior.
      final Map<String, AzkabanNode> byName = new HashMap<>();
      for (final AzkabanNode candidate : azkabanNodes) {
        byName.put(candidate.getName(), candidate);
      }
      this.nodes = ImmutableMap.copyOf(byName);
      return this;
    }

    public AzkabanFlowBuilder flowTrigger(final FlowTrigger flowTrigger) {
      this.flowTrigger = flowTrigger;
      return this;
    }

    /** Assembles the immutable AzkabanFlow from the accumulated state. */
    public AzkabanFlow build() {
      return new AzkabanFlow(this.name, this.props, this.condition, this.nodes, this.dependsOn,
          this.flowTrigger);
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/AzkabanJob.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package azkaban.project;

import azkaban.utils.Props;
import com.google.common.collect.ImmutableList;
import java.util.Collections;
import java.util.List;

/**
 * The smallest individual unit of execution in Azkaban.
 *
 * An AzkabanJob is a leaf {@link AzkabanNode} carrying its job type and the related properties.
 * Instances are created exclusively through {@link AzkabanJobBuilder}.
 */
public class AzkabanJob extends AzkabanNode {

  private AzkabanJob(final String name, final String type, final Props props,
      final String condition, final List<String> dependsOn) {
    super(name, type, props, condition, dependsOn);
  }

  /** Fluent builder for {@link AzkabanJob}. */
  public static class AzkabanJobBuilder {

    private String name;
    private String type;
    private Props props;
    private String condition;
    private List<String> dependsOn;

    public AzkabanJobBuilder name(final String name) {
      this.name = name;
      return this;
    }

    public AzkabanJobBuilder type(final String type) {
      this.type = type;
      return this;
    }

    public AzkabanJobBuilder props(final Props props) {
      this.props = props;
      return this;
    }

    public AzkabanJobBuilder condition(final String condition) {
      this.condition = condition;
      return this;
    }

    public AzkabanJobBuilder dependsOn(final List<String> dependsOn) {
      // A node may or may not have dependencies; normalize null to an empty list and snapshot
      // any provided list so later caller mutation cannot leak into the built job.
      this.dependsOn =
          dependsOn == null ? Collections.emptyList() : ImmutableList.copyOf(dependsOn);
      return this;
    }

    /** Assembles the immutable AzkabanJob from the accumulated state. */
    public AzkabanJob build() {
      return new AzkabanJob(this.name, this.type, this.props, this.condition, this.dependsOn);
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/AzkabanNode.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package azkaban.project;

import static java.util.Objects.requireNonNull;

import azkaban.utils.Props;
import java.util.List;

/**
 * A unit of execution that could be either a job or a flow.
 *
 * Holds the attributes common to both: name, type, properties, an optional run condition,
 * and the names of the nodes this one depends on. Name, type, and props are mandatory;
 * condition and dependsOn may be null.
 */
public abstract class AzkabanNode {

  protected final String name;
  protected final String type;
  protected final Props props;
  // Optional run condition; null when unconditional.
  protected final String condition;
  // Names of upstream nodes; may be null.
  protected final List<String> dependsOn;

  public AzkabanNode(final String name, final String type, final Props props,
      final String condition, final List<String> dependsOn) {
    // name, type and props are required for every node.
    this.name = requireNonNull(name);
    this.type = requireNonNull(type);
    this.props = requireNonNull(props);
    this.condition = condition;
    this.dependsOn = dependsOn;
  }

  /** Returns the node's name. */
  public String getName() {
    return this.name;
  }

  /** Returns the node's type (job type, or the flow marker type). */
  public String getType() {
    return this.type;
  }

  /** Returns the node's properties. */
  public Props getProps() {
    return this.props;
  }

  /** Returns the run condition, or null if the node runs unconditionally. */
  public String getCondition() {
    return this.condition;
  }

  /** Returns the names of the nodes this node depends on; may be null. */
  public List<String> getDependsOn() {
    return this.dependsOn;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/AzkabanProjectLoader.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */
package azkaban.project;

import static azkaban.utils.ThinArchiveUtils.*;
import static java.util.Objects.requireNonNull;

import azkaban.Constants;
import azkaban.Constants.ConfigurationKeys;
import azkaban.db.DatabaseOperator;
import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutionReference;
import azkaban.executor.ExecutorLoader;
import azkaban.executor.ExecutorManagerException;
import azkaban.flow.Flow;
import azkaban.metrics.CommonMetrics;
import azkaban.project.FlowLoaderUtils.DirFilter;
import azkaban.project.FlowLoaderUtils.SuffixFilter;
import azkaban.project.ProjectLogEvent.EventType;
import azkaban.project.validator.ValidationReport;
import azkaban.project.validator.ValidationStatus;
import azkaban.spi.Storage;
import azkaban.storage.ProjectStorageManager;
import azkaban.user.User;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.utils.Utils;
import azkaban.utils.ValidatorUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.stream.Collectors;
import java.util.zip.ZipFile;
import javax.inject.Inject;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Handles the downloading and uploading of projects.
 */
class AzkabanProjectLoader {

  private static final Logger log = LoggerFactory.getLogger(AzkabanProjectLoader.class);

  private static final String DIRECTORY_FLOW_REPORT_KEY = "Directory Flow";
  private static final String TMP_MODIFIED_ZIP_POSTFIX = ".byte-ray.new";

  private final Props props;
  private final CommonMetrics commonMetrics;
  private final ProjectLoader projectLoader;
  private final ProjectStorageManager projectStorageManager;
  private final FlowLoaderFactory flowLoaderFactory;
  private final DatabaseOperator dbOperator;
  private final ArchiveUnthinner archiveUnthinner;
  private final File tempDir;
  private final int projectVersionRetention;
  private final ExecutorLoader executorLoader;
  private final Storage storage;
  private final ValidatorUtils validatorUtils;

  @Inject
  AzkabanProjectLoader(final Props props, final CommonMetrics commonMetrics,
      final ProjectLoader projectLoader, final ProjectStorageManager projectStorageManager,
      final FlowLoaderFactory flowLoaderFactory, final ExecutorLoader executorLoader,
      final DatabaseOperator databaseOperator, final Storage storage,
      final ArchiveUnthinner archiveUnthinner, final ValidatorUtils validatorUtils) {
    this.props = requireNonNull(props, "Props is null");
    this.projectLoader = requireNonNull(projectLoader, "project Loader is null");
    this.projectStorageManager = requireNonNull(projectStorageManager, "Storage Manager is null");
    this.flowLoaderFactory = requireNonNull(flowLoaderFactory, "Flow Loader Factory is null");
    this.commonMetrics = commonMetrics;
    this.dbOperator = databaseOperator;
    this.storage = storage;
    this.archiveUnthinner = archiveUnthinner;
    this.validatorUtils = validatorUtils;

    this.tempDir = new File(props.getString(ConfigurationKeys.PROJECT_TEMP_DIR, "temp"));
    this.executorLoader = executorLoader;
    if (!this.tempDir.exists()) {
      log.info("Creating temp dir: " + this.tempDir.getAbsolutePath());
      this.tempDir.mkdirs();
    } else {
      log.info("Using temp dir: " + this.tempDir.getAbsolutePath());
    }
    this.projectVersionRetention = props.getInt(ConfigurationKeys.PROJECT_VERSION_RETENTION, 3);
    log.info("Project version retention is set to " + this.projectVersionRetention);
  }

  /**
   * Unzips, validates, and persists an uploaded project archive.
   *
   * @param project the project being uploaded to
   * @param archive the uploaded zip archive
   * @param fileType the archive type; only "zip" is supported
   * @param uploader the user performing the upload
   * @param additionalProps extra props passed to the validators
   * @param uploaderIPAddr IP address of the uploader, recorded with the upload
   * @return Map of Report Name -> Validation Report
   * @throws ProjectManagerException on unzip, validation, or persistence failure
   * @throws ExecutorManagerException if cleanup of old versions fails
   */
  public Map<String, ValidationReport> uploadProject(final Project project,
      final File archive, final String fileType, final User uploader,
      final Props additionalProps, final String uploaderIPAddr)
      throws ProjectManagerException, ExecutorManagerException {
    log.info("Uploading files to " + project.getName());
    final Map<String, ValidationReport> reports;

    File folder = null;
    final FlowLoader loader;

    try {
      folder = unzipProject(archive, fileType);

      final File startupDependencies = getStartupDependenciesFile(folder);
      // A startup-dependencies.json file marks the archive as a thin archive.
      final boolean isThinProject = startupDependencies.exists();

      reports = isThinProject
          ? this.archiveUnthinner
              .validateThinProject(project, folder, startupDependencies, additionalProps)
          : this.validatorUtils.validateProject(project, folder, additionalProps);

      // If any files in the project folder have been modified or removed, update the project zip
      if (reports.values().stream()
          .anyMatch(r -> !r.getModifiedFiles().isEmpty() || !r.getRemovedFiles().isEmpty())) {
        updateProjectZip(archive, folder);
      }

      loader = this.flowLoaderFactory.createFlowLoader(folder);
      reports.put(DIRECTORY_FLOW_REPORT_KEY, loader.loadProjectFlow(project, folder));

      // Check the validation report.
      if (!isReportStatusValid(reports, project)) {
        FlowLoaderUtils.cleanUpDir(folder);
        return reports;
      }

      // Upload the project to DB and storage.
      final File startupDependenciesOrNull = isThinProject ? startupDependencies : null;
      persistProject(project, loader, archive, folder, startupDependenciesOrNull, uploader,
          uploaderIPAddr);

      if (isThinProject) {
        // Mark that we uploaded a thin zip in the metrics.
        commonMetrics.markUploadThinProject();
      } else {
        commonMetrics.markUploadFatProject();
      }
    } finally {
      FlowLoaderUtils.cleanUpDir(folder);
    }

    // Clean up project old installations after new project is uploaded successfully.
    cleanUpProjectOldInstallations(project);

    return reports;
  }

  /**
   * Rebuilds the uploaded zip from the (possibly validator-modified) project folder and swaps it
   * in place of the original archive.
   */
  private void updateProjectZip(final File zipFile, final File folder) {
    try {
      File newZipFile = new File(zipFile.getAbsolutePath().concat(TMP_MODIFIED_ZIP_POSTFIX));
      Utils.zipFolderContent(folder, newZipFile);
      FileUtils.deleteQuietly(zipFile);
      FileUtils.moveFile(newZipFile, zipFile);
    } catch (IOException e) {
      folder.deleteOnExit();
      throw new ProjectManagerException("Error when generating the modified zip.", e);
    }
  }

  /**
   * Unpacks the archive into a temp directory. Only "zip" archives are supported.
   *
   * @throws ProjectManagerException for an unknown/unsupported type or an unzip failure
   */
  private File unzipProject(final File archive, final String fileType)
      throws ProjectManagerException {
    final File file;
    try {
      if (fileType == null) {
        throw new ProjectManagerException("Unknown file type for " + archive.getName());
      } else if ("zip".equals(fileType)) {
        file = unzipFile(archive);
      } else {
        throw new ProjectManagerException("Unsupported archive type for file " + archive.getName());
      }
    } catch (final IOException e) {
      throw new ProjectManagerException("Error unzipping file.", e);
    }
    return file;
  }

  /** Returns false (and logs) iff any report carries ValidationStatus.ERROR. */
  private boolean isReportStatusValid(final Map<String, ValidationReport> reports,
      final Project project) {
    ValidationStatus status = ValidationStatus.PASS;
    for (final Entry<String, ValidationReport> report : reports.entrySet()) {
      if (report.getValue().getStatus().compareTo(status) > 0) {
        status = report.getValue().getStatus();
      }
    }
    if (status == ValidationStatus.ERROR) {
      log.error("Error found in uploading to " + project.getName());
      return false;
    }
    return true;
  }

  /**
   * Persists the validated project: uploads the archive to storage, writes flows and properties
   * to the DB, bumps the project version, and records the upload event. Synchronized on the
   * project to serialize concurrent uploads of the same project.
   */
  private void persistProject(final Project project, final FlowLoader loader, final File archive,
      final File projectDir, final File startupDependencies, final User uploader,
      final String uploaderIPAddr) throws ProjectManagerException {
    synchronized (project) {
      final int newProjectVersion = this.projectLoader.getLatestProjectVersion(project) + 1;
      final Map<String, Flow> flows = loader.getFlowMap();
      for (final Flow flow : flows.values()) {
        flow.setProjectId(project.getId());
        flow.setVersion(newProjectVersion);
      }

      this.projectStorageManager.uploadProject(project, newProjectVersion, archive,
          startupDependencies, uploader, uploaderIPAddr);

      log.info("Uploading flow to db for project " + archive.getName());
      this.projectLoader.uploadFlows(project, newProjectVersion, flows.values());
      log.info("Changing project versions for project " + archive.getName());
      this.projectLoader.changeProjectVersion(project, newProjectVersion, uploader.getUserId());
      project.setFlows(flows);

      if (loader instanceof DirectoryFlowLoader) {
        final DirectoryFlowLoader directoryFlowLoader = (DirectoryFlowLoader) loader;
        log.info("Uploading Job properties");
        this.projectLoader.uploadProjectProperties(project, new ArrayList<>(
            directoryFlowLoader.getJobPropsMap().values()));
        log.info("Uploading Props properties");
        this.projectLoader.uploadProjectProperties(project, directoryFlowLoader.getPropsList());
      } else if (loader instanceof DirectoryYamlFlowLoader) {
        uploadFlowFilesRecursively(projectDir, project, newProjectVersion);
      } else {
        throw new ProjectManagerException("Invalid type of flow loader.");
      }

      this.projectLoader.postEvent(project, EventType.UPLOADED, uploader.getUserId(),
          "Uploaded project files zip " + archive.getName());
    }
  }

  /** Uploads every .flow file under projectDir (recursing into subdirectories) to the DB. */
  private void uploadFlowFilesRecursively(final File projectDir, final Project project,
      final int newProjectVersion) {
    for (final File file : projectDir.listFiles(new SuffixFilter(Constants.FLOW_FILE_SUFFIX))) {
      final int newFlowVersion = this.projectLoader
          .getLatestFlowVersion(project.getId(), newProjectVersion, file.getName()) + 1;
      this.projectLoader
          .uploadFlowFile(project.getId(), newProjectVersion, file, newFlowVersion);
    }
    for (final File file : projectDir.listFiles(new DirFilter())) {
      uploadFlowFilesRecursively(file, project, newProjectVersion);
    }
  }

  /**
   * Deletes project versions older than the retention window, sparing any version that still has
   * unfinished executions.
   */
  private void cleanUpProjectOldInstallations(final Project project)
      throws ProjectManagerException, ExecutorManagerException {
    log.info("Cleaning up old install files older than "
        + (project.getVersion() - this.projectVersionRetention));
    final Map<Integer, Pair<ExecutionReference, ExecutableFlow>> unfinishedFlows =
        this.executorLoader.fetchUnfinishedFlowsMetadata();
    final List<Integer> versionsWithUnfinishedExecutions = unfinishedFlows.values()
        .stream().map(pair -> pair.getSecond())
        .filter(exflow -> exflow.getProjectId() == project.getId())
        .map(exflow -> exflow.getVersion())
        .collect(Collectors.toList());
    this.projectLoader.cleanOlderProjectVersion(project.getId(),
        project.getVersion() - this.projectVersionRetention, versionsWithUnfinishedExecutions);
    // Clean up storage
    this.projectStorageManager
        .cleanupProjectArtifacts(project.getId(), versionsWithUnfinishedExecutions);
  }

  /**
   * Unpacks the zip archive into a fresh temp directory.
   *
   * Uses try-with-resources so the ZipFile handle is released even if Utils.unzip throws
   * (the original code leaked the handle on failure).
   */
  private File unzipFile(final File archiveFile) throws IOException {
    try (final ZipFile zipfile = new ZipFile(archiveFile)) {
      final File unzipped = Utils.createTempDir(this.tempDir);
      Utils.unzip(zipfile, unzipped);
      return unzipped;
    }
  }

  /**
   * Fetches the archive for the given project version from storage; -1 means the latest version.
   */
  public ProjectFileHandler getProjectFile(final Project project, int version)
      throws ProjectManagerException {
    if (version == -1) {
      version = this.projectLoader.getLatestProjectVersion(project);
    }
    return this.projectStorageManager.getProjectFile(project.getId(), version);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/CronSchedule.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.project;

import com.google.common.base.Preconditions;
import java.io.Serializable;
import java.util.TimeZone;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

/**
 * Immutable, logical representation of a cron-based schedule used to schedule a trigger.
 * Once constructed it cannot be changed.
 */
public class CronSchedule implements Serializable {

  /**
   * CAUTION : Please do NOT change this serialVersionUID as it may break
   * backward compatibility.
   */
  private static final long serialVersionUID = -1330280892166841227L;

  private static final String DEFAULT_TIMEZONE = TimeZone.getDefault().getID();

  private final String cronExpression;
  // May be null; getTimeZone() substitutes the JVM default in that case.
  private final String timeZone;

  /**
   * Builds a schedule in the JVM's default time zone.
   *
   * @throws IllegalArgumentException if cronExpression is null or blank
   */
  public CronSchedule(final String cronExpression) {
    this(cronExpression, DEFAULT_TIMEZONE);
  }

  /**
   * Builds a schedule in the given time zone.
   *
   * @throws IllegalArgumentException if cronExpression is null or blank
   */
  public CronSchedule(final String cronExpression, String timeZone) {
    Preconditions.checkArgument(StringUtils.isNotBlank(cronExpression));
    this.cronExpression = cronExpression;
    //todo chengren311: check cronExpression is valid: quartz has CronExpression.isValidExpression()
    this.timeZone = timeZone;
  }

  /** Returns the raw cron expression. */
  public String getCronExpression() {
    return this.cronExpression;
  }

  /** Returns the configured time zone ID, falling back to the JVM default when unset. */
  public String getTimeZone() {
    return this.timeZone == null ? DEFAULT_TIMEZONE : this.timeZone;
  }

  /** Two schedules are equal when both their cron expression and effective time zone match. */
  @Override
  public boolean equals(final Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final CronSchedule other = (CronSchedule) o;
    return new EqualsBuilder()
        .append(this.cronExpression, other.cronExpression)
        .append(this.getTimeZone(), other.getTimeZone())
        .isEquals();
  }

  // Hash intentionally derives from the cron expression only; this still satisfies the
  // equals/hashCode contract (equal objects share a cron expression, hence a hash).
  @Override
  public int hashCode() {
    return new HashCodeBuilder(17, 37)
        .append(this.cronExpression)
        .toHashCode();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/DirectoryFlowLoader.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.project;

import azkaban.flow.CommonJobProperties;
import azkaban.flow.ConditionOnJobStatus;
import azkaban.flow.Edge;
import azkaban.flow.Flow;
import azkaban.flow.FlowProps;
import azkaban.flow.Node;
import azkaban.flow.SpecialJobTypes;
import azkaban.project.FlowLoaderUtils.DirFilter;
import azkaban.project.FlowLoaderUtils.SuffixFilter;
import azkaban.project.validator.ValidationReport;
import azkaban.utils.Props;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads job (.job) and properties (.properties) files from a project directory
 * and assembles them into {@link Flow} objects (the Azkaban flow-1.0 format).
 *
 * <p>Not thread-safe: {@link #loadProjectFlow(Project, File)} resets and mutates
 * instance state, so each load should use its own instance or external
 * synchronization.
 */
public class DirectoryFlowLoader implements FlowLoader {

  private static final String PROPERTY_SUFFIX = ".properties";
  private static final String JOB_SUFFIX = ".job";
  private static final Logger logger = LoggerFactory.getLogger(DirectoryFlowLoader.class);

  private final Props props;
  private final Set<String> errors = new HashSet<>();
  private final Map<String, Flow> flowMap = new HashMap<>();
  private HashSet<String> rootNodes;
  private HashMap<String, Node> nodeMap;
  private HashMap<String, Map<String, Edge>> nodeDependencies;
  private HashMap<String, Props> jobPropsMap;
  // Flow dependencies for embedded flows.
  private HashMap<String, Set<String>> flowDependencies;
  private ArrayList<FlowProps> flowPropsList;
  private ArrayList<Props> propsList;
  private Set<String> duplicateJobs;

  /**
   * Creates a new DirectoryFlowLoader.
   *
   * @param props Properties to add.
   */
  public DirectoryFlowLoader(final Props props) {
    this.props = props;
  }

  /**
   * Returns the flow map constructed from the loaded flows.
   *
   * @return Map of flow name to Flow.
   */
  @Override
  public Map<String, Flow> getFlowMap() {
    return this.flowMap;
  }

  /**
   * Returns errors caught when loading flows.
   *
   * @return Set of error strings.
   */
  @Override
  public Set<String> getErrors() {
    return this.errors;
  }

  /**
   * Returns job properties.
   *
   * @return Map of job name to properties.
   */
  public HashMap<String, Props> getJobPropsMap() {
    return this.jobPropsMap;
  }

  /**
   * Returns list of properties.
   *
   * @return List of Props.
   */
  public ArrayList<Props> getPropsList() {
    return this.propsList;
  }

  /**
   * Loads all project flows from the directory.
   *
   * @param project The project.
   * @param projectDir The directory to load flows from.
   * @return the validation report.
   */
  @Override
  public ValidationReport loadProjectFlow(final Project project, final File projectDir) {
    // Reset per-load state so a loader instance can be reused sequentially.
    this.propsList = new ArrayList<>();
    this.flowPropsList = new ArrayList<>();
    this.jobPropsMap = new HashMap<>();
    this.nodeMap = new HashMap<>();
    this.duplicateJobs = new HashSet<>();
    this.nodeDependencies = new HashMap<>();
    this.rootNodes = new HashSet<>();
    this.flowDependencies = new HashMap<>();

    // Load all the props files and create the Node objects
    loadProjectFromDir(projectDir.getPath(), projectDir, null);

    // Create edges and find missing dependencies
    resolveDependencies();

    // Create the flows.
    buildFlowsFromDependencies();

    // Resolve embedded flows
    resolveEmbeddedFlows();

    FlowLoaderUtils.checkJobProperties(project.getId(), this.props, this.jobPropsMap, this.errors);

    return FlowLoaderUtils.generateFlowLoaderReport(this.errors);
  }

  /**
   * Recursively walks {@code dir}, loading .properties files (layered onto
   * {@code parent}) and .job files (creating one {@link Node} per job).
   * Duplicate job names anywhere in the project are recorded as errors and
   * the colliding jobs are dropped.
   *
   * @param base the project root path used to compute source paths relative to it
   * @param dir the directory currently being scanned
   * @param parent the accumulated parent props; reassigned as each properties file layers on
   */
  private void loadProjectFromDir(final String base, final File dir, Props parent) {
    final File[] propertyFiles = dir.listFiles(new SuffixFilter(PROPERTY_SUFFIX));
    // Sort for a deterministic layering order of the properties files.
    Arrays.sort(propertyFiles);

    for (final File file : propertyFiles) {
      final String relative = getRelativeFilePath(base, file.getPath());
      try {
        parent = new Props(parent, file);
        parent.setSource(relative);

        final FlowProps flowProps = new FlowProps(parent);
        this.flowPropsList.add(flowProps);
      } catch (final IOException e) {
        this.errors.add("Error loading properties " + file.getName() + ":" + e.getMessage());
      }

      logger.info("Adding " + relative);
      this.propsList.add(parent);
    }

    // Load all Job files. If there's a duplicate name, then we don't load
    final File[] jobFiles = dir.listFiles(new SuffixFilter(JOB_SUFFIX));
    for (final File file : jobFiles) {
      final String jobName = getNameWithoutExtension(file);
      try {
        if (!this.duplicateJobs.contains(jobName)) {
          if (this.jobPropsMap.containsKey(jobName)) {
            // Second sighting of this name: drop both copies and remember the name
            // so any further sightings are ignored too.
            this.errors.add("Duplicate job names found '" + jobName + "'.");
            this.duplicateJobs.add(jobName);
            this.jobPropsMap.remove(jobName);
            this.nodeMap.remove(jobName);
          } else {
            final Props prop = new Props(parent, file);
            final String relative = getRelativeFilePath(base, file.getPath());
            prop.setSource(relative);

            final Node node = new Node(jobName);
            final String type = prop.getString("type", null);
            if (type == null) {
              this.errors.add("Job doesn't have type set '" + jobName + "'.");
            }

            node.setType(type);

            final String condition = prop.getString("condition", null);
            if (null != condition && !condition.isEmpty()) {
              logger.info(String.format("Setting condition %s for job %s", condition, jobName));
              node.setCondition(condition);
            }

            node.setJobSource(relative);
            if (parent != null) {
              node.setPropsSource(parent.getSource());
            }

            // Force root node
            if (prop.getBoolean(CommonJobProperties.ROOT_NODE, false)) {
              this.rootNodes.add(jobName);
            }

            this.jobPropsMap.put(jobName, prop);
            this.nodeMap.put(jobName, node);
          }
        }
      } catch (final IOException e) {
        this.errors.add("Error loading job file " + file.getName() + ":" + e.getMessage());
      }
    }

    validateConditions();

    // Recurse into subdirectories with the props accumulated so far as the parent.
    for (final File file : dir.listFiles(new DirFilter())) {
      loadProjectFromDir(base, file, parent);
    }
  }

  /**
   * Validates every recorded embedded-flow dependency, reporting cycles and
   * references to flows that do not exist.
   */
  private void resolveEmbeddedFlows() {
    for (final String flowId : this.flowDependencies.keySet()) {
      final HashSet<String> visited = new HashSet<>();
      resolveEmbeddedFlow(flowId, visited);
    }
  }

  /**
   * Depth-first check of one embedded-flow chain. {@code visited} holds the
   * flows on the current path; revisiting one means an embedding cycle.
   *
   * @param flowId the flow whose embedded flows are being checked
   * @param visited flows on the current recursion path
   */
  private void resolveEmbeddedFlow(final String flowId, final Set<String> visited) {
    final Set<String> embeddedFlow = this.flowDependencies.get(flowId);
    if (embeddedFlow == null) {
      return;
    }

    visited.add(flowId);
    for (final String embeddedFlowId : embeddedFlow) {
      if (visited.contains(embeddedFlowId)) {
        this.errors.add("Embedded flow cycle found in " + flowId + "->" + embeddedFlowId);
        return;
      } else if (!this.flowMap.containsKey(embeddedFlowId)) {
        this.errors.add("Flow " + flowId + " depends on " + embeddedFlowId
            + " but can't be found.");
        return;
      } else {
        resolveEmbeddedFlow(embeddedFlowId, visited);
      }
    }
    visited.remove(flowId);
  }

  /**
   * Builds the in/out edge map from each job's "dependencies" property,
   * flagging unknown dependencies, dependencies on duplicate-named jobs,
   * and self cycles as error edges.
   */
  private void resolveDependencies() {
    // Add all the in edges and out edges. Catch bad dependencies and self
    // referrals. Also collect list of nodes who are parents.
    for (final Node node : this.nodeMap.values()) {
      final Props props = this.jobPropsMap.get(node.getId());

      if (props == null) {
        logger.error("Job props not found!! For some reason.");
        continue;
      }

      final List<String> dependencyList =
          props.getStringList(CommonJobProperties.DEPENDENCIES, (List<String>) null);

      if (dependencyList != null) {
        Map<String, Edge> dependencies = this.nodeDependencies.get(node.getId());
        if (dependencies == null) {
          dependencies = new HashMap<>();

          for (String dependencyName : dependencyList) {
            dependencyName = dependencyName == null ? null : dependencyName.trim();
            if (dependencyName == null || dependencyName.isEmpty()) {
              continue;
            }

            final Edge edge = new Edge(dependencyName, node.getId());
            final Node dependencyNode = this.nodeMap.get(dependencyName);
            if (dependencyNode == null) {
              if (this.duplicateJobs.contains(dependencyName)) {
                edge.setError("Ambiguous Dependency. Duplicates found.");
                dependencies.put(dependencyName, edge);
                this.errors.add(node.getId() + " has ambiguous dependency " + dependencyName);
              } else {
                edge.setError("Dependency not found.");
                dependencies.put(dependencyName, edge);
                this.errors.add(node.getId() + " cannot find dependency " + dependencyName);
              }
            } else if (dependencyNode == node) {
              // We have a self cycle
              edge.setError("Self cycle found.");
              dependencies.put(dependencyName, edge);
              this.errors.add(node.getId() + " has a self cycle");
            } else {
              dependencies.put(dependencyName, edge);
            }
          }

          if (!dependencies.isEmpty()) {
            this.nodeDependencies.put(node.getId(), dependencies);
          }
        }
      }
    }
  }

  /**
   * Creates one {@link Flow} per root node (a node nothing depends on, or one
   * explicitly forced via {@link CommonJobProperties#ROOT_NODE}) by walking its
   * dependency graph.
   */
  private void buildFlowsFromDependencies() {
    // Find all root nodes by finding ones without dependents.
    final HashSet<String> nonRootNodes = new HashSet<>();
    for (final Map<String, Edge> edges : this.nodeDependencies.values()) {
      for (final String sourceId : edges.keySet()) {
        nonRootNodes.add(sourceId);
      }
    }

    // Now create flows. Bad flows are marked invalid
    for (final Node base : this.nodeMap.values()) {
      // Root nodes can be discovered when parsing jobs
      if (this.rootNodes.contains(base.getId()) || !nonRootNodes.contains(base.getId())) {
        this.rootNodes.add(base.getId());
        final Flow flow = new Flow(base.getId());
        final Props jobProp = this.jobPropsMap.get(base.getId());

        FlowLoaderUtils.addEmailPropsToFlow(flow, jobProp);

        flow.addAllFlowProperties(this.flowPropsList);
        final Set<String> visitedNodesOnPath = new HashSet<>();
        final Set<String> visitedNodesEver = new HashSet<>();
        constructFlow(flow, base, visitedNodesOnPath, visitedNodesEver);
        flow.initialize();
        this.flowMap.put(base.getId(), flow);
      }
    }
  }

  /**
   * Recursively adds {@code node} and its dependency edges to {@code flow},
   * turning any dependency back onto the current path into an error edge
   * (cyclical dependency).
   *
   * @param flow the flow being assembled
   * @param node the node to add
   * @param visitedOnPath nodes on the current recursion path (cycle detection)
   * @param visitedEver nodes already fully processed for this flow
   */
  private void constructFlow(final Flow flow, final Node node, final Set<String> visitedOnPath,
      final Set<String> visitedEver) {
    visitedOnPath.add(node.getId());
    visitedEver.add(node.getId());

    flow.addNode(node);
    flow.setCondition(node.getCondition());
    if (SpecialJobTypes.EMBEDDED_FLOW_TYPE.equals(node.getType())) {
      final Props props = this.jobPropsMap.get(node.getId());
      final String embeddedFlow = props.get(SpecialJobTypes.FLOW_NAME);

      Set<String> embeddedFlows = this.flowDependencies.get(flow.getId());
      if (embeddedFlows == null) {
        embeddedFlows = new HashSet<>();
        this.flowDependencies.put(flow.getId(), embeddedFlows);
      }

      node.setEmbeddedFlowId(embeddedFlow);
      embeddedFlows.add(embeddedFlow);
    }
    final Map<String, Edge> dependencies = this.nodeDependencies.get(node.getId());

    if (dependencies != null) {
      for (Edge edge : dependencies.values()) {
        if (edge.hasError()) {
          flow.addEdge(edge);
        } else if (visitedOnPath.contains(edge.getSourceId())) {
          // We have a cycle. We set it as an error edge
          edge = new Edge(edge.getSourceId(), node.getId());
          edge.setError("Cyclical dependencies found.");
          this.errors.add("Cyclical dependency found at " + edge.getId());
          flow.addEdge(edge);
        } else if (visitedEver.contains(edge.getSourceId())) {
          // this node was already checked, don't need to check further
          flow.addEdge(edge);
        } else {
          // This should not be null
          flow.addEdge(edge);
          final Node sourceNode = this.nodeMap.get(edge.getSourceId());
          constructFlow(flow, sourceNode, visitedOnPath, visitedEver);
        }
      }
    }
    visitedOnPath.remove(node.getId());
  }

  /** Returns the file name with its last extension (e.g. ".job") stripped. */
  private String getNameWithoutExtension(final File file) {
    final String filename = file.getName();
    final int index = filename.lastIndexOf('.');
    return index < 0 ? filename : filename.substring(0, index);
  }

  /** Returns {@code filePath} relative to {@code basePath} (separator stripped). */
  private String getRelativeFilePath(final String basePath, final String filePath) {
    return filePath.substring(basePath.length() + 1);
  }

  /**
   * Validates each node's condition expression: at most one conditionOnJobStatus
   * macro, and every other operand must be a number, a quoted string, or a
   * ${jobName:param} variable substitution.
   */
  private void validateConditions() {
    nodeMap.forEach((name, node) -> {
      final String condition = node.getCondition();
      boolean foundConditionOnJobStatus = false;
      if (condition == null) {
        return;
      }
      // First, remove all the whitespaces and parenthesis ().
      final String replacedCondition = condition.replaceAll("\\s+|\\(|\\)", "");
      // Second, split the condition by operators &&, ||, ==, !=, >, >=, <, <=
      final String[] operands = replacedCondition
          .split(DirectoryYamlFlowLoader.VALID_CONDITION_OPERATORS);
      // Third, check whether all the operands are valid: only conditionOnJobStatus macros, numbers,
      // strings, and variable substitution ${jobName:param} are allowed.
      for (int i = 0; i < operands.length; i++) {
        final Matcher matcher = DirectoryYamlFlowLoader.CONDITION_ON_JOB_STATUS_PATTERN
            .matcher(operands[i]);
        if (matcher.matches()) {
          logger.info("Operand " + operands[i] + " is a condition on job status.");
          if (foundConditionOnJobStatus) {
            this.errors.add("Invalid condition for " + node.getId()
                + ": cannot combine more than one conditionOnJobStatus macros.");
          }
          foundConditionOnJobStatus = true;
          node.setConditionOnJobStatus(ConditionOnJobStatus.fromString(matcher.group(1)));
        } else {
          if (operands[i].startsWith("!")) {
            // Remove the operator '!' from the operand.
            operands[i] = operands[i].substring(1);
          }
          if (operands[i].isEmpty()) {
            // Fixed message: previously read "Invalid condition fo " (typo),
            // inconsistent with DirectoryYamlFlowLoader's wording.
            this.errors
                .add("Invalid condition for " + node.getId() + ": operand is an empty string.");
          } else if (!DirectoryYamlFlowLoader.DIGIT_STRING_PATTERN.matcher(operands[i])
              .matches()) {
            validateVariableSubstitution(operands[i], name);
          }
        }
      }
    });
  }

  /**
   * Validates one ${jobName:param} operand: the syntax must match and the
   * referenced job must exist in this project.
   *
   * @param operand the operand text (whitespace already stripped)
   * @param name the id of the node owning the condition, used in error messages
   */
  private void validateVariableSubstitution(final String operand, final String name) {
    final Matcher matcher = DirectoryYamlFlowLoader.CONDITION_VARIABLE_REPLACEMENT_PATTERN
        .matcher(operand);
    if (matcher.matches()) {
      final String jobName = matcher.group(1);
      final Node conditionNode = nodeMap.get(jobName);
      if (conditionNode == null) {
        this.errors.add("Invalid condition for " + name + ": " + jobName
            + " doesn't exist in the flow.");
      }
    } else {
      this.errors.add("Invalid condition for " + name
          + ": cannot resolve the condition. Please check the syntax for supported conditions.");
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/DirectoryYamlFlowLoader.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.project;

import azkaban.Constants;
import azkaban.flow.ConditionOnJobStatus;
import azkaban.flow.Edge;
import azkaban.flow.Flow;
import azkaban.flow.FlowProps;
import azkaban.flow.Node;
import azkaban.project.FlowLoaderUtils.DirFilter;
import azkaban.project.FlowLoaderUtils.SuffixFilter;
import azkaban.project.validator.ValidationReport;
import azkaban.utils.Props;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads yaml files to flows from project directory.
 *
 * <p>This is the flow-2.0 counterpart of {@code DirectoryFlowLoader}: each
 * *.flow YAML file in the project directory is parsed into an AzkabanFlow and
 * converted to a {@link Flow}. Embedded flows become flows of their own, keyed
 * by "parentFlow:embeddedNode". Errors are accumulated in {@link #getErrors()}
 * rather than thrown.
 */
public class DirectoryYamlFlowLoader implements FlowLoader {

  // Pattern to match job variables in condition expressions: ${jobName:variable}
  public static final Pattern CONDITION_VARIABLE_REPLACEMENT_PATTERN = Pattern
      .compile("\\$\\{([^:{}]+):([^:{}]+)\\}");
  // Pattern to match conditionOnJobStatus macros, e.g. one_success, all_done
  // (case-insensitive, built from the ConditionOnJobStatus enum values).
  public static final Pattern CONDITION_ON_JOB_STATUS_PATTERN = Pattern.compile("(?i)\\b(" +
      StringUtils.join(ConditionOnJobStatus.values(), "|") + ")\\b");
  // Pattern to match a number or a string, e.g. 1234, "hello", 'foo'
  public static final Pattern DIGIT_STRING_PATTERN = Pattern.compile("\\d+|'.*'|\".*\"");
  // Valid operators in condition expressions: &&, ||, ==, !=, >, >=, <, <=
  public static final String VALID_CONDITION_OPERATORS = "&&|\\|\\||==|!=|>|>=|<|<=";
  private static final Logger logger = LoggerFactory.getLogger(DirectoryYamlFlowLoader.class);
  private final Props props;
  // Accumulated validation/loading errors; surfaced via the ValidationReport.
  private final Set<String> errors = new HashSet<>();
  // Flow name -> Flow, including embedded flows under "parent:node" keys.
  private final Map<String, Flow> flowMap = new HashMap<>();
  // Flow name -> all edges of that flow (including error-marked cycle edges).
  private final Map<String, List<Edge>> edgeMap = new HashMap<>();
  // "flowName:nodeName" -> that node's Props.
  private final Map<String, Props> jobPropsMap = new HashMap<>();

  /**
   * Creates a new DirectoryYamlFlowLoader.
   *
   * @param props Properties to add.
   */
  public DirectoryYamlFlowLoader(final Props props) {
    this.props = props;
  }

  /**
   * Returns the flow map constructed from the loaded flows.
   *
   * @return Map of flow name to Flow.
   */
  @Override
  public Map<String, Flow> getFlowMap() {
    return this.flowMap;
  }

  /**
   * Returns errors caught when loading flows.
   *
   * @return Set of error strings.
   */
  @Override
  public Set<String> getErrors() {
    return this.errors;
  }

  /**
   * Returns the edge map constructed from the loaded flows.
   *
   * @return Map of flow name to all its Edges.
   */
  public Map<String, List<Edge>> getEdgeMap() {
    return this.edgeMap;
  }

  /**
   * Loads all project flows from the directory.
   *
   * @param project The project.
   * @param projectDir The directory to load flows from.
   * @return the validation report.
   */
  @Override
  public ValidationReport loadProjectFlow(final Project project, final File projectDir) {
    convertYamlFiles(projectDir);
    FlowLoaderUtils.checkJobProperties(project.getId(), this.props, this.jobPropsMap, this.errors);
    return FlowLoaderUtils.generateFlowLoaderReport(this.errors);
  }

  /**
   * Recursively parses every *.flow file under {@code projectDir} into a Flow.
   * Per-file parse failures are recorded as errors so the remaining files are
   * still processed.
   */
  private void convertYamlFiles(final File projectDir) {
    // Todo jamiesjc: convert project yaml file.

    for (final File file : projectDir.listFiles(new SuffixFilter(Constants.FLOW_FILE_SUFFIX))) {
      final NodeBeanLoader loader = new NodeBeanLoader();

      try {
        final NodeBean nodeBean = loader.load(file);
        if (!loader.validate(nodeBean)) {
          this.errors.add("Failed to validate nodeBean for " + file.getName()
              + ". Duplicate nodes found or dependency undefined.");
        } else {
          final AzkabanFlow azkabanFlow = (AzkabanFlow) loader.toAzkabanNode(nodeBean);
          if (this.flowMap.containsKey(azkabanFlow.getName())) {
            this.errors.add("Duplicate flows found in the project with name " + azkabanFlow
                .getName());
          } else {
            final Flow flow = convertAzkabanFlowToFlow(azkabanFlow, azkabanFlow.getName(), file);
            this.flowMap.put(flow.getId(), flow);
          }
        }
      } catch (final Exception e) {
        this.errors.add("Error loading flow yaml file " + file.getName() + ":"
            + e.getMessage());
      }
    }

    for (final File file : projectDir.listFiles(new DirFilter())) {
      convertYamlFiles(file);
    }
  }

  /**
   * Converts one parsed AzkabanFlow (plus all its nodes and edges) into a
   * {@link Flow}. Called recursively for embedded flows via
   * {@link #convertAzkabanNodeToNode}.
   *
   * @param azkabanFlow the parsed flow bean
   * @param flowName the id for the resulting Flow ("parent:node" for embedded flows)
   * @param flowFile the YAML file the flow came from, used as the props/job source
   */
  private Flow convertAzkabanFlowToFlow(final AzkabanFlow azkabanFlow, final String flowName,
      final File flowFile) {
    final Flow flow = new Flow(flowName);
    flow.setAzkabanFlowVersion(Constants.AZKABAN_FLOW_VERSION_2_0);
    final Props props = azkabanFlow.getProps();
    FlowLoaderUtils.addEmailPropsToFlow(flow, props);
    props.setSource(flowFile.getName());

    flow.addAllFlowProperties(ImmutableList.of(new FlowProps(props)));

    // Convert azkabanNodes to nodes inside the flow.
    azkabanFlow.getNodes().values().stream()
        .map(n -> convertAzkabanNodeToNode(n, flowName, flowFile, azkabanFlow))
        .forEach(n -> flow.addNode(n));

    // Add edges for the flow.
    buildFlowEdges(azkabanFlow, flowName);
    if (this.edgeMap.containsKey(flowName)) {
      flow.addAllEdges(this.edgeMap.get(flowName));
    }

    // Todo jamiesjc: deprecate startNodes, endNodes and numLevels, and remove below method finally.
    // Blow method will construct startNodes, endNodes and numLevels for the flow.
    flow.initialize();

    return flow;
  }

  /**
   * Converts one AzkabanNode into a {@link Node}, validating its condition. A
   * node of the embedded-flow type is additionally converted into a Flow of its
   * own and registered in {@link #flowMap}.
   */
  private Node convertAzkabanNodeToNode(final AzkabanNode azkabanNode, final String flowName,
      final File flowFile, final AzkabanFlow azkabanFlow) {
    final Node node = new Node(azkabanNode.getName());
    node.setType(azkabanNode.getType());
    validateCondition(node, azkabanNode, azkabanFlow);
    node.setCondition(azkabanNode.getCondition());
    node.setPropsSource(flowFile.getName());
    node.setJobSource(flowFile.getName());

    if (azkabanNode.getType().equals(Constants.FLOW_NODE_TYPE)) {
      // Embedded flow: register it as a separate Flow keyed "parent:node".
      final String embeddedFlowId = flowName + Constants.PATH_DELIMITER + node.getId();
      node.setEmbeddedFlowId(embeddedFlowId);
      final Flow flowNode =
          convertAzkabanFlowToFlow((AzkabanFlow) azkabanNode, embeddedFlowId, flowFile);
      flowNode.setEmbeddedFlow(true);
      flowNode.setCondition(node.getCondition());
      this.flowMap.put(flowNode.getId(), flowNode);
    }
    this.jobPropsMap
        .put(flowName + Constants.PATH_DELIMITER + node.getId(), azkabanNode.getProps());
    return node;
  }

  /**
   * Builds the edge list for {@code flowName} from each node's dependsOn list,
   * using a DFS with a recursion stack to detect dependency cycles.
   */
  private void buildFlowEdges(final AzkabanFlow azkabanFlow, final String flowName) {
    // Recursive stack to record searched nodes. Used for detecting dependency cycles.
    final HashSet<String> recStack = new HashSet<>();
    // Nodes that have already been visited and added edges.
    final HashSet<String> visited = new HashSet<>();

    for (final AzkabanNode node : azkabanFlow.getNodes().values()) {
      addEdges(node, azkabanFlow, flowName, recStack, visited);
    }
  }

  /**
   * DFS step of {@link #buildFlowEdges}: adds one edge per parent of
   * {@code node}. A parent already on the recursion stack indicates a cycle
   * (including a self cycle); the edge is then marked as an error edge instead
   * of recursing.
   */
  private void addEdges(final AzkabanNode node, final AzkabanFlow azkabanFlow,
      final String flowName, final HashSet<String> recStack, final HashSet<String> visited) {
    if (!visited.contains(node.getName())) {
      recStack.add(node.getName());
      visited.add(node.getName());
      final List<String> dependsOnList = node.getDependsOn();
      for (final String parent : dependsOnList) {
        final Edge edge = new Edge(parent, node.getName());
        if (!this.edgeMap.containsKey(flowName)) {
          this.edgeMap.put(flowName, new ArrayList<>());
        }
        this.edgeMap.get(flowName).add(edge);

        if (recStack.contains(parent)) {
          // Cycles found, including self cycle.
          edge.setError("Cycles found.");
          this.errors.add("Cycles found at " + edge.getId());
        } else {
          // Valid edge. Continue to process the parent node recursively.
          addEdges(azkabanFlow.getNode(parent), azkabanFlow, flowName, recStack, visited);
        }
      }
      recStack.remove(node.getName());
    }
  }

  /**
   * Validates a node's condition expression: at most one conditionOnJobStatus
   * macro, and every other operand must be a number, a quoted string, or a
   * ${jobName:param} substitution referencing a non-descendant job.
   */
  private void validateCondition(final Node node, final AzkabanNode azkabanNode,
      final AzkabanFlow azkabanFlow) {
    boolean foundConditionOnJobStatus = false;
    final String condition = azkabanNode.getCondition();
    if (condition == null) {
      return;
    }
    // First, remove all the whitespaces and parenthesis ().
    final String replacedCondition = condition.replaceAll("\\s+|\\(|\\)", "");
    // Second, split the condition by operators &&, ||, ==, !=, >, >=, <, <=
    final String[] operands = replacedCondition.split(VALID_CONDITION_OPERATORS);
    // Third, check whether all the operands are valid: only conditionOnJobStatus macros, numbers,
    // strings, and variable substitution ${jobName:param} are allowed.
    for (int i = 0; i < operands.length; i++) {
      final Matcher matcher = CONDITION_ON_JOB_STATUS_PATTERN.matcher(operands[i]);
      if (matcher.matches()) {
        this.logger.info("Operand " + operands[i] + " is a condition on job status.");
        if (foundConditionOnJobStatus) {
          this.errors.add("Invalid condition for " + node.getId()
              + ": cannot combine more than one conditionOnJobStatus macros.");
        }
        foundConditionOnJobStatus = true;
        node.setConditionOnJobStatus(ConditionOnJobStatus.fromString(matcher.group(1)));
      } else {
        if (operands[i].startsWith("!")) {
          // Remove the operator '!' from the operand.
          operands[i] = operands[i].substring(1);
        }
        if (operands[i].equals("")) {
          this.errors
              .add("Invalid condition for " + node.getId() + ": operand is an empty string.");
        } else if (!DIGIT_STRING_PATTERN.matcher(operands[i]).matches()) {
          validateVariableSubstitution(operands[i], azkabanNode, azkabanFlow);
        }
      }
    }
  }

  /**
   * Validates one ${jobName:param} operand: the syntax must match, the
   * referenced job must exist in the flow, and it must not be a descendant of
   * the node declaring the condition (a job cannot condition on jobs that run
   * after it).
   */
  private void validateVariableSubstitution(final String operand, final AzkabanNode azkabanNode,
      final AzkabanFlow azkabanFlow) {
    final Matcher matcher = CONDITION_VARIABLE_REPLACEMENT_PATTERN.matcher(operand);
    if (matcher.matches()) {
      final String jobName = matcher.group(1);
      final AzkabanNode conditionNode = azkabanFlow.getNode(jobName);
      if (conditionNode == null) {
        this.errors.add("Invalid condition for " + azkabanNode.getName() + ": " + jobName
            + " doesn't exist in the flow.");
      }
      // If a job defines condition on its descendant nodes, then that condition is invalid.
      else if (isDescendantNode(conditionNode, azkabanNode, azkabanFlow)) {
        this.errors.add("Invalid condition for " + azkabanNode.getName()
            + ": should not define condition on its descendant node " + jobName + ".");
      }
    } else {
      this.errors.add("Invalid condition for " + azkabanNode.getName()
          + ": cannot resolve the condition. Please check the syntax for supported conditions.");
    }
  }

  /**
   * Returns true if {@code target} is reachable from {@code current} via
   * dependsOn links, i.e. {@code current} is a descendant of {@code target}.
   */
  private boolean isDescendantNode(final AzkabanNode current, final AzkabanNode target,
      final AzkabanFlow azkabanFlow) {
    // Check if the current node is a descendant of the target node.
    if (current == null || target == null) {
      return false;
    } else if (current.getDependsOn() == null) {
      return false;
    } else if (current.getDependsOn().contains(target.getName())) {
      return true;
    } else {
      for (final String nodeName : current.getDependsOn()) {
        if (isDescendantNode(azkabanFlow.getNode(nodeName), target, azkabanFlow)) {
          return true;
        }
      }
    }
    return false;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/FlowLoader.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.project;

import azkaban.flow.Flow;
import azkaban.project.validator.ValidationReport;
import java.io.File;
import java.util.Map;
import java.util.Set;

/**
 * Interface to load project flows.
 *
 * <p>Implementations parse a project directory into {@link Flow} objects and
 * collect (rather than throw) validation errors. Callers invoke
 * {@link #loadProjectFlow} first, then read the results from
 * {@link #getFlowMap()} and {@link #getErrors()}.
 */
public interface FlowLoader {

  /**
   * Loads all project flows from the directory.
   *
   * @param project The project.
   * @param projectDir The directory to load flows from.
   * @return the validation report.
   */
  ValidationReport loadProjectFlow(final Project project, final File projectDir);

  /**
   * Returns the flow map constructed from the loaded flows.
   *
   * @return Map of flow name to Flow.
   */
  Map<String, Flow> getFlowMap();

  /**
   * Returns errors caught when loading flows.
   *
   * @return Set of error strings.
   */
  Set<String> getErrors();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/FlowLoaderFactory.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.project;

import static java.util.Objects.requireNonNull;

import azkaban.Constants;
import azkaban.project.FlowLoaderUtils.DirFilter;
import azkaban.project.FlowLoaderUtils.SuffixFilter;
import azkaban.utils.Props;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Map;
import javax.inject.Inject;
import org.apache.commons.lang.ArrayUtils;
import org.yaml.snakeyaml.Yaml;

/**
 * Factory class to generate flow loaders.
 *
 * <p>Chooses between the flow-2.0 YAML loader and the legacy directory loader
 * based on whether the project directory (or any subdirectory) contains a valid
 * project YAML file declaring {@code azkaban-flow-version: 2.0}.
 */
public class FlowLoaderFactory {

  private final Props props;

  /**
   * Instantiates a new Flow loader factory.
   *
   * @param props the props
   */
  @Inject
  public FlowLoaderFactory(final Props props) {
    this.props = requireNonNull(props, "Props is null");
  }

  /**
   * Creates flow loader based on project YAML file inside project directory.
   *
   * @param projectDir the project directory
   * @return the flow loader
   */
  public FlowLoader createFlowLoader(final File projectDir) throws ProjectManagerException {
    if (checkForValidProjectYamlFile(projectDir)) {
      return new DirectoryYamlFlowLoader(this.props);
    } else {
      return new DirectoryFlowLoader(this.props);
    }
  }

  /**
   * Recursively checks {@code projectDir} for a project YAML file and validates
   * its azkaban-flow-version.
   *
   * @param projectDir the directory to check
   * @return true if a valid flow-2.0 project YAML file exists here or in a subdirectory
   * @throws ProjectManagerException if the directory is unreadable, more than one project YAML
   *         file exists in a directory, the file cannot be read, the version key is missing,
   *         or the version is not 2.0
   */
  private boolean checkForValidProjectYamlFile(final File projectDir)
      throws ProjectManagerException {
    final File[] projectFileList = projectDir.listFiles(new SuffixFilter(Constants
        .PROJECT_FILE_SUFFIX));
    if (projectFileList == null) {
      throw new ProjectManagerException("Error reading project directory. Input is not a "
          + "directory or IO error happens.");
    }

    if (ArrayUtils.isNotEmpty(projectFileList)) {
      if (projectFileList.length > 1) {
        throw new ProjectManagerException("Duplicate project YAML files found in the project "
            + "directory. Only one is allowed.");
      }

      final Map<String, Object> azkabanProject;
      try (FileInputStream fis = new FileInputStream(projectFileList[0])) {
        // SnakeYAML returns Object; a mapping document loads as Map<String, Object>.
        // The cast is unchecked by nature of the YAML API.
        @SuppressWarnings("unchecked")
        final Map<String, Object> loaded = (Map<String, Object>) new Yaml().load(fis);
        azkabanProject = loaded;
      } catch (final IOException e) {
        throw new ProjectManagerException("Error reading project YAML file.", e);
      }

      if (azkabanProject == null || !azkabanProject
          .containsKey(Constants.ConfigurationKeys.AZKABAN_FLOW_VERSION)) {
        throw new ProjectManagerException("azkaban-flow-version is not specified in the project "
            + "YAML file.");
      }

      // Constant-first equals: the key may be present with a null value
      // ("azkaban-flow-version:" with no value), which previously threw an NPE
      // here instead of the intended ProjectManagerException.
      if (Constants.AZKABAN_FLOW_VERSION_2_0
          .equals(azkabanProject.get(Constants.ConfigurationKeys.AZKABAN_FLOW_VERSION))) {
        return true;
      } else {
        throw new ProjectManagerException("Invalid azkaban-flow-version in the project YAML file.");
      }
    } else {
      // No project YAML here; search subdirectories.
      for (final File file : projectDir.listFiles(new DirFilter())) {
        if (checkForValidProjectYamlFile(file)) {
          return true;
        }
      }
      return false;
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/FlowLoaderUtils.java
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the “License”); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an “AS IS” BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.project; import azkaban.Constants; import azkaban.flow.CommonJobProperties; import azkaban.flow.Flow; import azkaban.jobcallback.JobCallbackValidator; import azkaban.project.validator.ValidationReport; import azkaban.utils.MemConfValue; import azkaban.utils.Props; import azkaban.utils.PropsUtils; import azkaban.utils.Utils; import java.io.BufferedWriter; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.DumperOptions.FlowStyle; import org.yaml.snakeyaml.Yaml; /** * Utils to help load flows. */ public class FlowLoaderUtils { private static final Logger logger = LoggerFactory.getLogger(FlowLoaderUtils.class); private static final String XMS = "Xms"; private static final String XMX = "Xmx"; /** * Sets props in flow yaml file. * * @param path the flow or job path delimited by ":", e.g. 
"flow:subflow1:subflow2:job3"
 * @param flowFile the flow yaml file
 * @param prop the props to set
 */
public static void setPropsInYamlFile(final String path, final File flowFile, final Props prop) {
  final DumperOptions options = new DumperOptions();
  options.setDefaultFlowStyle(FlowStyle.BLOCK);
  final NodeBean nodeBean = FlowLoaderUtils.setPropsInNodeBean(path, flowFile, prop);
  try (final BufferedWriter writer = Files
      .newBufferedWriter(flowFile.toPath(), StandardCharsets.UTF_8)) {
    new Yaml(options).dump(nodeBean, writer);
  } catch (final IOException e) {
    // Preserve the cause so the underlying I/O failure is not lost to callers/logs.
    throw new ProjectManagerException(
        "Failed to set properties in flow file " + flowFile.getName(), e);
  }
}

/**
 * Sets props in node bean.
 *
 * @param path the flow or job path delimited by ":", e.g. "flow:subflow1:subflow2:job3"
 * @param flowFile the flow yaml file
 * @param prop the props to set
 * @return the node bean with the props applied, or null if loading/overriding failed
 */
public static NodeBean setPropsInNodeBean(final String path, final File flowFile,
    final Props prop) {
  final NodeBeanLoader loader = new NodeBeanLoader();
  try {
    final NodeBean nodeBean = loader.load(flowFile);
    final String[] pathList = path.split(Constants.PATH_DELIMITER);
    if (overridePropsInNodeBean(nodeBean, pathList, 0, prop)) {
      return nodeBean;
    } else {
      logger.error("Error setting props for " + path);
    }
  } catch (final Exception e) {
    // Log the exception itself, not just the file name, so the root cause is visible.
    logger.error("Failed to set props, error loading flow YAML file " + flowFile, e);
  }
  return null;
}

/**
 * Helper method to recursively find the node to override props.
 *
 * @param nodeBean the node bean
 * @param pathList the path list
 * @param idx the idx
 * @param prop the props to override
 * @return true if the node identified by pathList was found and its props overridden
 */
private static boolean overridePropsInNodeBean(final NodeBean nodeBean, final String[] pathList,
    final int idx, final Props prop) {
  if (idx < pathList.length && nodeBean.getName().equals(pathList[idx])) {
    if (idx == pathList.length - 1) {
      // Leaf of the path: apply the type (if present) and the remaining config.
      if (prop.containsKey(Constants.NODE_TYPE)) {
        nodeBean.setType(prop.get(Constants.NODE_TYPE));
      }
      final Map<String, String> config = prop.getFlattened();
      config.remove(Constants.NODE_TYPE);
      nodeBean.setConfig(config);
      return true;
    }
    for (final NodeBean bean : nodeBean.getNodes()) {
      if (overridePropsInNodeBean(bean, pathList, idx + 1, prop)) {
        return true;
      }
    }
  }
  return false;
}

/**
 * Gets flow or job props from flow yaml file.
 *
 * @param path the flow or job path delimited by ":", e.g. "flow:subflow1:subflow2:job3"
 * @param flowFile the flow yaml file
 * @return the props from yaml file, or null if the path was not found or loading failed
 */
public static Props getPropsFromYamlFile(final String path, final File flowFile) {
  final List<Props> propsList = new ArrayList<>();
  final NodeBeanLoader loader = new NodeBeanLoader();
  try {
    final NodeBean nodeBean = loader.load(flowFile);
    final String[] pathList = path.split(Constants.PATH_DELIMITER);
    if (findPropsFromNodeBean(nodeBean, pathList, 0, propsList)) {
      if (!propsList.isEmpty()) {
        return propsList.get(0);
      } else {
        logger.error("Error getting props for " + path);
      }
    }
  } catch (final Exception e) {
    logger.error("Failed to get props, error loading flow YAML file. ", e);
  }
  return null;
}

/**
 * Helper method to recursively find props from node bean.
 *
 * @param nodeBean the node bean
 * @param pathList the path list
 * @param idx the idx
 * @param propsList the props list; the matched node's props are appended here
 * @return true if the node identified by pathList was found
 */
private static boolean findPropsFromNodeBean(final NodeBean nodeBean, final String[] pathList,
    final int idx, final List<Props> propsList) {
  if (idx < pathList.length && nodeBean.getName().equals(pathList[idx])) {
    if (idx == pathList.length - 1) {
      propsList.add(nodeBean.getProps());
      return true;
    }
    for (final NodeBean bean : nodeBean.getNodes()) {
      if (findPropsFromNodeBean(bean, pathList, idx + 1, propsList)) {
        return true;
      }
    }
  }
  return false;
}

/**
 * Gets the flow trigger defined in the flow yaml file.
 *
 * @param flowFile the flow yaml file
 * @return the flow trigger, or null if loading failed
 */
public static FlowTrigger getFlowTriggerFromYamlFile(final File flowFile) {
  final NodeBeanLoader loader = new NodeBeanLoader();
  try {
    final NodeBean nodeBean = loader.load(flowFile);
    return loader.toFlowTrigger(nodeBean.getTrigger());
  } catch (final Exception e) {
    logger.error("Failed to get flow trigger, error loading flow YAML file. ", e);
  }
  return null;
}

/**
 * Adds email properties to a flow. Success/failure lists get their dedicated addresses plus
 * everything in the "notify" list; all addresses are lower-cased for de-duplication.
 *
 * @param flow the flow
 * @param prop the prop
 */
public static void addEmailPropsToFlow(final Flow flow, final Props prop) {
  // Collections.emptyList() instead of the raw EMPTY_LIST constant avoids unchecked warnings.
  final List<String> successEmailList =
      prop.getStringList(CommonJobProperties.SUCCESS_EMAILS, Collections.emptyList());
  final Set<String> successEmail = new HashSet<>();
  for (final String email : successEmailList) {
    successEmail.add(email.toLowerCase());
  }

  final List<String> failureEmailList =
      prop.getStringList(CommonJobProperties.FAILURE_EMAILS, Collections.emptyList());
  final Set<String> failureEmail = new HashSet<>();
  for (final String email : failureEmailList) {
    failureEmail.add(email.toLowerCase());
  }

  // "Notify" addresses are added to both the success and failure sets.
  final List<String> notifyEmailList =
      prop.getStringList(CommonJobProperties.NOTIFY_EMAILS, Collections.emptyList());
  for (String email : notifyEmailList) {
    email = email.toLowerCase();
    successEmail.add(email);
    failureEmail.add(email);
  }

  flow.addFailureEmails(failureEmail);
  flow.addSuccessEmails(successEmail);
}

/**
 * Generate flow loader report validation report.
 *
 * @param errors the errors
 * @return the validation report containing the given error messages
 */
public static ValidationReport generateFlowLoaderReport(final Set<String> errors) {
  final ValidationReport report = new ValidationReport();
  report.addErrorMsgs(errors);
  return report;
}

/**
 * Check job properties. Validates per-job Xms/Xmx settings against the server-configured
 * maximums and runs job callback validation, collecting messages into {@code errors}.
 *
 * @param projectId the project id
 * @param props the server props
 * @param jobPropsMap the job props map
 * @param errors the errors
 */
public static void checkJobProperties(final int projectId, final Props props,
    final Map<String, Props> jobPropsMap, final Set<String> errors) {
  // if project is in the memory check whitelist, then we don't need to check
  // its memory settings
  if (ProjectWhitelist.isProjectWhitelisted(projectId,
      ProjectWhitelist.WhitelistType.MemoryCheck)) {
    return;
  }

  final MemConfValue maxXms = MemConfValue.parseMaxXms(props);
  final MemConfValue maxXmx = MemConfValue.parseMaxXmx(props);

  // Iterate entries directly instead of keySet() + get() to avoid the redundant lookup.
  for (final Map.Entry<String, Props> entry : jobPropsMap.entrySet()) {
    final String jobName = entry.getKey();
    final Props jobProps = entry.getValue();

    // Variable-replacement patterns (e.g. ${...}) cannot be size-checked until resolved.
    final String xms = jobProps.getString(XMS, null);
    if (xms != null && !PropsUtils.isVariableReplacementPattern(xms)
        && Utils.parseMemString(xms) > maxXms.getSize()) {
      errors.add(String.format(
          "%s: Xms value has exceeded the allowed limit (max Xms = %s)",
          jobName, maxXms.getString()));
    }
    final String xmx = jobProps.getString(XMX, null);
    if (xmx != null && !PropsUtils.isVariableReplacementPattern(xmx)
        && Utils.parseMemString(xmx) > maxXmx.getSize()) {
      errors.add(String.format(
          "%s: Xmx value has exceeded the allowed limit (max Xmx = %s)",
          jobName, maxXmx.getString()));
    }

    // job callback properties check
    JobCallbackValidator.validate(jobName, props, jobProps, errors);
  }
}

/**
 * Clean up the directory.
 *
 * @param dir the directory to be deleted
 */
public static void cleanUpDir(final File dir) {
  try {
    if (dir != null && dir.exists()) {
      FileUtils.deleteDirectory(dir);
    }
  } catch (final IOException e) {
    logger.error("Failed to delete the directory", e);
    // Best effort: ask the JVM to retry the deletion on exit.
    dir.deleteOnExit();
  }
}

/**
 * Check if azkaban flow version is 2.0.
 *
 * @param azkabanFlowVersion the azkaban flow version
 * @return true if the version is exactly 2.0
 */
public static boolean isAzkabanFlowVersion20(final double azkabanFlowVersion) {
  return Double.compare(azkabanFlowVersion, Constants.AZKABAN_FLOW_VERSION_2_0) == 0;
}

/**
 * Implements Suffix filter. Accepts visible regular files whose name ends with (and is longer
 * than) the configured suffix.
 */
public static class SuffixFilter implements FileFilter {

  private final String suffix;

  /**
   * Instantiates a new Suffix filter.
   *
   * @param suffix the suffix
   */
  public SuffixFilter(final String suffix) {
    this.suffix = suffix;
  }

  @Override
  public boolean accept(final File pathname) {
    final String name = pathname.getName();
    return pathname.isFile() && !pathname.isHidden()
        && name.length() > this.suffix.length() && name.endsWith(this.suffix);
  }
}

/**
 * Implements Directory filter.
 */
public static class DirFilter implements FileFilter {

  @Override
  public boolean accept(final File pathname) {
    return pathname.isDirectory();
  }
}
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/FlowTrigger.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import java.io.Serializable;
import java.time.Duration;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import javax.annotation.Nullable;
import org.apache.commons.lang.StringUtils;

/**
 * FlowTrigger is the logical representation of a trigger.
 * It couldn't be changed once gets constructed.
 * It will be used to create running trigger instance.
 */
public class FlowTrigger implements Serializable {

  private static final long serialVersionUID = 5613379236523054097L;

  // Dependency name -> dependency; built once in the constructor and never mutated.
  private final Map<String, FlowTriggerDependency> dependencies;
  private final CronSchedule schedule;
  private final Duration maxWaitDuration;

  /**
   * @throws IllegalArgumentException if illegal argument is found or there is duplicate
   * dependency name or duplicate dependency type and params
   */
  public FlowTrigger(final CronSchedule schedule, final List<FlowTriggerDependency> dependencies,
      @Nullable final Duration maxWaitDuration) {
    // will perform some basic validation here, and further validation will be performed on
    // parsing time when NodeBeanLoader parses the XML to flow trigger.
    Preconditions.checkNotNull(schedule, "schedule cannot be null");
    Preconditions.checkNotNull(dependencies, "dependency cannot be null");
    Preconditions.checkArgument(dependencies.isEmpty() || maxWaitDuration != null, "max wait "
        + "time cannot be null unless no dependency is defined");
    validateDependencies(dependencies);
    this.schedule = schedule;
    // Parameterized builder; the original used the raw type, producing an unchecked assignment.
    final ImmutableMap.Builder<String, FlowTriggerDependency> builder = ImmutableMap.builder();
    dependencies.forEach(dep -> builder.put(dep.getName(), dep));
    this.dependencies = builder.build();
    this.maxWaitDuration = maxWaitDuration;
  }

  /**
   * check uniqueness of dependency.name
   */
  private void validateDepNameUniqueness(final List<FlowTriggerDependency> dependencies) {
    final Set<String> seen = new HashSet<>();
    for (final FlowTriggerDependency dep : dependencies) {
      // set.add() returns false when there exists duplicate
      Preconditions.checkArgument(seen.add(dep.getName()), String.format("duplicate dependency"
          + ".name %s found, dependency.name should be unique", dep.getName()));
    }
  }

  @Override
  public String toString() {
    return "FlowTrigger{"
        + "schedule=" + this.schedule
        + ", maxWaitDurationInMins=" + this.maxWaitDuration
        + "\n " + StringUtils.join(this.dependencies.values(), "\n")
        + '}';
  }

  /**
   * check uniqueness of dependency type and params
   */
  private void validateDepDefinitionUniqueness(final List<FlowTriggerDependency> dependencies) {
    final Set<String> seen = new HashSet<>();
    for (final FlowTriggerDependency dep : dependencies) {
      final Map<String, String> props = dep.getProps();
      // set.add() returns false when there exists duplicate
      Preconditions.checkArgument(seen.add(dep.getType() + ":" + props.toString()), String.format
          ("duplicate dependency config %s found, dependency config should be unique",
              dep.getName()));
    }
  }

  // Runs both uniqueness checks; throws IllegalArgumentException on the first violation.
  private void validateDependencies(final List<FlowTriggerDependency> dependencies) {
    validateDepNameUniqueness(dependencies);
    validateDepDefinitionUniqueness(dependencies);
  }

  /**
   * @return the dependency registered under the given name, or null if absent
   */
  public FlowTriggerDependency getDependencyByName(final String name) {
    return this.dependencies.get(name);
  }

  public Collection<FlowTriggerDependency> getDependencies() {
    return this.dependencies.values();
  }

  /**
   * @return the max wait duration, empty when no dependency is defined
   */
  public Optional<Duration> getMaxWaitDuration() {
    return Optional.ofNullable(this.maxWaitDuration);
  }

  public CronSchedule getSchedule() {
    return this.schedule;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/FlowTriggerBean.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */

package azkaban.project;

import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Java bean loaded from YAML file to represent a flow trigger.
 */
public class FlowTriggerBean {

  // Maximum minutes to wait on dependencies; null when unspecified in the YAML.
  private Long maxWaitMins;
  // Raw schedule section of the YAML (e.g. cron expression entries).
  private Map<String, String> schedule;
  // Dependency definitions; may be left null by the YAML loader.
  private List<TriggerDependencyBean> triggerDependencies;

  public Long getMaxWaitMins() {
    return this.maxWaitMins;
  }

  public void setMaxWaitMins(final Long maxWaitMins) {
    this.maxWaitMins = maxWaitMins;
  }

  public Map<String, String> getSchedule() {
    return this.schedule;
  }

  public void setSchedule(final Map<String, String> schedule) {
    this.schedule = schedule;
  }

  /**
   * @return the configured dependencies, or an empty list when none were set
   */
  public List<TriggerDependencyBean> getTriggerDependencies() {
    if (this.triggerDependencies != null) {
      return this.triggerDependencies;
    }
    return Collections.emptyList();
  }

  public void setTriggerDependencies(
      final List<TriggerDependencyBean> triggerDependencies) {
    this.triggerDependencies = triggerDependencies;
  }

  @Override
  public String toString() {
    final StringBuilder sb = new StringBuilder("FlowTriggerBean{");
    sb.append("maxWaitMins='").append(this.maxWaitMins).append('\'');
    sb.append(", schedule=").append(this.schedule);
    sb.append(", triggerDependencies=").append(this.triggerDependencies);
    sb.append('}');
    return sb.toString();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/FlowTriggerDependency.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import com.google.common.base.Preconditions;
import java.io.Serializable;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;

/**
 * FlowTriggerDependency is the logic representation of a trigger dependency.
 * It couldn't be changed once gets constructed.
 * It will be used to create running dependency instance.
 */
public class FlowTriggerDependency implements Serializable {

  private static final long serialVersionUID = 5875910030716100311L;

  private final Map<String, String> props;
  private final String name;
  private final String type;

  /**
   * @throws IllegalArgumentException if name or type is null or blank
   * @throws IllegalArgumentException if depProps is null
   */
  public FlowTriggerDependency(final String name, final String type,
      final Map<String, String> depProps) {
    Preconditions.checkArgument(StringUtils.isNotBlank(name));
    Preconditions.checkArgument(StringUtils.isNotBlank(type));
    Preconditions.checkArgument(depProps != null);
    this.name = name;
    this.type = type;
    // Defensive copy before wrapping: the class is documented as immutable, but wrapping the
    // caller's map directly would let later caller-side mutations show through getProps().
    this.props = Collections.unmodifiableMap(new HashMap<>(depProps));
    //todo chengren311: validate per dependencyType: some dependency type might need extra special
    //check, also check if it's a valid dependency type
  }

  public String getName() {
    return this.name;
  }

  public String getType() {
    return this.type;
  }

  /**
   * @return an unmodifiable snapshot of the dependency properties
   */
  public Map<String, String> getProps() {
    return this.props;
  }

  @Override
  public String toString() {
    return "FlowTriggerDependency{"
        + "name='" + this.name + '\''
        + ", type='" + this.type + '\''
        + ", props=" + this.props
        + '}';
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/JdbcDependencyManager.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import azkaban.db.DatabaseOperator;
import azkaban.spi.Dependency;
import azkaban.spi.FileValidationStatus;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.apache.commons.dbutils.DbUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Provides methods for interacting with dependency validation cache in DB. Used during thin archive
 * uploads.
 */
@Singleton
public class JdbcDependencyManager {

  private static final Logger log = LoggerFactory.getLogger(JdbcDependencyManager.class);
  private final DatabaseOperator dbOperator;

  @Inject
  JdbcDependencyManager(final DatabaseOperator dbOperator) {
    this.dbOperator = dbOperator;
  }

  /**
   * Looks up the cached validation status for each of the given dependencies under the given
   * validation key. Dependencies with no row in the cache are reported as
   * {@code FileValidationStatus.NEW}; every input dependency is present in the returned map.
   *
   * @param deps dependencies to look up (matched by file name + SHA1)
   * @param validationKey key scoping the cached validation results
   * @return map from each input dependency to its cached (or NEW) validation status
   * @throws SQLException if the query fails
   */
  public Map<Dependency, FileValidationStatus> getValidationStatuses(final Set<Dependency> deps,
      final String validationKey) throws SQLException {
    Map<Dependency, FileValidationStatus> depValidationStatuses = new HashMap<>();
    if (deps.isEmpty()) {
      // There's nothing for us to do.
      return depValidationStatuses;
    }

    // Map of (filename + sha1) -> Dependency for resolving the dependencies already cached in the DB
    // after the query completes.
    Map<String, Dependency> hashAndFileNameToDep = new HashMap<>();
    Connection conn = null;
    ResultSet rs = null;
    PreparedStatement stmnt = null;

    // TODO: Use azkaban.db.DatabaseOperator.query() instead of getting the DB connection and
    // dealing with connection lifecycle.
    try {
      conn = this.dbOperator.getDataSource().getConnection();
      if (conn == null) {
        throw new SQLException("Null connection");
      }
      // One OR-clause pair of placeholders per dependency; see makeStrWithQuestionMarks.
      stmnt = conn.prepareStatement(
          String
              .format("SELECT file_name, file_sha1, validation_status FROM validated_dependencies "
                  + "WHERE validation_key = ? AND (%s)", makeStrWithQuestionMarks(deps.size())));

      // Set the first param, which is the validation_key
      stmnt.setString(1, validationKey);

      // Start at 2 because the first parameter is at index 1, and that is the validator key that we already set.
      int index = 2;
      for (Dependency d : deps) {
        stmnt.setString(index++, d.getFileName());
        stmnt.setString(index++, d.getSHA1());
        hashAndFileNameToDep.put(d.getFileName() + d.getSHA1(), d);
      }

      rs = stmnt.executeQuery();

      while (rs.next()) {
        // Columns are (starting at index 1): file_name, file_sha1, validation_status
        // remove() both resolves the Dependency and marks it as "found in cache".
        Dependency d = hashAndFileNameToDep.remove(rs.getString(1) + rs.getString(2));
        FileValidationStatus v = FileValidationStatus.valueOf(rs.getInt(3));
        depValidationStatuses.put(d, v);
      }

      // All remaining dependencies in the hashToDep map should be marked as being NEW (because they weren't
      // associated with any DB entry)
      hashAndFileNameToDep.values().stream()
          .forEach(d -> depValidationStatuses.put(d, FileValidationStatus.NEW));
    } catch (final SQLException ex) {
      log.error("Transaction failed: ", ex);
      throw ex;
    } finally {
      // Replicate the order of closing in org.apache.commons.dbutils.QueryRunner#query
      DbUtils.closeQuietly(conn, stmnt, rs);
    }
    return depValidationStatuses;
  }

  /**
   * Persists the given validation statuses to the cache under the given validation key.
   * Rows that already exist (e.g. written concurrently by another process) are silently skipped.
   *
   * @param depValidationStatuses statuses to persist, keyed by dependency
   * @param validationKey key scoping the cached validation results
   * @throws SQLException if the batch insert fails
   */
  public void updateValidationStatuses(
      final Map<Dependency, FileValidationStatus> depValidationStatuses,
      final String validationKey) throws SQLException {
    if (depValidationStatuses.isEmpty()) {
      return;
    }

    // Order of columns: file_name, file_sha1, validation_key, validation_status
    Object[][] rowsToInsert = depValidationStatuses
        .keySet()
        .stream()
        .map(d -> new Object[]{d.getFileName(), d.getSHA1(), validationKey,
            depValidationStatuses.get(d).getValue()})
        .toArray(Object[][]::new);

    // We use insert IGNORE because a another process may have been processing the same dependency
    // and written the row for a given dependency before we were able to (resulting in a duplicate primary key
    // error when we try to write the row), so this will ignore the error and continue persisting the other
    // dependencies.
    this.dbOperator.batch("INSERT IGNORE INTO validated_dependencies "
            + "(file_name, file_sha1, validation_key, validation_status) VALUES (?, ?, ?, ?)",
        rowsToInsert);
  }

  // Builds "(file_name = ? and file_sha1 = ?) or (...) or ..." with num clauses.
  // Callers guarantee num >= 1, so the trailing-separator trim is always in range.
  private static String makeStrWithQuestionMarks(final int num) {
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < num; i++) {
      builder.append("(file_name = ? and file_sha1 = ?) or ");
    }
    // Remove trailing " or ";
    return builder.substring(0, builder.length() - 4);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/JdbcProjectHandlerSet.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import azkaban.db.EncodingType;
import azkaban.flow.Flow;
import azkaban.spi.Dependency;
import azkaban.user.Permission;
import azkaban.utils.GZIPUtils;
import azkaban.utils.InvalidHashException;
import azkaban.utils.JSONUtils;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.ThinArchiveUtils;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.sql.Blob;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.dbutils.ResultSetHandler;
import org.apache.commons.io.IOUtils;

/**
 * This is a JDBC Handler collection place for all project handler classes.
 */
class JdbcProjectHandlerSet {

  /**
   * Maps project rows (joined with their permission rows) into {@link Project} objects,
   * de-duplicating projects that appear once per permission.
   */
  public static class ProjectResultHandler implements ResultSetHandler<List<Project>> {

    private static final String BASE_QUERY = "SELECT "
        + "prj.id, prj.name, prj.active, prj.modified_time, prj.create_time, prj.version, prj.last_modified_by, prj.description, prj.enc_type, prj.settings_blob, "
        + "prm.name, prm.permissions, prm.isGroup "
        + "FROM projects prj ";

    // Still return the project if it has no associated permissions
    public static final String SELECT_PROJECT_BY_ID =
        BASE_QUERY
            + "LEFT JOIN project_permissions prm ON prj.id = prm.project_id WHERE prj.id=?";

    // Still return the project if it has no associated permissions
    public static final String SELECT_ACTIVE_PROJECT_BY_NAME =
        BASE_QUERY
            + "LEFT JOIN project_permissions prm ON prj.id = prm.project_id WHERE prj.name=? AND prj.active=true";

    // ONLY return projects that have at least one associated permission, this is for performance reasons.
    // (JOIN is way faster than LEFT JOIN)
    public static final String SELECT_ALL_ACTIVE_PROJECTS =
        BASE_QUERY
            + "JOIN project_permissions prm ON prj.id = prm.project_id WHERE prj.active=true";

    @Override
    public List<Project> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.emptyList();
      }

      // Project ID -> Project
      final Map<Integer, Project> projects = new HashMap<>();
      do {
        final int id = rs.getInt(1);
        // If a project has multiple permissions - the project will be returned multiple times,
        // one for each permission and we don't need to go through the work of reconstructing the
        // project object if we've already seen it.
        if (!projects.containsKey(id)) {
          // This project is new!
          final String name = rs.getString(2);
          final boolean active = rs.getBoolean(3);
          final long modifiedTime = rs.getLong(4);
          final long createTime = rs.getLong(5);
          final int version = rs.getInt(6);
          final String lastModifiedBy = rs.getString(7);
          final String description = rs.getString(8);
          final int encodingType = rs.getInt(9);
          final byte[] data = rs.getBytes(10);

          final Project project;
          if (data != null) {
            final EncodingType encType = EncodingType.fromInteger(encodingType);
            final Object blobObj;
            try {
              // Convoluted way to inflate strings. Should find common package or
              // helper function.
              if (encType == EncodingType.GZIP) {
                // Decompress the sucker.
                final String jsonString = GZIPUtils.unGzipString(data, "UTF-8");
                blobObj = JSONUtils.parseJSONFromString(jsonString);
              } else {
                final String jsonString = new String(data, "UTF-8");
                blobObj = JSONUtils.parseJSONFromString(jsonString);
              }
              project = Project.projectFromObject(blobObj);
            } catch (final IOException e) {
              throw new SQLException(String.format("Failed to get project with id: %d", id), e);
            }
          } else {
            // No settings blob: construct a minimal project from id + name.
            project = new Project(id, name);
          }

          // update the fields as they may have changed
          project.setActive(active);
          project.setLastModifiedTimestamp(modifiedTime);
          project.setCreateTimestamp(createTime);
          project.setVersion(version);
          project.setLastModifiedUser(lastModifiedBy);
          project.setDescription(description);

          projects.put(id, project);
        }

        // Add the permission to the project
        final String username = rs.getString(11);
        final int permissionFlag = rs.getInt(12);
        final boolean isGroup = rs.getBoolean(13);

        // If username is not null, add the permission to the project
        // If username is null, we can assume that this row was returned without any associated permission
        // i.e. this project had no associated permissions.
        if (username != null) {
          Permission perm = new Permission(permissionFlag);
          if (isGroup) {
            projects.get(id).setGroupPermission(username, perm);
          } else {
            projects.get(id).setUserPermission(username, perm);
          }
        }
      } while (rs.next());

      return new ArrayList<>(projects.values());
    }
  }

  /**
   * Maps project_flows rows into {@link Flow} objects, decoding the (possibly gzipped)
   * JSON payload of each row. Rows with a null payload are skipped.
   */
  public static class ProjectFlowsResultHandler implements ResultSetHandler<List<Flow>> {

    public static String SELECT_PROJECT_FLOW =
        "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=? AND flow_id=?";

    public static String SELECT_ALL_PROJECT_FLOWS =
        "SELECT project_id, version, flow_id, modified_time, encoding_type, json FROM project_flows WHERE project_id=? AND version=?";

    @Override
    public List<Flow> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.emptyList();
      }

      final ArrayList<Flow> flows = new ArrayList<>();
      do {
        final String flowId = rs.getString(3);
        final int encodingType = rs.getInt(5);
        final byte[] dataBytes = rs.getBytes(6);

        if (dataBytes == null) {
          continue;
        }

        final EncodingType encType = EncodingType.fromInteger(encodingType);

        Object flowObj = null;
        try {
          // Convoluted way to inflate strings. Should find common package or
          // helper function.
          if (encType == EncodingType.GZIP) {
            // Decompress the sucker.
            final String jsonString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
            flowObj = JSONUtils.parseJSONFromString(jsonString);
          } else {
            final String jsonString = new String(dataBytes, "UTF-8");
            flowObj = JSONUtils.parseJSONFromString(jsonString);
          }

          final Flow flow = Flow.flowFromObject(flowObj);
          flows.add(flow);
        } catch (final IOException e) {
          throw new SQLException("Error retrieving flow data " + flowId, e);
        }
      } while (rs.next());

      return flows;
    }
  }

  /**
   * Maps project_properties rows into (name, Props) pairs, decoding the (possibly gzipped)
   * JSON property payload of each row.
   */
  public static class ProjectPropertiesResultsHandler implements
      ResultSetHandler<List<Pair<String, Props>>> {

    public static String SELECT_PROJECT_PROPERTY =
        "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=? AND name=?";

    public static String SELECT_PROJECT_PROPERTIES =
        "SELECT project_id, version, name, modified_time, encoding_type, property FROM project_properties WHERE project_id=? AND version=?";

    @Override
    public List<Pair<String, Props>> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.emptyList();
      }

      final List<Pair<String, Props>> properties = new ArrayList<>();
      do {
        final String name = rs.getString(3);
        final int eventType = rs.getInt(5);
        final byte[] dataBytes = rs.getBytes(6);

        final EncodingType encType = EncodingType.fromInteger(eventType);
        String propertyString = null;

        try {
          if (encType == EncodingType.GZIP) {
            // Decompress the sucker.
            propertyString = GZIPUtils.unGzipString(dataBytes, "UTF-8");
          } else {
            propertyString = new String(dataBytes, "UTF-8");
          }

          final Props props = PropsUtils.fromJSONString(propertyString);
          // Record the row name as the props source for traceability.
          props.setSource(name);
          properties.add(new Pair<>(name, props));
        } catch (final IOException e) {
          throw new SQLException(e);
        }
      } while (rs.next());

      return properties;
    }
  }

  /**
   * Maps project_events rows into {@link ProjectLogEvent} objects.
   */
  public static class ProjectLogsResultHandler implements ResultSetHandler<List<ProjectLogEvent>> {

    public static String SELECT_PROJECT_EVENTS_ORDER =
        "SELECT project_id, event_type, event_time, username, message FROM project_events WHERE project_id=? ORDER BY event_time DESC LIMIT ? OFFSET ?";

    @Override
    public List<ProjectLogEvent> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.emptyList();
      }

      final ArrayList<ProjectLogEvent> events = new ArrayList<>();
      do {
        final int projectId = rs.getInt(1);
        final int eventType = rs.getInt(2);
        final long eventTime = rs.getLong(3);
        final String username = rs.getString(4);
        final String message = rs.getString(5);

        final ProjectLogEvent event =
            new ProjectLogEvent(projectId, ProjectLogEvent.EventType.fromInteger(eventType),
                eventTime, username, message);
        events.add(event);
      } while (rs.next());

      return events;
    }
  }

  /**
   * Collects the raw byte chunks of an uploaded project file, in chunk order.
   */
  public static class ProjectFileChunkResultHandler implements ResultSetHandler<List<byte[]>> {

    public static String SELECT_PROJECT_CHUNKS_FILE =
        "SELECT project_id, version, chunk, size, file FROM project_files WHERE project_id=? AND version=? AND chunk >= ? AND chunk < ? ORDER BY chunk ASC";

    @Override
    public List<byte[]> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.emptyList();
      }

      final ArrayList<byte[]> data = new ArrayList<>();
      do {
        final byte[] bytes = rs.getBytes(5);
        data.add(bytes);
      } while (rs.next());

      return data;
    }
  }

  /**
   * Maps project_versions rows into {@link ProjectFileHandler} objects, including parsing the
   * optional startup-dependencies blob for thin archives. Returns null when no row matched.
   */
  public static class ProjectVersionResultHandler implements
      ResultSetHandler<List<ProjectFileHandler>> {

    public static String SELECT_PROJECT_VERSION =
        "SELECT project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks,"
            + " resource_id, startup_dependencies, uploader_ip_addr "
            + " FROM project_versions WHERE project_id=? AND version=?";

    @Override
    public List<ProjectFileHandler> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return null;
      }

      final List<ProjectFileHandler> handlers = new ArrayList<>();
      do {
        final int projectId = rs.getInt(1);
        final int version = rs.getInt(2);
        final long uploadTime = rs.getLong(3);
        final String uploader = rs.getString(4);
        final String fileType = rs.getString(5);
        final String fileName = rs.getString(6);
        final byte[] md5 = rs.getBytes(7);
        final int numChunks = rs.getInt(8);
        final String resourceId = rs.getString(9);
        final Blob startupDependenciesBlob = rs.getBlob(10);
        final String uploaderIpAddr = rs.getString(11);

        // Null blob means this is not a thin archive; use an empty dependency set.
        Set<Dependency> startupDependencies = Collections.emptySet();
        if (startupDependenciesBlob != null) {
          try {
            startupDependencies = ThinArchiveUtils.parseStartupDependencies(
                IOUtils.toString(startupDependenciesBlob.getBinaryStream(),
                    StandardCharsets.UTF_8));
          } catch (IOException | InvalidHashException e) {
            // This should never happen unless the file is malformed in the database.
            // The file was already validated when the project was uploaded.
            throw new SQLException(e);
          }
        }

        final ProjectFileHandler handler =
            new ProjectFileHandler(projectId, version, uploadTime, uploader, fileType, fileName,
                numChunks, md5, startupDependencies, resourceId, uploaderIpAddr);
        handlers.add(handler);
      } while (rs.next());

      return handlers;
    }
  }

  /**
   * Extracts a single integer (e.g. a MAX(version) aggregate); returns 0 when no row matched.
   */
  public static class IntHandler implements ResultSetHandler<Integer> {

    public static String SELECT_LATEST_VERSION =
        "SELECT MAX(version) FROM project_versions WHERE project_id=?";
    public static String SELECT_LATEST_FLOW_VERSION = "SELECT MAX(flow_version) FROM "
        + "project_flow_files WHERE project_id=? AND project_version=? AND flow_name=?";

    @Override
    public Integer handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return 0;
      }
      return rs.getInt(1);
    }
  }

  /**
   * Collects raw flow-file payloads from project_flow_files rows.
   */
  public static class FlowFileResultHandler implements ResultSetHandler<List<byte[]>> {

    public static String SELECT_FLOW_FILE =
        "SELECT flow_file FROM project_flow_files WHERE "
            + "project_id=? AND project_version=? AND flow_name=? AND flow_version=?";

    public static String SELECT_ALL_FLOW_FILES =
        "SELECT flow_file FROM project_flow_files WHERE "
            + "project_id=? AND project_version=?";

    @Override
    public List<byte[]> handle(final ResultSet rs) throws SQLException {
      if (!rs.next()) {
        return Collections.emptyList();
      }

      final List<byte[]> data = new ArrayList<>();
      do {
        final byte[] bytes = rs.getBytes(1);
        data.add(bytes);
      } while (rs.next());

      return data;
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/JdbcProjectImpl.java
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.project; import static azkaban.project.JdbcProjectHandlerSet.IntHandler; import static azkaban.project.JdbcProjectHandlerSet.ProjectFileChunkResultHandler; import static azkaban.project.JdbcProjectHandlerSet.ProjectFlowsResultHandler; import static azkaban.project.JdbcProjectHandlerSet.ProjectLogsResultHandler; import static azkaban.project.JdbcProjectHandlerSet.ProjectPropertiesResultsHandler; import static azkaban.project.JdbcProjectHandlerSet.ProjectResultHandler; import static azkaban.project.JdbcProjectHandlerSet.ProjectVersionResultHandler; import azkaban.Constants.ConfigurationKeys; import azkaban.db.DatabaseOperator; import azkaban.db.DatabaseTransOperator; import azkaban.db.EncodingType; import azkaban.db.SQLTransaction; import azkaban.flow.Flow; import azkaban.project.JdbcProjectHandlerSet.FlowFileResultHandler; import azkaban.project.ProjectLogEvent.EventType; import azkaban.user.Permission; import azkaban.user.User; import azkaban.utils.GZIPUtils; import azkaban.utils.HashUtils; import azkaban.utils.JSONUtils; import azkaban.utils.Pair; import azkaban.utils.Props; import azkaban.utils.PropsUtils; import azkaban.utils.Triple; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import 
java.io.InputStream; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import javax.inject.Inject; import javax.inject.Singleton; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.log4j.Logger; /** * This class implements ProjectLoader using new azkaban-db code to allow DB failover. TODO * kunkun-tang: This class is too long. In future, we should split {@link ProjectLoader} interface * and have multiple short class implementations. */ @Singleton public class JdbcProjectImpl implements ProjectLoader { private static final Logger logger = Logger.getLogger(JdbcProjectImpl.class); private static final int CHUCK_SIZE = 1024 * 1024 * 10; // Flow yaml files are usually small, set size limitation to 10 MB should be sufficient for now. private static final int MAX_FLOW_FILE_SIZE_IN_BYTES = 1024 * 1024 * 10; private final DatabaseOperator dbOperator; private final File tempDir; private final EncodingType defaultEncodingType = EncodingType.GZIP; @Inject public JdbcProjectImpl(final Props props, final DatabaseOperator databaseOperator) { this.dbOperator = databaseOperator; this.tempDir = new File(props.getString("project.temp.dir", "temp")); if (!this.tempDir.exists()) { if (this.tempDir.mkdirs()) { logger.info("project temporary folder is being constructed."); } else { logger.info("project temporary folder already existed."); } } } @Override public List<Project> fetchAllActiveProjects() throws ProjectManagerException { final ProjectResultHandler handler = new ProjectResultHandler(); List<Project> projects = null; try { projects = this.dbOperator.query(ProjectResultHandler.SELECT_ALL_ACTIVE_PROJECTS, handler); } catch (final SQLException ex) { logger.error(ProjectResultHandler.SELECT_ALL_ACTIVE_PROJECTS + " failed.", ex); throw new ProjectManagerException("Error 
retrieving all active projects", ex); } return projects; } @Override public Project fetchProjectById(final int id) throws ProjectManagerException { Project project = null; final ProjectResultHandler handler = new ProjectResultHandler(); try { final List<Project> projects = this.dbOperator .query(ProjectResultHandler.SELECT_PROJECT_BY_ID, handler, id); if (projects.isEmpty()) { throw new ProjectManagerException("No project with id " + id + " exists in db."); } project = projects.get(0); } catch (final SQLException ex) { logger.error(ProjectResultHandler.SELECT_PROJECT_BY_ID + " failed.", ex); throw new ProjectManagerException("Query for existing project failed. Project " + id, ex); } return project; } @Override public Project fetchProjectByName(final String name) throws ProjectManagerException { Project project = null; final ProjectResultHandler handler = new ProjectResultHandler(); // At most one active project with the same name exists in db. try { final List<Project> projects = this.dbOperator .query(ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler, name); if (projects.isEmpty()) { return null; } project = projects.get(0); } catch (final SQLException ex) { logger.error(ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME + " failed.", ex); throw new ProjectManagerException( ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME + " failed.", ex); } return project; } /** * Creates a Project in the db. * * It will throw an exception if it finds an active project of the same name, or the SQL fails */ @Override public synchronized Project createNewProject(final String name, final String description, final User creator) throws ProjectManagerException { final ProjectResultHandler handler = new ProjectResultHandler(); // Check if the same project name exists. 
try { final List<Project> projects = this.dbOperator .query(ProjectResultHandler.SELECT_ACTIVE_PROJECT_BY_NAME, handler, name); if (!projects.isEmpty()) { throw new ProjectManagerException( "Active project with name " + name + " already exists in db."); } } catch (final SQLException ex) { logger.error(ex); throw new ProjectManagerException("Checking for existing project failed. " + name, ex); } final String INSERT_PROJECT = "INSERT INTO projects ( name, active, modified_time, create_time, version, last_modified_by, description, enc_type, settings_blob) values (?,?,?,?,?,?,?,?,?)"; final SQLTransaction<Integer> insertProject = transOperator -> { final long time = System.currentTimeMillis(); return transOperator .update(INSERT_PROJECT, name, true, time, time, null, creator.getUserId(), description, this.defaultEncodingType.getNumVal(), null); }; // Insert project try { final int numRowsInserted = this.dbOperator.transaction(insertProject); if (numRowsInserted == 0) { throw new ProjectManagerException("No projects have been inserted."); } } catch (final SQLException ex) { logger.error(INSERT_PROJECT + " failed.", ex); throw new ProjectManagerException("Insert project" + name + " for existing project failed. ", ex); } return fetchProjectByName(name); } @Override public void uploadProjectFile(final int projectId, final int version, final File localFile, final String uploader, final String uploaderIPAddr) throws ProjectManagerException { final long startMs = System.currentTimeMillis(); logger.info(String .format("Uploading Project ID: %d file: %s [%d bytes]", projectId, localFile.getName(), localFile.length())); /* * The below transaction uses one connection to do all operations. Ideally, we should commit * after the transaction completes. However, uploadFile needs to commit every time when we * upload any single chunk. * * Todo kunkun-tang: fix the transaction issue. 
*/ final SQLTransaction<Integer> uploadProjectFileTransaction = transOperator -> { /* Step 1: Update DB with new project info */ // Database storage does not support thin archives, so we just set the startupDependencies file to null. addProjectToProjectVersions(transOperator, projectId, version, localFile, null, uploader, computeHash(localFile), null, uploaderIPAddr); transOperator.getConnection().commit(); /* Step 2: Upload File in chunks to DB */ final int chunks = uploadFileInChunks(transOperator, projectId, version, localFile); /* Step 3: Update number of chunks in DB */ updateChunksInProjectVersions(transOperator, projectId, version, chunks); return 1; }; try { this.dbOperator.transaction(uploadProjectFileTransaction); } catch (final SQLException e) { logger.error("upload project files failed.", e); throw new ProjectManagerException("upload project files failed.", e); } final long duration = (System.currentTimeMillis() - startMs) / 1000; logger.info(String.format("Uploaded Project ID: %d file: %s [%d bytes] in %d sec", projectId, localFile.getName(), localFile.length(), duration)); } private byte[] computeHash(final File localFile) { logger.info("Creating MD5 hash for upload " + localFile.getName()); final byte[] md5; try { md5 = HashUtils.MD5.getHashBytes(localFile); } catch (final IOException e) { throw new ProjectManagerException("Error getting MD5 hash.", e); } logger.info("MD5 hash created"); return md5; } @Override public void addProjectVersion(final int projectId, final int version, final File localFile, final File startupDependencies, final String uploader, final byte[] md5, final String resourceId, final String uploaderIPAddr) throws ProjectManagerException { // when one transaction completes, it automatically commits. 
final SQLTransaction<Integer> transaction = transOperator -> { addProjectToProjectVersions(transOperator, projectId, version, localFile, startupDependencies, uploader, md5, resourceId, uploaderIPAddr); return 1; }; try { this.dbOperator.transaction(transaction); } catch (final SQLException e) { logger.error("addProjectVersion failed.", e); throw new ProjectManagerException("addProjectVersion failed.", e); } } /** * Insert a new version record to TABLE project_versions before uploading files. * * The reason for this operation: When error chunking happens in remote mysql server, incomplete * file data remains in DB, and an SQL exception is thrown. If we don't have this operation before * uploading file, the SQL exception prevents AZ from creating the new version record in Table * project_versions. However, the Table project_files still reserve the incomplete files, which * causes troubles when uploading a new file: Since the version in TABLE project_versions is still * old, mysql will stop inserting new files to db. * * Why this operation is safe: When AZ uploads a new zip file, it always fetches the latest * version proj_v from TABLE project_version, proj_v+1 will be used as the new version for the * uploading files. * * Assume error chunking happens on day 1. proj_v is created for this bad file (old file version + * 1). When we upload a new project zip in day2, new file in day 2 will use the new version * (proj_v + 1). When file uploading completes, AZ will clean all old chunks in DB afterward. 
*/ private void addProjectToProjectVersions( final DatabaseTransOperator transOperator, final int projectId, final int version, final File localFile, final File startupDependencies, final String uploader, final byte[] md5, final String resourceId, final String uploaderIPAddr) throws ProjectManagerException { final long updateTime = System.currentTimeMillis(); final String INSERT_PROJECT_VERSION = "INSERT INTO project_versions " + "(project_id, version, upload_time, uploader, file_type, file_name, md5, num_chunks, resource_id, " + "startup_dependencies, uploader_ip_addr) values (?,?,?,?,?,?,?,?,?,?,?)"; try { /* * As we don't know the num_chunks before uploading the file, we initialize it to 0, * and will update it after uploading completes. */ String lowercaseFileExtension = FilenameUtils.getExtension(localFile.getName()).toLowerCase(); // Get the startup dependencies input stream (or null if the file does not exist - indicating this is // a fat archive). InputStream startupDependenciesStream = getStartupDependenciesInputStream(startupDependencies); // Perform the DB update transOperator.update(INSERT_PROJECT_VERSION, projectId, version, updateTime, uploader, lowercaseFileExtension, localFile.getName(), md5, 0, resourceId, startupDependenciesStream, uploaderIPAddr); } catch (final SQLException e) { final String msg = String .format("Error initializing project id: %d version: %d ", projectId, version); logger.error(msg, e); throw new ProjectManagerException(msg, e); } } private InputStream getStartupDependenciesInputStream(File startupDependencies) { try { // If startupDependencies is null, we assume this is a fat archive and return null. If it is not null, // we assume the file exists and return an input stream for the file. return startupDependencies != null ? new FileInputStream(startupDependencies) : null; } catch (FileNotFoundException e) { // This shouldn't happen, the file should always exist if it is non-null. 
throw new RuntimeException(e); } } private int uploadFileInChunks(final DatabaseTransOperator transOperator, final int projectId, final int version, final File localFile) throws ProjectManagerException { // Really... I doubt we'll get a > 2gig file. So int casting it is! final byte[] buffer = new byte[CHUCK_SIZE]; final String INSERT_PROJECT_FILES = "INSERT INTO project_files (project_id, version, chunk, size, file) values (?,?,?,?,?)"; BufferedInputStream bufferedStream = null; int chunk = 0; try { bufferedStream = new BufferedInputStream(new FileInputStream(localFile)); int size = bufferedStream.read(buffer); while (size >= 0) { logger.info("Read bytes for " + localFile.getName() + " size:" + size); byte[] buf = buffer; if (size < buffer.length) { buf = Arrays.copyOfRange(buffer, 0, size); } try { logger.info("Running update for " + localFile.getName() + " chunk " + chunk); transOperator.update(INSERT_PROJECT_FILES, projectId, version, chunk, size, buf); /* * We enforce az committing to db when uploading every single chunk, * in order to reduce the transaction duration and conserve sql server resources. * * If the files to be uploaded is very large and we don't commit every single chunk, * the remote mysql server will run into memory troubles. */ transOperator.getConnection().commit(); logger.info("Finished update for " + localFile.getName() + " chunk " + chunk); } catch (final SQLException e) { throw new ProjectManagerException("Error Chunking during uploading files to db..."); } ++chunk; size = bufferedStream.read(buffer); } } catch (final IOException e) { throw new ProjectManagerException( String.format( "Error chunking file. projectId: %d, version: %d, file:%s[%d bytes], chunk: %d", projectId, version, localFile.getName(), localFile.length(), chunk)); } finally { IOUtils.closeQuietly(bufferedStream); } return chunk; } /** * we update num_chunks's actual number to db here. 
*/ private void updateChunksInProjectVersions(final DatabaseTransOperator transOperator, final int projectId, final int version, final int chunk) throws ProjectManagerException { final String UPDATE_PROJECT_NUM_CHUNKS = "UPDATE project_versions SET num_chunks=? WHERE project_id=? AND version=?"; try { transOperator.update(UPDATE_PROJECT_NUM_CHUNKS, chunk, projectId, version); transOperator.getConnection().commit(); } catch (final SQLException e) { logger.error("Error updating project " + projectId + " : chunk_num " + chunk, e); throw new ProjectManagerException( "Error updating project " + projectId + " : chunk_num " + chunk, e); } } @Override public ProjectFileHandler fetchProjectMetaData(final int projectId, final int version) { final ProjectVersionResultHandler pfHandler = new ProjectVersionResultHandler(); try { final List<ProjectFileHandler> projectFiles = this.dbOperator .query(ProjectVersionResultHandler.SELECT_PROJECT_VERSION, pfHandler, projectId, version); if (projectFiles == null || projectFiles.isEmpty()) { return null; } return projectFiles.get(0); } catch (final SQLException ex) { logger.error("Query for uploaded file for project id " + projectId + " failed.", ex); throw new ProjectManagerException( "Query for uploaded file for project id " + projectId + " failed.", ex); } } @Override public ProjectFileHandler getUploadedFile(final int projectId, final int version) throws ProjectManagerException { final ProjectFileHandler projHandler = fetchProjectMetaData(projectId, version); if (projHandler == null) { return null; } final int numChunks = projHandler.getNumChunks(); if (numChunks <= 0) { throw new ProjectManagerException(String.format("Got numChunks=%s for version %s of project " + "%s - seems like this version has been cleaned up already, because enough newer " + "versions have been uploaded. 
To increase the retention of project versions, set " + "%s", numChunks, version, projectId, ConfigurationKeys.PROJECT_VERSION_RETENTION)); } BufferedOutputStream bStream = null; File file; try { try { file = File .createTempFile(projHandler.getFileName(), String.valueOf(version), this.tempDir); bStream = new BufferedOutputStream(new FileOutputStream(file)); } catch (final IOException e) { throw new ProjectManagerException("Error creating temp file for stream."); } final int collect = 5; int fromChunk = 0; int toChunk = collect; do { final ProjectFileChunkResultHandler chunkHandler = new ProjectFileChunkResultHandler(); List<byte[]> data = null; try { data = this.dbOperator .query(ProjectFileChunkResultHandler.SELECT_PROJECT_CHUNKS_FILE, chunkHandler, projectId, version, fromChunk, toChunk); } catch (final SQLException e) { logger.error(e); throw new ProjectManagerException("Query for uploaded file for " + projectId + " failed.", e); } try { for (final byte[] d : data) { bStream.write(d); } } catch (final IOException e) { throw new ProjectManagerException("Error writing file", e); } // Add all the bytes to the stream. fromChunk += collect; toChunk += collect; } while (fromChunk <= numChunks); } finally { IOUtils.closeQuietly(bStream); } // Check md5. final byte[] md5; try { md5 = HashUtils.MD5.getHashBytes(file); } catch (final IOException e) { throw new ProjectManagerException("Error getting MD5 hash.", e); } if (Arrays.equals(projHandler.getMD5Hash(), md5)) { logger.info("Md5 Hash is valid"); } else { throw new ProjectManagerException( String.format("Md5 Hash failed on project %s version %s retrieval of file %s. 
" + "Expected hash: %s , got hash: %s", projHandler.getProjectId(), projHandler.getVersion(), file.getAbsolutePath(), Arrays.toString(projHandler.getMD5Hash()), Arrays.toString(md5))); } projHandler.setLocalFile(file); return projHandler; } @Override public void changeProjectVersion(final Project project, final int version, final String user) throws ProjectManagerException { final long timestamp = System.currentTimeMillis(); try { final String UPDATE_PROJECT_VERSION = "UPDATE projects SET version=?,modified_time=?,last_modified_by=? WHERE id=?"; this.dbOperator.update(UPDATE_PROJECT_VERSION, version, timestamp, user, project.getId()); project.setVersion(version); project.setLastModifiedTimestamp(timestamp); project.setLastModifiedUser(user); } catch (final SQLException e) { logger.error("Error updating switching project version " + project.getName(), e); throw new ProjectManagerException( "Error updating switching project version " + project.getName(), e); } } @Override public void updatePermission(final Project project, final String name, final Permission perm, final boolean isGroup) throws ProjectManagerException { final long updateTime = System.currentTimeMillis(); try { if (this.dbOperator.getDataSource().allowsOnDuplicateKey()) { final String INSERT_PROJECT_PERMISSION = "INSERT INTO project_permissions (project_id, modified_time, name, permissions, isGroup) values (?,?,?,?,?)" + "ON DUPLICATE KEY UPDATE modified_time = VALUES(modified_time), permissions = VALUES(permissions)"; this.dbOperator .update(INSERT_PROJECT_PERMISSION, project.getId(), updateTime, name, perm.toFlags(), isGroup); } else { final String MERGE_PROJECT_PERMISSION = "MERGE INTO project_permissions (project_id, modified_time, name, permissions, isGroup) KEY (project_id, name) values (?,?,?,?,?)"; this.dbOperator .update(MERGE_PROJECT_PERMISSION, project.getId(), updateTime, name, perm.toFlags(), isGroup); } } catch (final SQLException ex) { logger.error("Error updating project permission", 
ex); throw new ProjectManagerException( "Error updating project " + project.getName() + " permissions for " + name, ex); } if (isGroup) { project.setGroupPermission(name, perm); } else { project.setUserPermission(name, perm); } } @Override public void updateProjectSettings(final Project project) throws ProjectManagerException { updateProjectSettings(project, this.defaultEncodingType); } private byte[] convertJsonToBytes(final EncodingType type, final String json) throws IOException { byte[] data = json.getBytes("UTF-8"); if (type == EncodingType.GZIP) { data = GZIPUtils.gzipBytes(data); } return data; } private void updateProjectSettings(final Project project, final EncodingType encType) throws ProjectManagerException { final String UPDATE_PROJECT_SETTINGS = "UPDATE projects SET enc_type=?, settings_blob=? WHERE id=?"; final String json = JSONUtils.toJSON(project.toObject()); byte[] data = null; try { data = convertJsonToBytes(encType, json); logger.debug("NumChars: " + json.length() + " Gzip:" + data.length); } catch (final IOException e) { throw new ProjectManagerException("Failed to encode. ", e); } try { this.dbOperator.update(UPDATE_PROJECT_SETTINGS, encType.getNumVal(), data, project.getId()); } catch (final SQLException e) { logger.error("update Project Settings failed.", e); throw new ProjectManagerException( "Error updating project " + project.getName() + " version " + project.getVersion(), e); } } @Override public void removePermission(final Project project, final String name, final boolean isGroup) throws ProjectManagerException { final String DELETE_PROJECT_PERMISSION = "DELETE FROM project_permissions WHERE project_id=? AND name=? 
AND isGroup=?"; try { this.dbOperator.update(DELETE_PROJECT_PERMISSION, project.getId(), name, isGroup); } catch (final SQLException e) { logger.error("remove Permission failed.", e); throw new ProjectManagerException( "Error deleting project " + project.getName() + " permissions for " + name, e); } if (isGroup) { project.removeGroupPermission(name); } else { project.removeUserPermission(name); } } /** * Todo kunkun-tang: the below implementation doesn't remove a project, but inactivate a project. * We should rewrite the code to follow the literal meanings. */ @Override public void removeProject(final Project project, final String user) throws ProjectManagerException { final long updateTime = System.currentTimeMillis(); final String UPDATE_INACTIVE_PROJECT = "UPDATE projects SET active=false,modified_time=?,last_modified_by=? WHERE id=?"; try { this.dbOperator.update(UPDATE_INACTIVE_PROJECT, updateTime, user, project.getId()); } catch (final SQLException e) { logger.error("error remove project " + project.getName(), e); throw new ProjectManagerException("Error remove project " + project.getName(), e); } } @Override public boolean postEvent(final Project project, final EventType type, final String user, final String message) { final String INSERT_PROJECT_EVENTS = "INSERT INTO project_events (project_id, event_type, event_time, username, message) values (?,?,?,?,?)"; final long updateTime = System.currentTimeMillis(); try { this.dbOperator .update(INSERT_PROJECT_EVENTS, project.getId(), type.getNumVal(), updateTime, user, message); } catch (final SQLException e) { logger.error("post event failed,", e); return false; } return true; } @Override public List<ProjectLogEvent> getProjectEvents(final Project project, final int num, final int skip) throws ProjectManagerException { final ProjectLogsResultHandler logHandler = new ProjectLogsResultHandler(); List<ProjectLogEvent> events = null; try { events = this.dbOperator 
.query(ProjectLogsResultHandler.SELECT_PROJECT_EVENTS_ORDER, logHandler, project.getId(), num, skip); } catch (final SQLException e) { logger.error("Error getProjectEvents, project " + project.getName(), e); throw new ProjectManagerException("Error getProjectEvents, project " + project.getName(), e); } return events; } @Override public void updateDescription(final Project project, final String description, final String user) throws ProjectManagerException { final String UPDATE_PROJECT_DESCRIPTION = "UPDATE projects SET description=?,modified_time=?,last_modified_by=? WHERE id=?"; final long updateTime = System.currentTimeMillis(); try { this.dbOperator .update(UPDATE_PROJECT_DESCRIPTION, description, updateTime, user, project.getId()); project.setDescription(description); project.setLastModifiedTimestamp(updateTime); project.setLastModifiedUser(user); } catch (final SQLException e) { logger.error(e); throw new ProjectManagerException("Error update Description, project " + project.getName(), e); } } @Override public int getLatestProjectVersion(final Project project) throws ProjectManagerException { final IntHandler handler = new IntHandler(); try { return this.dbOperator.query(IntHandler.SELECT_LATEST_VERSION, handler, project.getId()); } catch (final SQLException e) { logger.error(e); throw new ProjectManagerException( "Error marking project " + project.getName() + " as inactive", e); } } @Override public void uploadFlows(final Project project, final int version, final Collection<Flow> flows) throws ProjectManagerException { // We do one at a time instead of batch... because well, the batch could be // large. 
logger.info("Uploading flows"); try { for (final Flow flow : flows) { uploadFlow(project, version, flow, this.defaultEncodingType); } } catch (final IOException e) { throw new ProjectManagerException("Flow Upload failed.", e); } } @Override public void uploadFlow(final Project project, final int version, final Flow flow) throws ProjectManagerException { logger.info("Uploading flow " + flow.getId()); try { uploadFlow(project, version, flow, this.defaultEncodingType); } catch (final IOException e) { throw new ProjectManagerException("Flow Upload failed.", e); } } @Override public void updateFlow(final Project project, final int version, final Flow flow) throws ProjectManagerException { logger.info("Uploading flow " + flow.getId()); try { final String json = JSONUtils.toJSON(flow.toObject()); final byte[] data = convertJsonToBytes(this.defaultEncodingType, json); logger.info("Flow upload " + flow.getId() + " is byte size " + data.length); final String UPDATE_FLOW = "UPDATE project_flows SET encoding_type=?,json=? WHERE project_id=? AND version=? 
AND flow_id=?"; try { this.dbOperator .update(UPDATE_FLOW, this.defaultEncodingType.getNumVal(), data, project.getId(), version, flow.getId()); } catch (final SQLException e) { logger.error("Error inserting flow", e); throw new ProjectManagerException("Error inserting flow " + flow.getId(), e); } } catch (final IOException e) { throw new ProjectManagerException("Flow Upload failed.", e); } } private void uploadFlow(final Project project, final int version, final Flow flow, final EncodingType encType) throws ProjectManagerException, IOException { final String json = JSONUtils.toJSON(flow.toObject()); final byte[] data = convertJsonToBytes(encType, json); logger.info("Flow upload " + flow.getId() + " is byte size " + data.length); final String INSERT_FLOW = "INSERT INTO project_flows (project_id, version, flow_id, modified_time, encoding_type, json) values (?,?,?,?,?,?)"; try { this.dbOperator .update(INSERT_FLOW, project.getId(), version, flow.getId(), System.currentTimeMillis(), encType.getNumVal(), data); } catch (final SQLException e) { logger.error("Error inserting flow", e); throw new ProjectManagerException("Error inserting flow " + flow.getId(), e); } } @Override public Flow fetchFlow(final Project project, final String flowId) throws ProjectManagerException { throw new UnsupportedOperationException("this method has not been instantiated."); } @Override public List<Flow> fetchAllProjectFlows(final Project project) throws ProjectManagerException { return fetchAllFlowsForProjects(Arrays.asList(project)).get(project); } @Override public Map<Project, List<Flow>> fetchAllFlowsForProjects(final List<Project> projects) throws ProjectManagerException { final SQLTransaction<Map<Project, List<Flow>>> transaction = transOperator -> { Map<Project, List<Flow>> projectToFlows = new HashMap(); for (Project p : projects) { projectToFlows.put(p, transOperator .query(ProjectFlowsResultHandler.SELECT_ALL_PROJECT_FLOWS, new ProjectFlowsResultHandler(), p.getId(), 
p.getVersion())); } return projectToFlows; }; try { return this.dbOperator.transaction(transaction); } catch (final SQLException e) { throw new ProjectManagerException( "Error fetching flows for " + projects.size() + " project(s).", e); } } @Override public void uploadProjectProperties(final Project project, final List<Props> properties) throws ProjectManagerException { for (final Props props : properties) { try { uploadProjectProperty(project, props.getSource(), props); } catch (final IOException e) { throw new ProjectManagerException("Error uploading project property file", e); } } } @Override public void uploadProjectProperty(final Project project, final Props props) throws ProjectManagerException { try { uploadProjectProperty(project, props.getSource(), props); } catch (final IOException e) { throw new ProjectManagerException("Error uploading project property file", e); } } @Override public void updateProjectProperty(final Project project, final Props props) throws ProjectManagerException { try { updateProjectProperty(project, props.getSource(), props); } catch (final IOException e) { throw new ProjectManagerException("Error uploading project property file", e); } } private void updateProjectProperty(final Project project, final String name, final Props props) throws ProjectManagerException, IOException { final String UPDATE_PROPERTIES = "UPDATE project_properties SET property=? WHERE project_id=? AND version=? 
AND name=?"; final byte[] propsData = getBytes(props); try { this.dbOperator .update(UPDATE_PROPERTIES, propsData, project.getId(), project.getVersion(), name); } catch (final SQLException e) { throw new ProjectManagerException( "Error updating property " + project.getName() + " version " + project.getVersion(), e); } } private void uploadProjectProperty(final Project project, final String name, final Props props) throws ProjectManagerException, IOException { final String INSERT_PROPERTIES = "INSERT INTO project_properties (project_id, version, name, modified_time, encoding_type, property) values (?,?,?,?,?,?)"; final byte[] propsData = getBytes(props); try { this.dbOperator.update(INSERT_PROPERTIES, project.getId(), project.getVersion(), name, System.currentTimeMillis(), this.defaultEncodingType.getNumVal(), propsData); } catch (final SQLException e) { throw new ProjectManagerException( "Error uploading project properties " + name + " into " + project.getName() + " version " + project.getVersion(), e); } } private byte[] getBytes(final Props props) throws IOException { final String propertyJSON = PropsUtils.toJSONString(props, true); byte[] data = propertyJSON.getBytes("UTF-8"); if (this.defaultEncodingType == EncodingType.GZIP) { data = GZIPUtils.gzipBytes(data); } return data; } @Override public Props fetchProjectProperty(final int projectId, final int projectVer, final String propsName) throws ProjectManagerException { final ProjectPropertiesResultsHandler handler = new ProjectPropertiesResultsHandler(); try { final List<Pair<String, Props>> properties = this.dbOperator .query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTY, handler, projectId, projectVer, propsName); if (properties == null || properties.isEmpty()) { logger.debug("Project " + projectId + " version " + projectVer + " property " + propsName + " is empty."); return null; } return properties.get(0).getSecond(); } catch (final SQLException e) { logger.error("Error fetching property " + 
// NOTE(review): this span is the tail of the enclosing project-loader implementation class.
// The first lines below are the end of fetchProjectProperty(int, int, String), whose
// beginning lies outside this chunk — left verbatim.
            propsName + " Project " + projectId + " version " + projectVer, e);
      throw new ProjectManagerException("Error fetching property " + propsName, e);
    }
  }

  /**
   * Convenience overload: fetches a single named property set using the project's own
   * id and current version.
   */
  @Override
  public Props fetchProjectProperty(final Project project, final String propsName)
      throws ProjectManagerException {
    return fetchProjectProperty(project.getId(), project.getVersion(), propsName);
  }

  /**
   * Fetches all property sets stored for the given project version, keyed by property name.
   * Returns null (not an empty map) when the version has no properties — callers must
   * handle the null case.
   *
   * @throws ProjectManagerException on any SQL failure (original exception is chained)
   */
  @Override
  public Map<String, Props> fetchProjectProperties(final int projectId, final int version)
      throws ProjectManagerException {
    try {
      final List<Pair<String, Props>> properties = this.dbOperator
          .query(ProjectPropertiesResultsHandler.SELECT_PROJECT_PROPERTIES,
              new ProjectPropertiesResultsHandler(), projectId, version);
      if (properties == null || properties.isEmpty()) {
        return null;
      }
      final HashMap<String, Props> props = new HashMap<>();
      for (final Pair<String, Props> pair : properties) {
        props.put(pair.getFirst(), pair.getSecond());
      }
      return props;
    } catch (final SQLException e) {
      logger.error("Error fetching properties, project id" + projectId + " version " + version,
          e);
      throw new ProjectManagerException("Error fetching properties", e);
    }
  }

  /**
   * Deletes flows, properties and file chunks for every version of the project strictly
   * below {@code version}, except the versions listed in {@code excludedVersions}.
   * The matching project_versions rows are kept but their num_chunks is zeroed.
   * All four statements run in a single transaction.
   */
  @Override
  public void cleanOlderProjectVersion(final int projectId, final int version,
      final List<Integer> excludedVersions) throws ProjectManagerException {
    // Would use param of type Array from transOperator.getConnection().createArrayOf() but
    // h2 doesn't support the Array type, so format the filter manually.
    // NOTE(review): excludedVersions values are interpolated directly into SQL. They are
    // ints (List<Integer>), so injection is not possible here, but keep it that way.
    final String EXCLUDED_VERSIONS_FILTER = excludedVersions.stream()
        .map(excluded -> " AND version != " + excluded).collect(Collectors.joining());
    final String VERSION_FILTER = " AND version < ?" + EXCLUDED_VERSIONS_FILTER;
    final String DELETE_FLOW =
        "DELETE FROM project_flows WHERE project_id=?" + VERSION_FILTER;
    final String DELETE_PROPERTIES =
        "DELETE FROM project_properties WHERE project_id=?" + VERSION_FILTER;
    final String DELETE_PROJECT_FILES =
        "DELETE FROM project_files WHERE project_id=?" + VERSION_FILTER;
    final String UPDATE_PROJECT_VERSIONS =
        "UPDATE project_versions SET num_chunks=0 WHERE project_id=?" + VERSION_FILTER;
    // Todo jamiesjc: delete flow files
    final SQLTransaction<Integer> cleanOlderProjectTransaction = transOperator -> {
      transOperator.update(DELETE_FLOW, projectId, version);
      transOperator.update(DELETE_PROPERTIES, projectId, version);
      transOperator.update(DELETE_PROJECT_FILES, projectId, version);
      // Return value of the last update is what the caller inspects below.
      return transOperator.update(UPDATE_PROJECT_VERSIONS, projectId, version);
    };
    try {
      final int res = this.dbOperator.transaction(cleanOlderProjectTransaction);
      if (res == 0) {
        logger.info("clean older project given project id " + projectId
            + " doesn't take effect.");
      }
    } catch (final SQLException e) {
      logger.error("clean older project transaction failed", e);
      throw new ProjectManagerException("clean older project transaction failed", e);
    }
  }

  /**
   * Stores one YAML flow file (at most MAX_FLOW_FILE_SIZE_IN_BYTES, i.e. 10 MB) as a blob in
   * project_flow_files. The file is read fully into memory before the insert.
   *
   * @throws ProjectManagerException if the file exceeds the size limit, cannot be read, or
   *     the insert fails
   */
  @Override
  public void uploadFlowFile(final int projectId, final int projectVersion, final File flowFile,
      final int flowVersion) throws ProjectManagerException {
    logger.info(String
        .format(
            "Uploading flow file %s, version %d for project %d, version %d, file length is [%d bytes]",
            flowFile.getName(), flowVersion, projectId, projectVersion, flowFile.length()));
    if (flowFile.length() > MAX_FLOW_FILE_SIZE_IN_BYTES) {
      throw new ProjectManagerException("Flow file length exceeds 10 MB limit.");
    }
    final byte[] buffer = new byte[MAX_FLOW_FILE_SIZE_IN_BYTES];
    final String INSERT_FLOW_FILES =
        "INSERT INTO project_flow_files (project_id, project_version, flow_name, flow_version, "
            + "modified_time, "
            + "flow_file) values (?,?,?,?,?,?)";
    try (final FileInputStream input = new FileInputStream(flowFile);
        final BufferedInputStream bufferedStream = new BufferedInputStream(input)) {
      // Single read is sufficient: the file is known to fit in the buffer (checked above).
      final int size = bufferedStream.read(buffer);
      logger.info("Read bytes for " + flowFile.getName() + ", size:" + size);
      final byte[] buf = Arrays.copyOfRange(buffer, 0, size);
      try {
        this.dbOperator
            .update(INSERT_FLOW_FILES, projectId, projectVersion, flowFile.getName(),
                flowVersion, System.currentTimeMillis(), buf);
      } catch (final SQLException e) {
        throw new ProjectManagerException(
            "Error uploading flow file " + flowFile.getName() + ", version " + flowVersion
                + ".", e);
      }
    } catch (final IOException e) {
      // NOTE(review): the IOException cause is not chained here — only its message context
      // survives. Consider passing e as the cause.
      throw new ProjectManagerException(
          String.format(
              "Error reading flow file %s, version: %d, length: [%d bytes].",
              flowFile.getName(), flowVersion, flowFile.length()));
    }
  }

  /**
   * Fetches the stored flow-file blob and materializes it as a file named
   * {@code flowFileName} under {@code tempDir}. Returns the written file.
   */
  @Override
  public File getUploadedFlowFile(final int projectId, final int projectVersion,
      final String flowFileName, final int flowVersion, final File tempDir)
      throws ProjectManagerException, IOException {
    final FlowFileResultHandler handler = new FlowFileResultHandler();
    final List<byte[]> data;
    // Created separate temp directory for each flow file to avoid overwriting the same file by
    // multiple threads concurrently. Flow file name will be interpret as the flow name when
    // parsing the yaml flow file, so it has to be specific.
    final File file = new File(tempDir, flowFileName);
    try (final FileOutputStream output = new FileOutputStream(file);
        final BufferedOutputStream bufferedStream = new BufferedOutputStream(output)) {
      try {
        data = this.dbOperator
            .query(FlowFileResultHandler.SELECT_FLOW_FILE, handler,
                projectId, projectVersion, flowFileName, flowVersion);
      } catch (final SQLException e) {
        throw new ProjectManagerException(
            "Failed to query uploaded flow file for project " + projectId + " version "
                + projectVersion + ", flow file " + flowFileName + " version " + flowVersion,
            e);
      }
      if (data == null || data.isEmpty()) {
        throw new ProjectManagerException(
            "No flow file could be found in DB table for project " + projectId + " version "
                + projectVersion + ", flow file " + flowFileName + " version " + flowVersion);
      }
      // Only the first (and expected single) blob is written.
      bufferedStream.write(data.get(0));
    } catch (final IOException e) {
      throw new ProjectManagerException(
          "Error writing to output stream for project " + projectId + " version "
              + projectVersion + ", flow file " + flowFileName + " version " + flowVersion, e);
    }
    return file;
  }

  /**
   * Returns the highest flow version recorded for the given flow name in this project
   * version. Returns the handler's default (presumably 0 when no row exists — TODO confirm
   * against IntHandler).
   */
  @Override
  public int getLatestFlowVersion(final int projectId, final int projectVersion,
      final String flowName) throws ProjectManagerException {
    final IntHandler handler = new IntHandler();
    try {
      return this.dbOperator.query(IntHandler.SELECT_LATEST_FLOW_VERSION, handler, projectId,
          projectVersion, flowName);
    } catch (final SQLException e) {
      logger.error(e);
      throw new ProjectManagerException(
          "Error selecting latest flow version from project " + projectId + ", version "
              + projectVersion + ", flow " + flowName + ".", e);
    }
  }

  /**
   * Returns true if at least one flow file has been uploaded for this project version,
   * i.e. the project was defined with YAML flow files rather than .job files.
   */
  @Override
  public boolean isFlowFileUploaded(final int projectId, final int projectVersion)
      throws ProjectManagerException {
    final FlowFileResultHandler handler = new FlowFileResultHandler();
    final List<byte[]> data;
    try {
      data = this.dbOperator
          .query(FlowFileResultHandler.SELECT_ALL_FLOW_FILES, handler, projectId,
              projectVersion);
    } catch (final SQLException e) {
      logger.error(e);
      throw new ProjectManagerException("Failed to query uploaded flow files ", e);
    }
    return !data.isEmpty();
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/NodeBean.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */

package azkaban.project;

import azkaban.Constants;
import azkaban.utils.Props;
import java.io.Serializable;
import java.util.List;
import java.util.Map;

/**
 * Used by the YAML loader to deserialize DAG nodes in the flow.
 *
 * <p>A node is either a job (leaf) or a sub-flow (has nested {@code nodes}). All fields are
 * populated reflectively by SnakeYAML, so any of them may be null when absent from the YAML.
 * This bean is mutable by design (YAML binding requires setters) and is not thread-safe.
 */
public class NodeBean implements Serializable {

  // Fix the serial form explicitly instead of relying on the JVM-computed default,
  // which silently changes whenever a field is added (Effective Java, Item 87).
  private static final long serialVersionUID = 1L;

  // Node name; for the root node this is derived from the flow file name by the loader.
  private String name;
  // Raw key/value config from the YAML "config" section; may be null.
  private Map<String, String> config;
  // Names of nodes this node depends on; may be null when the node has no dependencies.
  private List<String> dependsOn;
  // Job type, or Constants.FLOW_NODE_TYPE for an embedded flow.
  private String type;
  // Optional condition expression gating execution.
  private String condition;
  // Child nodes when this bean represents a (sub-)flow; null for plain jobs.
  private List<NodeBean> nodes;
  // Optional flow trigger definition (root flow only).
  private FlowTriggerBean trigger;

  public String getName() {
    return this.name;
  }

  public void setName(final String name) {
    this.name = name;
  }

  public Map<String, String> getConfig() {
    return this.config;
  }

  public void setConfig(final Map<String, String> config) {
    this.config = config;
  }

  public List<String> getDependsOn() {
    return this.dependsOn;
  }

  public void setDependsOn(final List<String> dependsOn) {
    this.dependsOn = dependsOn;
  }

  public String getType() {
    return this.type;
  }

  public void setType(final String type) {
    this.type = type;
  }

  public String getCondition() {
    return this.condition;
  }

  public void setCondition(final String condition) {
    this.condition = condition;
  }

  public List<NodeBean> getNodes() {
    return this.nodes;
  }

  public void setNodes(final List<NodeBean> nodes) {
    this.nodes = nodes;
  }

  /**
   * Converts this node's config into a {@link Props} object, with the node type stored under
   * {@link Constants#NODE_TYPE}.
   */
  public Props getProps() {
    final Props props = new Props(null, this.getConfig());
    props.put(Constants.NODE_TYPE, this.getType());
    return props;
  }

  public FlowTriggerBean getTrigger() {
    return this.trigger;
  }

  public void setTrigger(final FlowTriggerBean trigger) {
    this.trigger = trigger;
  }

  @Override
  public String toString() {
    return "NodeBean{" +
        "name='" + this.name + '\'' +
        ", config=" + this.config +
        ", dependsOn=" + this.dependsOn +
        ", type='" + this.type + '\'' +
        ", condition='" + this.condition + '\'' +
        ", nodes=" + this.nodes +
        ", trigger=" + this.trigger +
        '}';
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/NodeBeanLoader.java
/*
 * Copyright 2017 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 *
 */

package azkaban.project;

import static com.google.common.base.Preconditions.checkArgument;

import azkaban.Constants;
import azkaban.Constants.FlowTriggerProps;
import com.google.common.base.Preconditions;
import com.google.common.io.Files;
import java.io.File;
import java.io.FileInputStream;
import java.time.Duration;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.quartz.CronExpression;
import org.yaml.snakeyaml.Yaml;

/**
 * Loads NodeBean from YAML files.
 */
public class NodeBeanLoader {

  /**
   * Parses a {@code *.flow} YAML file into a {@link NodeBean} tree. The root bean's name is
   * derived from the file name and its type is forced to {@link Constants#FLOW_NODE_TYPE}.
   *
   * @param flowFile an existing file whose name ends with {@link Constants#FLOW_FILE_SUFFIX}
   * @throws ProjectManagerException if the YAML parses to null
   * @throws Exception on I/O or YAML parse failures
   */
  public NodeBean load(final File flowFile) throws Exception {
    checkArgument(flowFile != null && flowFile.exists());
    checkArgument(flowFile.getName().endsWith(Constants.FLOW_FILE_SUFFIX));

    // BUGFIX: the FileInputStream was previously never closed, leaking a file descriptor
    // per load. try-with-resources guarantees closure even when parsing throws.
    final NodeBean nodeBean;
    try (final FileInputStream fis = new FileInputStream(flowFile)) {
      nodeBean = new Yaml().loadAs(fis, NodeBean.class);
    }
    if (nodeBean == null) {
      throw new ProjectManagerException(
          "Failed to load flow file " + flowFile.getName() + ". Node bean is null .");
    }
    nodeBean.setName(getFlowName(flowFile));
    nodeBean.setType(Constants.FLOW_NODE_TYPE);
    return nodeBean;
  }

  /**
   * Validates structural integrity of a flow bean: no duplicate node names and no
   * dependsOn reference to an undefined node. Returns false on the first violation.
   */
  public boolean validate(final NodeBean nodeBean) {
    final Set<String> nodeNames = new HashSet<>();
    for (final NodeBean n : nodeBean.getNodes()) {
      if (!nodeNames.add(n.getName())) {
        // Duplicate jobs
        return false;
      }
    }

    for (final NodeBean n : nodeBean.getNodes()) {
      if (n.getDependsOn() != null && !nodeNames.containsAll(n.getDependsOn())) {
        // Undefined reference to dependent job
        return false;
      }
    }
    return true;
  }

  /**
   * Recursively converts a {@link NodeBean} into the corresponding {@link AzkabanNode}:
   * an {@link AzkabanFlow} (with converted children and trigger) when the bean's type is
   * {@link Constants#FLOW_NODE_TYPE}, otherwise an {@link AzkabanJob}.
   */
  public AzkabanNode toAzkabanNode(final NodeBean nodeBean) {
    if (nodeBean.getType().equals(Constants.FLOW_NODE_TYPE)) {
      return new AzkabanFlow.AzkabanFlowBuilder()
          .name(nodeBean.getName())
          .props(nodeBean.getProps())
          .condition(nodeBean.getCondition())
          .dependsOn(nodeBean.getDependsOn())
          .nodes(nodeBean.getNodes().stream().map(this::toAzkabanNode)
              .collect(Collectors.toList()))
          .flowTrigger(toFlowTrigger(nodeBean.getTrigger()))
          .build();
    } else {
      return new AzkabanJob.AzkabanJobBuilder()
          .name(nodeBean.getName())
          .props(nodeBean.getProps())
          .condition(nodeBean.getCondition())
          .type(nodeBean.getType())
          .dependsOn(nodeBean.getDependsOn())
          .build();
    }
  }

  /**
   * Validates the trigger schedule: must be a cron schedule with a valid cron expression,
   * must fire at most once per minute (seconds field fixed to "0"), and must contain
   * exactly the type and value entries.
   */
  private void validateSchedule(final FlowTriggerBean flowTriggerBean) {
    final Map<String, String> scheduleMap = flowTriggerBean.getSchedule();

    Preconditions.checkNotNull(scheduleMap, "flow trigger schedule must not be null");

    Preconditions.checkArgument(
        scheduleMap.containsKey(FlowTriggerProps.SCHEDULE_TYPE) && scheduleMap
            .get(FlowTriggerProps.SCHEDULE_TYPE).equals(FlowTriggerProps.CRON_SCHEDULE_TYPE),
        "flow trigger schedule type must be cron");

    Preconditions.checkArgument(scheduleMap.containsKey(FlowTriggerProps.SCHEDULE_VALUE)
            && CronExpression.isValidExpression(scheduleMap.get(FlowTriggerProps.SCHEDULE_VALUE)),
        "flow trigger schedule value must be a valid cron expression");

    // Quartz cron's first field is seconds; requiring "0" forces at least 1-minute intervals.
    final String cronExpression = scheduleMap.get(FlowTriggerProps.SCHEDULE_VALUE).trim();
    final String[] cronParts = cronExpression.split("\\s+");
    Preconditions.checkArgument(cronParts[0].equals("0"),
        "interval of flow trigger schedule has to be larger than 1 min");

    Preconditions.checkArgument(scheduleMap.size() == 2,
        "flow trigger schedule must contain type and value only");
  }

  /** Runs all flow-trigger validations: schedule, dependencies, and max wait time. */
  private void validateFlowTriggerBean(final FlowTriggerBean flowTriggerBean) {
    validateSchedule(flowTriggerBean);
    validateTriggerDependencies(flowTriggerBean.getTriggerDependencies());
    validateMaxWaitMins(flowTriggerBean);
  }

  /**
   * maxWaitMins is mandatory when dependencies exist and, when present, must be at least
   * {@link Constants#MIN_FLOW_TRIGGER_WAIT_TIME}.
   */
  private void validateMaxWaitMins(final FlowTriggerBean flowTriggerBean) {
    Preconditions.checkArgument(flowTriggerBean.getTriggerDependencies().isEmpty()
            || flowTriggerBean.getMaxWaitMins() != null,
        "max wait min cannot be null unless no dependency is defined");

    if (flowTriggerBean.getMaxWaitMins() != null) {
      Preconditions.checkArgument(flowTriggerBean.getMaxWaitMins() >= Constants
              .MIN_FLOW_TRIGGER_WAIT_TIME.toMinutes(),
          "max wait min must be at least " + Constants.MIN_FLOW_TRIGGER_WAIT_TIME.toMinutes()
              + " min(s)");
    }
  }

  /**
   * check uniqueness of dependency.name
   */
  private void validateDepNameUniqueness(final List<TriggerDependencyBean> dependencies) {
    final Set<String> seen = new HashSet<>();
    for (final TriggerDependencyBean dep : dependencies) {
      // set.add() returns false when there exists duplicate
      Preconditions.checkArgument(seen.add(dep.getName()), String.format("duplicate dependency"
          + ".name %s found, dependency.name should be unique", dep.getName()));
    }
  }

  /**
   * check uniqueness of dependency type and params
   */
  private void validateDepDefinitionUniqueness(final List<TriggerDependencyBean> dependencies) {
    for (int i = 0; i < dependencies.size(); i++) {
      for (int j = i + 1; j < dependencies.size(); j++) {
        final boolean duplicateDepDefFound =
            dependencies.get(i).getType().equals(dependencies.get(j).getType())
                && dependencies.get(i).getParams().equals(dependencies.get(j).getParams());
        // BUGFIX: error message previously read "duplicate dependencyconfig" (missing space).
        Preconditions.checkArgument(!duplicateDepDefFound, String.format("duplicate dependency "
            + "config %s found, dependency config should be unique",
            dependencies.get(i).getName()));
      }
    }
  }

  /**
   * validate name and type are present
   */
  private void validateNameAndTypeArePresent(final List<TriggerDependencyBean> dependencies) {
    for (final TriggerDependencyBean dep : dependencies) {
      Preconditions.checkNotNull(dep.getName(), "dependency name is required");
      Preconditions.checkNotNull(dep.getType(), "dependency type is required for "
          + dep.getName());
    }
  }

  private void validateTriggerDependencies(final List<TriggerDependencyBean> dependencies) {
    validateNameAndTypeArePresent(dependencies);
    validateDepNameUniqueness(dependencies);
    validateDepDefinitionUniqueness(dependencies);
    validateDepType(dependencies);
  }

  private void validateDepType(final List<TriggerDependencyBean> dependencies) {
    //todo chengren311: validate dependencies are of valid dependency type
  }

  /**
   * Validates and converts a {@link FlowTriggerBean} into an immutable {@link FlowTrigger}.
   * Returns null for a null input. maxWaitMins is capped at
   * {@link Constants#DEFAULT_FLOW_TRIGGER_MAX_WAIT_TIME} (note: the cap mutates the bean).
   */
  public FlowTrigger toFlowTrigger(final FlowTriggerBean flowTriggerBean) {
    if (flowTriggerBean == null) {
      return null;
    }
    validateFlowTriggerBean(flowTriggerBean);
    if (flowTriggerBean.getMaxWaitMins() != null
        && flowTriggerBean.getMaxWaitMins() > Constants.DEFAULT_FLOW_TRIGGER_MAX_WAIT_TIME
        .toMinutes()) {
      flowTriggerBean.setMaxWaitMins(Constants.DEFAULT_FLOW_TRIGGER_MAX_WAIT_TIME.toMinutes());
    }
    final Duration duration = flowTriggerBean.getMaxWaitMins() == null ? null
        : Duration.ofMinutes(flowTriggerBean.getMaxWaitMins());

    return new FlowTrigger(
        new CronSchedule(flowTriggerBean.getSchedule().get(FlowTriggerProps.SCHEDULE_VALUE)),
        flowTriggerBean.getTriggerDependencies().stream()
            .map(d -> new FlowTriggerDependency(d.getName(), d.getType(), d.getParams()))
            .collect(Collectors.toList()),
        duration);
  }

  /** Returns the flow name, i.e. the flow file's base name without its extension. */
  public String getFlowName(final File flowFile) {
    checkArgument(flowFile != null && flowFile.exists());
    checkArgument(flowFile.getName().endsWith(Constants.FLOW_FILE_SUFFIX));

    return Files.getNameWithoutExtension(flowFile.getName());
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/Project.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import azkaban.flow.Flow;
import azkaban.user.Permission;
import azkaban.user.Permission.Type;
import azkaban.user.User;
import azkaban.utils.Pair;
import com.google.common.collect.ImmutableMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.Objects;

/**
 * In-memory representation of an Azkaban project: identity, versioning metadata, flows,
 * per-user/per-group permissions and proxy users. Serializable to/from a plain Map via
 * {@link #toObject()} and {@link #projectFromObject(Object)}.
 *
 * <p>Not thread-safe: permission and proxy-user maps are mutated without synchronization.
 */
public class Project {

  private final int id;
  private final String name;
  // LinkedHashMap: permission listing order follows insertion order.
  private final LinkedHashMap<String, Permission> userPermissionMap = new LinkedHashMap<>();
  private final LinkedHashMap<String, Permission> groupPermissionMap = new LinkedHashMap<>();
  private final HashSet<String> proxyUsers = new HashSet<>();
  private boolean active = true;
  private String description;
  // -1 means "no version uploaded yet".
  private int version = -1;
  private long createTimestamp;
  private long lastModifiedTimestamp;
  private String lastModifiedUser;
  private String source;
  private Map<String, Flow> flows = new HashMap<>();
  private Map<String, Object> metadata = new HashMap<>();

  public Project(final int id, final String name) {
    this.id = id;
    this.name = name;
  }

  /**
   * Reconstructs a Project from the Map produced by {@link #toObject()}.
   * Missing "active" defaults to true; missing "source"/"metadata" are left unset.
   */
  public static Project projectFromObject(final Object object) {
    final Map<String, Object> projectObject = (Map<String, Object>) object;
    final int id = (Integer) projectObject.get("id");
    final String name = (String) projectObject.get("name");
    final String description = (String) projectObject.get("description");
    final String lastModifiedUser = (String) projectObject.get("lastModifiedUser");
    final long createTimestamp = coerceToLong(projectObject.get("createTimestamp"));
    final long lastModifiedTimestamp = coerceToLong(projectObject.get("lastModifiedTimestamp"));
    final String source = (String) projectObject.get("source");
    Boolean active = (Boolean) projectObject.get("active");
    active = active == null ? true : active;
    final int version = (Integer) projectObject.get("version");
    final Map<String, Object> metadata = (Map<String, Object>) projectObject.get("metadata");

    final Project project = new Project(id, name);
    project.setVersion(version);
    project.setDescription(description);
    project.setCreateTimestamp(createTimestamp);
    project.setLastModifiedTimestamp(lastModifiedTimestamp);
    project.setLastModifiedUser(lastModifiedUser);
    project.setActive(active);

    if (source != null) {
      project.setSource(source);
    }
    if (metadata != null) {
      project.setMetadata(metadata);
    }

    // May be absent from the map; addAllProxyUsers tolerates null.
    final List<String> proxyUserList = (List<String>) projectObject.get("proxyUsers");
    project.addAllProxyUsers(proxyUserList);

    return project;
  }

  /**
   * Converts a JSON-decoded numeric value to long. JSON round-trips may yield either
   * Integer or Long depending on magnitude; null becomes 0.
   */
  private static long coerceToLong(final Object obj) {
    if (obj == null) {
      return 0;
    } else if (obj instanceof Integer) {
      return (Integer) obj;
    }
    return (Long) obj;
  }

  public String getName() {
    return this.name;
  }

  /** Returns the flow with the given id, or null if unknown. */
  public Flow getFlow(final String flowId) {
    if (this.flows == null) {
      return null;
    }
    return this.flows.get(flowId);
  }

  public Map<String, Flow> getFlowMap() {
    return this.flows;
  }

  /** Returns a fresh mutable list of all flows (empty list when none). */
  public List<Flow> getFlows() {
    List<Flow> retFlow = null;
    if (this.flows != null) {
      retFlow = new ArrayList<>(this.flows.values());
    } else {
      retFlow = new ArrayList<>();
    }
    return retFlow;
  }

  /** Replaces the flow map with an immutable snapshot of the given map. */
  public void setFlows(final Map<String, Flow> flows) {
    this.flows = ImmutableMap.copyOf(flows);
  }

  /** Union of the user's direct permission and all of their group permissions. */
  public Permission getCollectivePermission(final User user) {
    final Permission permissions = new Permission();
    Permission perm = this.userPermissionMap.get(user.getUserId());
    if (perm != null) {
      permissions.addPermissions(perm);
    }

    for (final String group : user.getGroups()) {
      perm = this.groupPermissionMap.get(group);
      if (perm != null) {
        permissions.addPermissions(perm);
      }
    }

    return permissions;
  }

  /** Defensive copy: callers cannot mutate the internal proxy-user set. */
  public Set<String> getProxyUsers() {
    return new HashSet<>(this.proxyUsers);
  }

  /**
   * Adds all given proxy users. A null collection is treated as empty.
   * (BUGFIX: previously threw NullPointerException when projectFromObject was given a
   * map without a "proxyUsers" entry.)
   */
  public void addAllProxyUsers(final Collection<String> proxyUsers) {
    if (proxyUsers != null) {
      this.proxyUsers.addAll(proxyUsers);
    }
  }

  public boolean hasProxyUser(final String proxy) {
    return this.proxyUsers.contains(proxy);
  }

  public void addProxyUser(final String user) {
    this.proxyUsers.add(user);
  }

  public void removeProxyUser(final String user) {
    this.proxyUsers.remove(user);
  }

  /**
   * True if the user has the given permission (or ADMIN) either directly or via any of
   * their groups.
   */
  public boolean hasPermission(final User user, final Type type) {
    final Permission perm = this.userPermissionMap.get(user.getUserId());
    if (perm != null && (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type))) {
      return true;
    }
    return hasGroupPermission(user, type);
  }

  /** True only if the user's *direct* permission grants the type (or ADMIN). */
  public boolean hasUserPermission(final User user, final Type type) {
    final Permission perm = this.userPermissionMap.get(user.getUserId());
    if (perm == null) {
      // Check group
      return false;
    }
    return perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type);
  }

  /** True if any of the user's groups grants the type (or ADMIN). */
  public boolean hasGroupPermission(final User user, final Type type) {
    for (final String group : user.getGroups()) {
      final Permission perm = this.groupPermissionMap.get(group);
      if (perm != null) {
        if (perm.isPermissionSet(Type.ADMIN) || perm.isPermissionSet(type)) {
          return true;
        }
      }
    }
    return false;
  }

  /** Returns user ids whose direct permission includes the given type. */
  public List<String> getUsersWithPermission(final Type type) {
    final ArrayList<String> users = new ArrayList<>();
    for (final Map.Entry<String, Permission> entry : this.userPermissionMap.entrySet()) {
      final Permission perm = entry.getValue();
      if (perm.isPermissionSet(type)) {
        users.add(entry.getKey());
      }
    }
    return users;
  }

  public List<Pair<String, Permission>> getUserPermissions() {
    final ArrayList<Pair<String, Permission>> permissions = new ArrayList<>();
    for (final Map.Entry<String, Permission> entry : this.userPermissionMap.entrySet()) {
      permissions.add(new Pair<>(entry.getKey(), entry.getValue()));
    }
    return permissions;
  }

  public List<Pair<String, Permission>> getGroupPermissions() {
    final ArrayList<Pair<String, Permission>> permissions = new ArrayList<>();
    for (final Map.Entry<String, Permission> entry : this.groupPermissionMap.entrySet()) {
      permissions.add(new Pair<>(entry.getKey(), entry.getValue()));
    }
    return permissions;
  }

  public String getDescription() {
    return this.description;
  }

  public void setDescription(final String description) {
    this.description = description;
  }

  public void setUserPermission(final String userid, final Permission perm) {
    this.userPermissionMap.put(userid, perm);
  }

  public void setGroupPermission(final String group, final Permission perm) {
    this.groupPermissionMap.put(group, perm);
  }

  public Permission getUserPermission(final User user) {
    return this.userPermissionMap.get(user.getUserId());
  }

  public Permission getGroupPermission(final String group) {
    return this.groupPermissionMap.get(group);
  }

  public Permission getUserPermission(final String userID) {
    return this.userPermissionMap.get(userID);
  }

  public void removeGroupPermission(final String group) {
    this.groupPermissionMap.remove(group);
  }

  public void removeUserPermission(final String userId) {
    this.userPermissionMap.remove(userId);
  }

  public void clearUserPermission() {
    this.userPermissionMap.clear();
  }

  public long getCreateTimestamp() {
    return this.createTimestamp;
  }

  public void setCreateTimestamp(final long createTimestamp) {
    this.createTimestamp = createTimestamp;
  }

  public long getLastModifiedTimestamp() {
    return this.lastModifiedTimestamp;
  }

  public void setLastModifiedTimestamp(final long lastModifiedTimestamp) {
    this.lastModifiedTimestamp = lastModifiedTimestamp;
  }

  /**
   * Serializes the project metadata (not flows or permissions) to a plain Map, the inverse
   * of {@link #projectFromObject(Object)}. "active" is only written when false.
   */
  public Object toObject() {
    final HashMap<String, Object> projectObject = new HashMap<>();
    projectObject.put("id", this.id);
    projectObject.put("name", this.name);
    projectObject.put("description", this.description);
    projectObject.put("createTimestamp", this.createTimestamp);
    projectObject.put("lastModifiedTimestamp", this.lastModifiedTimestamp);
    projectObject.put("lastModifiedUser", this.lastModifiedUser);
    projectObject.put("version", this.version);

    if (!this.active) {
      projectObject.put("active", false);
    }

    if (this.source != null) {
      projectObject.put("source", this.source);
    }

    if (this.metadata != null) {
      projectObject.put("metadata", this.metadata);
    }

    final ArrayList<String> proxyUserList = new ArrayList<>(this.proxyUsers);
    projectObject.put("proxyUsers", proxyUserList);

    return projectObject;
  }

  public String getLastModifiedUser() {
    return this.lastModifiedUser;
  }

  public void setLastModifiedUser(final String lastModifiedUser) {
    this.lastModifiedUser = lastModifiedUser;
  }

  // hashCode intentionally kept with the original field set and algorithm so hash values
  // are unchanged. Flows, permissions, proxy users and metadata are deliberately excluded,
  // consistent with equals below.
  @Override
  public int hashCode() {
    final int prime = 31;
    int result = 1;
    result = prime * result + (this.active ? 1231 : 1237);
    result = prime * result + (int) (this.createTimestamp ^ (this.createTimestamp >>> 32));
    result = prime * result + ((this.description == null) ? 0 : this.description.hashCode());
    result = prime * result + this.id;
    result =
        prime * result + (int) (this.lastModifiedTimestamp ^ (this.lastModifiedTimestamp >>> 32));
    result =
        prime * result + ((this.lastModifiedUser == null) ? 0 : this.lastModifiedUser.hashCode());
    result = prime * result + ((this.name == null) ? 0 : this.name.hashCode());
    result = prime * result + ((this.source == null) ? 0 : this.source.hashCode());
    result = prime * result + this.version;
    return result;
  }

  // Behavior-identical rewrite of the original null-check ladder using Objects.equals.
  @Override
  public boolean equals(final Object obj) {
    if (this == obj) {
      return true;
    }
    if (obj == null || getClass() != obj.getClass()) {
      return false;
    }
    final Project other = (Project) obj;
    return this.active == other.active
        && this.createTimestamp == other.createTimestamp
        && this.lastModifiedTimestamp == other.lastModifiedTimestamp
        && this.id == other.id
        && this.version == other.version
        && Objects.equals(this.description, other.description)
        && Objects.equals(this.lastModifiedUser, other.lastModifiedUser)
        && Objects.equals(this.name, other.name)
        && Objects.equals(this.source, other.source);
  }

  public String getSource() {
    return this.source;
  }

  public void setSource(final String source) {
    this.source = source;
  }

  /** Lazily re-creates the metadata map if it was nulled out; never returns null. */
  public Map<String, Object> getMetadata() {
    if (this.metadata == null) {
      this.metadata = new HashMap<>();
    }
    return this.metadata;
  }

  protected void setMetadata(final Map<String, Object> metadata) {
    this.metadata = metadata;
  }

  public int getId() {
    return this.id;
  }

  public boolean isActive() {
    return this.active;
  }

  public void setActive(final boolean active) {
    this.active = active;
  }

  public int getVersion() {
    return this.version;
  }

  public void setVersion(final int version) {
    this.version = version;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ProjectFileHandler.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import azkaban.spi.Dependency;
import java.io.File;
import java.util.Set;

/**
 * Metadata describing one uploaded version of a project archive (as stored in the
 * project_versions table), plus an optional handle to the locally materialized file.
 *
 * <p>All metadata fields are immutable; only the local-file handle is mutable and its
 * accessors are synchronized.
 */
public class ProjectFileHandler {

  private final int projectId;
  private final int version;
  private final long uploadTime;
  private final String fileType;
  private final String fileName;
  private final String uploader;
  private final byte[] md5Hash;
  private final int numChunks;
  private final Set<Dependency> startupDependencies;
  private final String resourceId;
  private final String uploaderIpAddr;
  // Local copy of the archive, if downloaded; null until setLocalFile is called.
  private File localFile = null;

  public ProjectFileHandler(
      final int projectId,
      final int version,
      final long uploadTime,
      final String uploader,
      final String fileType,
      final String fileName,
      final int numChunks,
      final byte[] MD5Hash,
      final Set<Dependency> startupDependencies,
      final String resourceId,
      final String uploaderIpAddr) {
    this.projectId = projectId;
    this.version = version;
    this.uploadTime = uploadTime;
    this.uploader = uploader;
    this.fileType = fileType;
    this.fileName = fileName;
    // Defensive copy: previously the caller's array was stored directly, letting external
    // code mutate this handler's hash after construction.
    this.md5Hash = MD5Hash == null ? null : MD5Hash.clone();
    this.numChunks = numChunks;
    this.startupDependencies = startupDependencies;
    this.resourceId = resourceId;
    this.uploaderIpAddr = uploaderIpAddr;
  }

  public int getProjectId() {
    return this.projectId;
  }

  public int getVersion() {
    return this.version;
  }

  public long getUploadTime() {
    return this.uploadTime;
  }

  public String getFileType() {
    return this.fileType;
  }

  public String getFileName() {
    return this.fileName;
  }

  /** Returns a copy of the MD5 digest of the uploaded archive (null if none recorded). */
  public byte[] getMD5Hash() {
    return this.md5Hash == null ? null : this.md5Hash.clone();
  }

  public File getLocalFile() {
    return this.localFile;
  }

  public synchronized void setLocalFile(final File localFile) {
    this.localFile = localFile;
  }

  /** Best-effort deletion of the local copy; the delete result is intentionally ignored. */
  public synchronized void deleteLocalFile() {
    if (this.localFile != null) {
      this.localFile.delete();
      this.localFile = null;
    }
  }

  public String getUploader() {
    return this.uploader;
  }

  public int getNumChunks() {
    return this.numChunks;
  }

  public Set<Dependency> getStartupDependencies() {
    return this.startupDependencies;
  }

  public String getResourceId() {
    return this.resourceId;
  }

  public String getUploaderIpAddr() {
    return this.uploaderIpAddr;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ProjectLoader.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.project;

import azkaban.flow.Flow;
import azkaban.project.ProjectLogEvent.EventType;
import azkaban.user.Permission;
import azkaban.user.User;
import azkaban.utils.Props;
import azkaban.utils.Triple;
import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;

/**
 * Persistence interface for projects: project lifecycle, permissions, uploaded archives,
 * flows, properties, flow files and audit events. Implementations are backed by the
 * Azkaban database.
 */
public interface ProjectLoader {

  /**
   * Returns all projects which are active
   */
  List<Project> fetchAllActiveProjects() throws ProjectManagerException;

  /**
   * Loads whole project, including permissions, by the project id.
   */
  Project fetchProjectById(int id) throws ProjectManagerException;

  /**
   * Loads whole project, including permissions, by the project name.
   */
  Project fetchProjectByName(String name) throws ProjectManagerException;

  /**
   * Should create an empty project with the given name and user and adds it to the data store.
   * It will auto assign a unique id for this project if successful.
   *
   * If an active project of the same name exists, it will throw an exception. If the name and
   * description of the project exceeds the store's constraints, it will throw an exception.
   *
   * @throws ProjectManagerException if an active project of the same name exists.
   */
  Project createNewProject(String name, String description, User creator)
      throws ProjectManagerException;

  /**
   * Removes the project by marking it inactive. The project's data is not physically deleted.
   */
  void removeProject(Project project, String user) throws ProjectManagerException;

  /**
   * Adds and updates the user permissions. Does not check if the user is valid. If the
   * permission doesn't exist, it adds. If the permission exists, it updates.
   *
   * @param isGroup true to set a group permission, false for a user permission
   */
  void updatePermission(Project project, String name, Permission perm, boolean isGroup)
      throws ProjectManagerException;

  /**
   * Removes the named user or group permission from the project.
   */
  void removePermission(Project project, String name, boolean isGroup)
      throws ProjectManagerException;

  /**
   * Modifies and commits the project description.
   */
  void updateDescription(Project project, String description, String user)
      throws ProjectManagerException;

  /**
   * Stores logs for a particular project. Will soft fail rather than throw exception.
   *
   * @param message the event message to record
   * @return true if the posting was a success
   */
  boolean postEvent(Project project, EventType type, String user, String message);

  /**
   * Returns all the events for a project sorted, limited to {@code num} entries after
   * skipping the first {@code skip}.
   */
  List<ProjectLogEvent> getProjectEvents(Project project, int num, int skip)
      throws ProjectManagerException;

  /**
   * Will upload the files and return the version number of the file uploaded.
   */
  // NOTE(review): parameter name uses snake_case, inconsistent with the rest of the API.
  void uploadProjectFile(int projectId, int version, File localFile, String user,
      String uploader_ip_addr) throws ProjectManagerException;

  /**
   * Add project and version info to the project_versions table. This current maintains the
   * metadata for each uploaded version of the project
   */
  void addProjectVersion(int projectId, int version, File localFile, File startupDependencies,
      String uploader, byte[] md5, String resourceId, String uploaderIPAddr)
      throws ProjectManagerException;

  /**
   * Fetch project metadata from project_versions table
   *
   * @param projectId project ID
   * @param version version
   * @return ProjectFileHandler object containing the metadata
   */
  ProjectFileHandler fetchProjectMetaData(int projectId, int version);

  /**
   * Get file that's uploaded.
   */
  ProjectFileHandler getUploadedFile(int projectId, int version) throws ProjectManagerException;

  /**
   * Changes and commits different project version.
   */
  void changeProjectVersion(Project project, int version, String user)
      throws ProjectManagerException;

  /**
   * Updates a single flow within the given project version.
   */
  void updateFlow(Project project, int version, Flow flow) throws ProjectManagerException;

  /**
   * Uploads all computed flows
   */
  void uploadFlows(Project project, int version, Collection<Flow> flows)
      throws ProjectManagerException;

  /**
   * Upload just one flow.
   */
  void uploadFlow(Project project, int version, Flow flow) throws ProjectManagerException;

  /**
   * Fetches one particular flow.
   */
  Flow fetchFlow(Project project, String flowId) throws ProjectManagerException;

  /**
   * Fetches all flows for a given project
   */
  List<Flow> fetchAllProjectFlows(final Project project) throws ProjectManagerException;

  /**
   * Fetches all flows for all projects.
   */
  Map<Project, List<Flow>> fetchAllFlowsForProjects(List<Project> projects)
      throws ProjectManagerException;

  /**
   * Gets the latest upload version.
   */
  int getLatestProjectVersion(Project project) throws ProjectManagerException;

  /**
   * Upload Project properties
   */
  void uploadProjectProperty(Project project, Props props) throws ProjectManagerException;

  /**
   * Upload Project properties. Map contains key value of path and properties
   */
  void uploadProjectProperties(Project project, List<Props> properties)
      throws ProjectManagerException;

  /**
   * Fetch project properties
   */
  Props fetchProjectProperty(Project project, String propsName) throws ProjectManagerException;

  /**
   * Fetch all project properties
   */
  Map<String, Props> fetchProjectProperties(int projectId, int version)
      throws ProjectManagerException;

  /**
   * Cleans all project versions less than the provided version, except the versions to exclude
   * given as argument
   */
  void cleanOlderProjectVersion(int projectId, int version, final List<Integer> excludedVersions)
      throws ProjectManagerException;

  /**
   * Updates a single property set for the project's current version.
   */
  void updateProjectProperty(Project project, Props props) throws ProjectManagerException;

  /**
   * Fetches a named property set for a specific project id and version.
   */
  Props fetchProjectProperty(int projectId, int projectVer, String propsName)
      throws ProjectManagerException;

  /**
   * Persists the project's current settings (permissions, proxy users, etc.).
   */
  void updateProjectSettings(Project project) throws ProjectManagerException;

  /**
   * Uploads flow file.
   */
  void uploadFlowFile(int projectId, int projectVersion, File flowFile, int flowVersion)
      throws ProjectManagerException;

  /**
   * Gets flow file that's uploaded. The file is materialized under {@code tempDir}.
   */
  File getUploadedFlowFile(int projectId, int projectVersion, String flowFileName,
      int flowVersion, final File tempDir) throws ProjectManagerException, IOException;

  /**
   * Gets the latest flow version.
   */
  int getLatestFlowVersion(int projectId, int projectVersion, String flowName)
      throws ProjectManagerException;

  /**
   * Check if flow file has been uploaded.
   */
  boolean isFlowFileUploaded(int projectId, int projectVersion) throws ProjectManagerException;
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ProjectLogEvent.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.project; public class ProjectLogEvent { private final int projectId; private final String user; private final long time; private final EventType type; private final String message; public ProjectLogEvent(final int projectId, final EventType type, final long time, final String user, final String message) { this.projectId = projectId; this.user = user; this.time = time; this.type = type; this.message = message; } public int getProjectId() { return this.projectId; } public String getUser() { return this.user; } public long getTime() { return this.time; } public EventType getType() { return this.type; } public String getMessage() { return this.message; } /** * Log event type messages. Do not change the numeric representation of each enum. * * Only represent from 0 to 255 different codes. 
*/ public static enum EventType { ERROR(128), CREATED(1), DELETED(2), USER_PERMISSION(3), GROUP_PERMISSION(4), DESCRIPTION(5), UPLOADED(6), SCHEDULE(7), SLA(8), PROXY_USER(9), PURGE(10), PROPERTY_OVERRIDE(11); private final int numVal; EventType(final int numVal) { this.numVal = numVal; } public static EventType fromInteger(final int x) { switch (x) { case 1: return CREATED; case 2: return DELETED; case 3: return USER_PERMISSION; case 4: return GROUP_PERMISSION; case 5: return DESCRIPTION; case 6: return UPLOADED; case 7: return SCHEDULE; case 8: return SLA; case 9: return PROXY_USER; case 10: return PURGE; case 11: return PROPERTY_OVERRIDE; case 128: return ERROR; default: return ERROR; } } public int getNumVal() { return this.numVal; } } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ProjectManager.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.project; import static java.util.Objects.requireNonNull; import azkaban.Constants; import azkaban.executor.ExecutorManagerException; import azkaban.flow.Flow; import azkaban.project.ProjectLogEvent.EventType; import azkaban.project.validator.ValidationReport; import azkaban.storage.ProjectStorageManager; import azkaban.user.Permission; import azkaban.user.Permission.Type; import azkaban.user.User; import azkaban.utils.CaseInsensitiveConcurrentHashMap; import azkaban.utils.Props; import azkaban.utils.PropsUtils; import com.google.common.io.Files; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; import javax.inject.Inject; import javax.inject.Singleton; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Singleton public class ProjectManager { private static final Logger logger = LoggerFactory.getLogger(ProjectManager.class); private final AzkabanProjectLoader azkabanProjectLoader; private final ProjectLoader projectLoader; private final Props props; private final boolean creatorDefaultPermissions; // Both projectsById and projectsByName cache need to be thread safe since they are accessed // from multiple threads concurrently 
without external synchronization for performance. private final ConcurrentHashMap<Integer, Project> projectsById = new ConcurrentHashMap<>(); private final CaseInsensitiveConcurrentHashMap<Project> projectsByName = new CaseInsensitiveConcurrentHashMap<>(); @Inject public ProjectManager(final AzkabanProjectLoader azkabanProjectLoader, final ProjectLoader loader, final ProjectStorageManager projectStorageManager, final Props props) { this.projectLoader = requireNonNull(loader); this.props = requireNonNull(props); this.azkabanProjectLoader = requireNonNull(azkabanProjectLoader); this.creatorDefaultPermissions = props.getBoolean("creator.default.proxy", true); loadAllProjects(); logger.info("Loading whitelisted projects."); loadProjectWhiteList(); logger.info("ProjectManager instance created."); } public boolean hasFlowTrigger(final Project project, final Flow flow) throws IOException, ProjectManagerException { final String flowFileName = flow.getId() + ".flow"; final int latestFlowVersion = this.projectLoader.getLatestFlowVersion(project.getId(), flow .getVersion(), flowFileName); if (latestFlowVersion > 0) { final File tempDir = com.google.common.io.Files.createTempDir(); final File flowFile; try { flowFile = this.projectLoader .getUploadedFlowFile(project.getId(), project.getVersion(), flowFileName, latestFlowVersion, tempDir); final FlowTrigger flowTrigger = FlowLoaderUtils.getFlowTriggerFromYamlFile(flowFile); return flowTrigger != null; } catch (final Exception ex) { logger.error("error in getting flow file", ex); throw ex; } finally { FlowLoaderUtils.cleanUpDir(tempDir); } } else { return false; } } private void loadAllProjects() { final List<Project> projects; logger.info("Loading active projects."); try { projects = this.projectLoader.fetchAllActiveProjects(); } catch (final ProjectManagerException e) { throw new RuntimeException("Could not load projects from store.", e); } for (final Project proj : projects) { this.projectsByName.put(proj.getName(), proj); 
this.projectsById.put(proj.getId(), proj); } logger.info("Loading flows from active projects."); loadAllFlowsForAllProjects(projects); } private void loadAllFlowsForAllProjects(final List<Project> projects) { try { Map<Project, List<Flow>> projectToFlows = this.projectLoader.fetchAllFlowsForProjects(projects); // Load the flows into the project objects for (Map.Entry<Project, List<Flow>> entry : projectToFlows.entrySet()) { Project project = entry.getKey(); List<Flow> flows = entry.getValue(); final Map<String, Flow> flowMap = new HashMap<>(); for (final Flow flow : flows) { flowMap.put(flow.getId(), flow); } project.setFlows(flowMap); } } catch (final ProjectManagerException e) { throw new RuntimeException("Could not load projects flows from store.", e); } } public Props getProps() { return this.props; } public List<Project> getUserProjects(final User user) { final ArrayList<Project> array = new ArrayList<>(); for (final Project project : this.projectsById.values()) { final Permission perm = project.getUserPermission(user); if (perm != null && (perm.isPermissionSet(Type.ADMIN) || perm .isPermissionSet(Type.READ))) { array.add(project); } } return array; } public List<Project> getGroupProjects(final User user) { final List<Project> array = new ArrayList<>(); for (final Project project : this.projectsById.values()) { if (project.hasGroupPermission(user, Type.READ)) { array.add(project); } } return array; } public List<Project> getUserProjectsByRegex(final User user, final String regexPattern) { final List<Project> array = new ArrayList<>(); final Pattern pattern; try { pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE); } catch (final PatternSyntaxException e) { logger.error("Bad regex pattern {}", regexPattern); return array; } for (final Project project : this.projectsById.values()) { final Permission perm = project.getUserPermission(user); if (perm != null && (perm.isPermissionSet(Type.ADMIN) || perm .isPermissionSet(Type.READ))) { if 
(pattern.matcher(project.getName()).find()) { array.add(project); } } } return array; } public List<Project> getProjects() { return new ArrayList<>(this.projectsById.values()); } public List<Project> getProjectsByRegex(final String regexPattern) { final List<Project> allProjects = new ArrayList<>(); final Pattern pattern; try { pattern = Pattern.compile(regexPattern, Pattern.CASE_INSENSITIVE); } catch (final PatternSyntaxException e) { logger.error("Bad regex pattern {}", regexPattern); return allProjects; } for (final Project project : getProjects()) { if (pattern.matcher(project.getName()).find()) { allProjects.add(project); } } return allProjects; } /** * Checks if a project is active using project_id */ public Boolean isActiveProject(final int id) { return this.projectsById.containsKey(id); } /** * fetch active project by project name. Queries the cache first then db if not found */ public Project getProject(final String name) { Project fetchedProject = this.projectsByName.get(name); if (fetchedProject == null) { try { fetchedProject = this.projectLoader.fetchProjectByName(name); if (fetchedProject != null) { logger.info("Project {} not found in cache, fetched from DB.", name); } else { logger.info("No active project with name {} exists in cache or DB.", name); } } catch (final ProjectManagerException e) { logger.error("Could not load project from store.", e); } } return fetchedProject; } /** * fetch active project from cache and inactive projects from db by project_id */ public Project getProject(final int id) { Project fetchedProject = this.projectsById.get(id); if (fetchedProject == null) { try { fetchedProject = this.projectLoader.fetchProjectById(id); } catch (final ProjectManagerException e) { logger.error("Could not load project from store.", e); } } return fetchedProject; } public Project createProject(final String projectName, final String description, final User creator) throws ProjectManagerException { if (projectName == null || 
projectName.trim().isEmpty()) { throw new ProjectManagerException("Project name cannot be empty."); } else if (description == null || description.trim().isEmpty()) { throw new ProjectManagerException("Description cannot be empty."); } else if (creator == null) { throw new ProjectManagerException("Valid creator user must be set."); } else if (!projectName.matches("[a-zA-Z][a-zA-Z_0-9|-]*")) { throw new ProjectManagerException( "Project names must start with a letter, followed by any number of letters, digits, '-' or '_'."); } final Project newProject; synchronized (this) { if (this.projectsByName.containsKey(projectName)) { throw new ProjectManagerException("Project already exists."); } logger.info("Trying to create {} by user {}", projectName, creator.getUserId()); newProject = this.projectLoader.createNewProject(projectName, description, creator); this.projectsByName.put(newProject.getName(), newProject); this.projectsById.put(newProject.getId(), newProject); } if (this.creatorDefaultPermissions) { // Add permission to project this.projectLoader.updatePermission(newProject, creator.getUserId(), new Permission(Permission.Type.ADMIN), false); // Add proxy user newProject.addProxyUser(creator.getUserId()); try { updateProjectSetting(newProject); } catch (final ProjectManagerException e) { e.printStackTrace(); throw e; } } this.projectLoader.postEvent(newProject, EventType.CREATED, creator.getUserId(), null); return newProject; } /** * Permanently delete all project files and properties data for all versions of a project and log * event in project_events table */ public synchronized Project purgeProject(final Project project, final User deleter) throws ProjectManagerException { this.projectLoader.cleanOlderProjectVersion(project.getId(), project.getVersion() + 1, Collections.emptyList()); this.projectLoader .postEvent(project, EventType.PURGE, deleter.getUserId(), String .format("Purged versions before %d", project.getVersion() + 1)); return project; } public 
synchronized Project removeProject(final Project project, final User deleter) throws ProjectManagerException { this.projectLoader.removeProject(project, deleter.getUserId()); this.projectLoader.postEvent(project, EventType.DELETED, deleter.getUserId(), null); this.projectsByName.remove(project.getName()); this.projectsById.remove(project.getId()); return project; } public void updateProjectDescription(final Project project, final String description, final User modifier) throws ProjectManagerException { this.projectLoader.updateDescription(project, description, modifier.getUserId()); this.projectLoader.postEvent(project, EventType.DESCRIPTION, modifier.getUserId(), "Description changed to " + description); } public List<ProjectLogEvent> getProjectEventLogs(final Project project, final int results, final int skip) throws ProjectManagerException { return this.projectLoader.getProjectEvents(project, results, skip); } public Props getPropertiesFromFlowFile(final Flow flow, final String jobName, final String flowFileName, final int flowVersion) throws ProjectManagerException { File tempDir = null; Props props = null; try { tempDir = Files.createTempDir(); final File flowFile = this.projectLoader.getUploadedFlowFile(flow.getProjectId(), flow .getVersion(), flowFileName, flowVersion, tempDir); final String path = jobName == null ? flow.getId() : flow.getId() + Constants.PATH_DELIMITER + jobName; props = FlowLoaderUtils.getPropsFromYamlFile(path, flowFile); } catch (final Exception e) { this.logger.error("Failed to get props from flow file. " + e); } finally { FlowLoaderUtils.cleanUpDir(tempDir); } return props; } public Props getProperties(final Project project, final Flow flow, final String jobName, final String source) throws ProjectManagerException { if (FlowLoaderUtils.isAzkabanFlowVersion20(flow.getAzkabanFlowVersion())) { // Return the properties from the original uploaded flow file. 
return getPropertiesFromFlowFile(flow, jobName, source, 1); } else { return this.projectLoader.fetchProjectProperty(project, source); } } public Props getJobOverrideProperty(final Project project, final Flow flow, final String jobName, final String source) throws ProjectManagerException { if (FlowLoaderUtils.isAzkabanFlowVersion20(flow.getAzkabanFlowVersion())) { final int flowVersion = this.projectLoader .getLatestFlowVersion(flow.getProjectId(), flow.getVersion(), source); return getPropertiesFromFlowFile(flow, jobName, source, flowVersion); } else { return this.projectLoader .fetchProjectProperty(project, jobName + Constants.JOB_OVERRIDE_SUFFIX); } } public void setJobOverrideProperty(final Project project, final Flow flow, final Props prop, final String jobName, final String source, final User modifier) throws ProjectManagerException { File tempDir = null; Props oldProps = null; if (FlowLoaderUtils.isAzkabanFlowVersion20(flow.getAzkabanFlowVersion())) { try { tempDir = Files.createTempDir(); final int flowVersion = this.projectLoader.getLatestFlowVersion(flow.getProjectId(), flow .getVersion(), source); final File flowFile = this.projectLoader.getUploadedFlowFile(flow.getProjectId(), flow .getVersion(), source, flowVersion, tempDir); final String path = flow.getId() + Constants.PATH_DELIMITER + jobName; oldProps = FlowLoaderUtils.getPropsFromYamlFile(path, flowFile); FlowLoaderUtils.setPropsInYamlFile(path, flowFile, prop); this.projectLoader .uploadFlowFile(flow.getProjectId(), flow.getVersion(), flowFile, flowVersion + 1); } catch (final Exception e) { this.logger.error("Failed to set job override property. 
" + e); } finally { FlowLoaderUtils.cleanUpDir(tempDir); } } else { prop.setSource(jobName + Constants.JOB_OVERRIDE_SUFFIX); oldProps = this.projectLoader.fetchProjectProperty(project, prop.getSource()); if (oldProps == null) { this.projectLoader.uploadProjectProperty(project, prop); } else { this.projectLoader.updateProjectProperty(project, prop); } } final String diffMessage = PropsUtils.getPropertyDiff(oldProps, prop); this.projectLoader.postEvent(project, EventType.PROPERTY_OVERRIDE, modifier.getUserId(), diffMessage); return; } public void updateProjectSetting(final Project project) throws ProjectManagerException { this.projectLoader.updateProjectSettings(project); } public void addProjectProxyUser(final Project project, final String proxyName, final User modifier) throws ProjectManagerException { logger.info("User {} adding proxy user {} to project {}", modifier.getUserId(), proxyName, project.getName()); project.addProxyUser(proxyName); this.projectLoader.postEvent(project, EventType.PROXY_USER, modifier.getUserId(), "Proxy user " + proxyName + " is added to project."); updateProjectSetting(project); } public void removeProjectProxyUser(final Project project, final String proxyName, final User modifier) throws ProjectManagerException { logger.info("User {} removing proxy user {} from project {}", modifier.getUserId(), proxyName, project.getName()); project.removeProxyUser(proxyName); this.projectLoader.postEvent(project, EventType.PROXY_USER, modifier.getUserId(), "Proxy user " + proxyName + " has been removed form the project."); updateProjectSetting(project); } public void updateProjectPermission(final Project project, final String name, final Permission perm, final boolean group, final User modifier) throws ProjectManagerException { logger.info("User {} updating permissions for project {} for {} {}", modifier.getUserId(), project.getName(), name, perm.toString()); this.projectLoader.updatePermission(project, name, perm, group); if (group) { 
this.projectLoader.postEvent(project, EventType.GROUP_PERMISSION, modifier.getUserId(), "Permission for group " + name + " set to " + perm.toString()); } else { this.projectLoader.postEvent(project, EventType.USER_PERMISSION, modifier.getUserId(), "Permission for user " + name + " set to " + perm.toString()); } } public void removeProjectPermission(final Project project, final String name, final boolean group, final User modifier) throws ProjectManagerException { logger.info("User {} removing permissions for project {} for {}", modifier.getUserId(), project.getName(), name); this.projectLoader.removePermission(project, name, group); if (group) { this.projectLoader.postEvent(project, EventType.GROUP_PERMISSION, modifier.getUserId(), "Permission for group " + name + " removed."); } else { this.projectLoader.postEvent(project, EventType.USER_PERMISSION, modifier.getUserId(), "Permission for user " + name + " removed."); } } /** * This method retrieves the uploaded project zip file from DB. A temporary file is created to * hold the content of the uploaded zip file. This temporary file is provided in the * ProjectFileHandler instance and the caller of this method should call method * {@ProjectFileHandler.deleteLocalFile} to delete the temporary file. 
* * @param version - latest version is used if value is -1 * @return ProjectFileHandler - null if can't find project zip file based on project name and * version */ public ProjectFileHandler getProjectFileHandler(final Project project, final int version) throws ProjectManagerException { return this.azkabanProjectLoader.getProjectFile(project, version); } public Map<String, ValidationReport> uploadProject(final Project project, final File archive, final String fileType, final User uploader, final Props additionalProps, final String uploaderIPAddr) throws ProjectManagerException, ExecutorManagerException { return this.azkabanProjectLoader .uploadProject(project, archive, fileType, uploader, additionalProps, uploaderIPAddr); } public void updateFlow(final Project project, final Flow flow) throws ProjectManagerException { this.projectLoader.updateFlow(project, flow.getVersion(), flow); } public void postProjectEvent(final Project project, final EventType type, final String user, final String message) { this.projectLoader.postEvent(project, type, user, message); } public boolean loadProjectWhiteList() { if (this.props.containsKey(ProjectWhitelist.XML_FILE_PARAM)) { ProjectWhitelist.load(this.props); return true; } return false; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ProjectManagerException.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.project; import azkaban.spi.AzkabanException; public class ProjectManagerException extends AzkabanException { private static final long serialVersionUID = 1L; public ProjectManagerException(final String message) { super(message); } public ProjectManagerException(final String message, final Throwable cause) { super(message, cause); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/ProjectWhitelist.java
package azkaban.project; import azkaban.utils.Props; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; /** * @author wkang * * This class manages project whitelist defined in xml config file. An single xml config file * contains different types of whitelisted projects. For additional type of whitelist, modify * WhitelistType enum. * * The xml config file should in the following format. Please note the tag <MemoryCheck> is same as * the defined enum MemoryCheck * * <ProjectWhitelist> <MemoryCheck> <project projectname="project1" /> <project * projectname="project2" /> </MemoryCheck> <ProjectWhitelist> */ public class ProjectWhitelist { public static final String XML_FILE_PARAM = "project.whitelist.xml.file"; private static final String PROJECT_WHITELIST_TAG = "ProjectWhitelist"; private static final String PROJECT_TAG = "project"; private static final String PROJECTID_ATTR = "projectid"; private static final AtomicReference<Map<WhitelistType, Set<Integer>>> projectsWhitelisted = new AtomicReference<>(); static void load(final Props props) { final String xmlFile = props.getString(XML_FILE_PARAM); parseXMLFile(xmlFile); } private static void parseXMLFile(final String xmlFile) { final File file = new File(xmlFile); if (!file.exists()) { throw new IllegalArgumentException("Project whitelist xml file " + xmlFile + " doesn't exist."); } // Creating the document builder to parse xml. 
final DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance(); DocumentBuilder builder = null; try { builder = docBuilderFactory.newDocumentBuilder(); } catch (final ParserConfigurationException e) { throw new IllegalArgumentException( "Exception while parsing project whitelist xml. Document builder not created.", e); } Document doc = null; try { doc = builder.parse(file); } catch (final SAXException e) { throw new IllegalArgumentException("Exception while parsing " + xmlFile + ". Invalid XML.", e); } catch (final IOException e) { throw new IllegalArgumentException("Exception while parsing " + xmlFile + ". Error reading file.", e); } final Map<WhitelistType, Set<Integer>> projsWhitelisted = new HashMap<>(); final NodeList tagList = doc.getChildNodes(); if (!tagList.item(0).getNodeName().equals(PROJECT_WHITELIST_TAG)) { throw new RuntimeException("Cannot find tag '" + PROJECT_WHITELIST_TAG + "' in " + xmlFile); } final NodeList whitelist = tagList.item(0).getChildNodes(); for (int n = 0; n < whitelist.getLength(); ++n) { if (whitelist.item(n).getNodeType() != Node.ELEMENT_NODE) { continue; } final String whitelistType = whitelist.item(n).getNodeName(); final Set<Integer> projs = new HashSet<>(); final NodeList projectsList = whitelist.item(n).getChildNodes(); for (int i = 0; i < projectsList.getLength(); ++i) { final Node node = projectsList.item(i); if (node.getNodeType() == Node.ELEMENT_NODE) { if (node.getNodeName().equals(PROJECT_TAG)) { parseProjectTag(node, projs); } } } projsWhitelisted.put(WhitelistType.valueOf(whitelistType), projs); } projectsWhitelisted.set(projsWhitelisted); } private static void parseProjectTag(final Node node, final Set<Integer> projects) { final NamedNodeMap projectAttrMap = node.getAttributes(); final Node projectIdAttr = projectAttrMap.getNamedItem(PROJECTID_ATTR); if (projectIdAttr == null) { throw new RuntimeException("Error loading project. 
The '" + PROJECTID_ATTR + "' attribute doesn't exist"); } final String projectId = projectIdAttr.getNodeValue(); projects.add(Integer.parseInt(projectId)); } public static boolean isProjectWhitelisted(final int project, final WhitelistType whitelistType) { final Map<WhitelistType, Set<Integer>> projsWhitelisted = projectsWhitelisted.get(); if (projsWhitelisted != null) { final Set<Integer> projs = projsWhitelisted.get(whitelistType); if (projs != null) { return projs.contains(project); } } return false; } /** * The tag in the project whitelist xml config file should be same as the defined enums. */ public static enum WhitelistType { MemoryCheck, NumJobPerFlow } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/TriggerDependencyBean.java
/* * Copyright 2017 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. * */ package azkaban.project; import java.util.Collections; import java.util.Map; /** * Java bean loaded from YAML file to represent a trigger dependency. */ public class TriggerDependencyBean { private String name; private String type; private Map<String, String> params; public String getName() { return this.name; } public void setName(final String name) { this.name = name; } public String getType() { return this.type; } public void setType(final String type) { this.type = type; } public Map<String, String> getParams() { return this.params == null ? Collections.emptyMap() : this.params; } public void setParams(final Map<String, String> params) { this.params = params; } @Override public String toString() { return "TriggerDependencyBean{" + "name='" + this.name + '\'' + ", type='" + this.type + '\'' + ", params=" + this.params + '}'; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ProjectValidator.java
package azkaban.project.validator; import azkaban.project.Project; import azkaban.utils.Props; import java.io.File; /** * Interface to be implemented by plugins which are to be registered with Azkaban as project * validators that validate a project before uploaded into Azkaban. */ public interface ProjectValidator { /** * Initialize the validator using the given properties. */ boolean initialize(Props configuration); /** * Return a user friendly name of the validator. */ String getValidatorName(); /** * Validate the project inside the given directory. The validator, using its own validation logic, * will generate a {@link ValidationReport} representing the result of the validation. * * @param project project to validate * @param projectDir directory of the uncompressed project * @param additionalProps additional project-specific props (some validators may use this, others may not) * * @return The resulting ValidationReport. */ ValidationReport validateProject(Project project, File projectDir, Props additionalProps); }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ProjectValidatorCacheable.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.project.validator; import azkaban.project.Project; import azkaban.utils.Props; import java.io.File; /** * Interface to be implemented by plugins which are to be registered with Azkaban as project * validators that validate a project before uploaded into Azkaban. */ public interface ProjectValidatorCacheable extends ProjectValidator { /** * Get a hash representing a context state for a project. Adding or removing JARs should * not change this state. The state should only be based on text files. Two different projects * for which the validator returns the same cacheKey should have IDENTICAL validation results * for a given jar. I.e. if coollib-1.0.0.jar is included in Proj1 and is also in Proj2, so long * as the validator returns the same cacheKey for both Proj1 and Proj2 it should also return the * same validation result for the coollib-1.0.0.jar present in both projects. * * @param project project to get the cache key for * @param projectDir directory of the uncompressed project * @param additionalProps additional project-specific props (some validators may use this, others may not) * * @return Hash representing cache key for project */ String getCacheKey(Project project, File projectDir, Props additionalProps); }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ValidationReport.java
package azkaban.project.validator;

import java.io.File;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

/**
 * The result of a project validation generated by a {@link ProjectValidator}. It contains an enum
 * of type {@link ValidationStatus} representing whether the validation passes, generates warnings,
 * or generates errors. Accordingly, three sets of String are also maintained, storing the messages
 * generated by the {@link ProjectValidator} at both {@link ValidationStatus#WARN} and {@link
 * ValidationStatus#ERROR} level, as well as information messages associated with both levels.
 */
public class ValidationReport {

  /** Prefix tagging an info message as associated with the ERROR level. */
  private static final String ERROR_PREFIX = "ERROR";
  /** Prefix tagging an info message as associated with the WARN level. */
  private static final String WARN_PREFIX = "WARN";

  // Overall severity. Starts at PASS and is only ever raised (WARN, then ERROR), never lowered.
  protected ValidationStatus _status;
  // Files the validator removed from the project during (auto-fix) validation.
  protected Set<File> _removedFiles;
  // Files the validator modified OR created during (auto-fix) validation.
  protected Set<File> _modifiedFiles;
  // Info messages, each internally prefixed with ERROR_PREFIX or WARN_PREFIX to record its level.
  protected Set<String> _infoMsgs;
  protected Set<String> _warningMsgs;
  protected Set<String> _errorMsgs;

  /** Creates an empty report with status {@link ValidationStatus#PASS}. */
  public ValidationReport() {
    this._status = ValidationStatus.PASS;
    this._removedFiles = new HashSet<>();
    this._modifiedFiles = new HashSet<>();
    this._infoMsgs = new HashSet<>();
    this._warningMsgs = new HashSet<>();
    this._errorMsgs = new HashSet<>();
  }

  /**
   * Return the severity level this information message is associated with, decoded from the
   * prefix attached by {@link #addWarnLevelInfoMsg(String)} or {@link #addErrorLevelInfoMsg(String)}.
   * Messages with no recognized prefix map to {@link ValidationStatus#PASS}.
   */
  public static ValidationStatus getInfoMsgLevel(final String msg) {
    if (msg.startsWith(ERROR_PREFIX)) {
      return ValidationStatus.ERROR;
    }
    if (msg.startsWith(WARN_PREFIX)) {
      return ValidationStatus.WARN;
    }
    return ValidationStatus.PASS;
  }

  /**
   * Get the raw information message, i.e. with the internal severity prefix stripped.
   */
  public static String getInfoMsg(final String msg) {
    if (msg.startsWith(ERROR_PREFIX)) {
      return msg.replaceFirst(ERROR_PREFIX, "");
    }
    if (msg.startsWith(WARN_PREFIX)) {
      return msg.replaceFirst(WARN_PREFIX, "");
    }
    return msg;
  }

  /**
   * Add an information message associated with warning messages. Null messages are ignored.
   */
  public void addWarnLevelInfoMsg(final String msg) {
    if (msg != null) {
      this._infoMsgs.add(WARN_PREFIX + msg);
    }
  }

  /**
   * Add an information message associated with error messages. Null messages are ignored.
   */
  public void addErrorLevelInfoMsg(final String msg) {
    if (msg != null) {
      this._infoMsgs.add(ERROR_PREFIX + msg);
    }
  }

  /**
   * Add messages with status level being {@link ValidationStatus#WARN}. Raises the report status
   * to WARN unless errors have already been recorded. A null set is ignored.
   */
  public void addWarningMsgs(final Set<String> msgs) {
    if (msgs != null) {
      this._warningMsgs.addAll(msgs);
      if (!msgs.isEmpty() && this._errorMsgs.isEmpty()) {
        this._status = ValidationStatus.WARN;
      }
    }
  }

  /**
   * Add messages with status level being {@link ValidationStatus#ERROR}. Raises the report status
   * to ERROR. A null set is ignored.
   */
  public void addErrorMsgs(final Set<String> msgs) {
    if (msgs != null) {
      this._errorMsgs.addAll(msgs);
      if (!msgs.isEmpty()) {
        this._status = ValidationStatus.ERROR;
      }
    }
  }

  /**
   * Add a set of modified OR created files. A null set is ignored, matching the null-tolerant
   * behavior of {@link #addWarningMsgs(Set)} and {@link #addErrorMsgs(Set)}.
   */
  public void addModifiedFiles(final Set<File> files) {
    if (files != null) {
      this._modifiedFiles.addAll(files);
    }
  }

  /**
   * Add one of modified OR created file
   */
  public void addModifiedFile(final File file) {
    this._modifiedFiles.add(file);
  }

  /**
   * Add a set of removed files. A null set is ignored, matching the null-tolerant behavior of
   * {@link #addWarningMsgs(Set)} and {@link #addErrorMsgs(Set)}.
   */
  public void addRemovedFiles(final Set<File> files) {
    if (files != null) {
      this._removedFiles.addAll(files);
    }
  }

  /**
   * Add one removed file
   */
  public void addRemovedFile(final File file) {
    this._removedFiles.add(file);
  }

  /**
   * Retrieve the status of the report.
   */
  public ValidationStatus getStatus() {
    return this._status;
  }

  /**
   * Retrieve the list of information messages (still carrying their internal severity prefixes;
   * use {@link #getInfoMsg(String)} / {@link #getInfoMsgLevel(String)} to decode them).
   */
  public Set<String> getInfoMsgs() {
    return this._infoMsgs;
  }

  /**
   * Retrieve the messages associated with status level {@link ValidationStatus#WARN}
   */
  public Set<String> getWarningMsgs() {
    return this._warningMsgs;
  }

  /**
   * Retrieve the messages associated with status level {@link ValidationStatus#ERROR}
   */
  public Set<String> getErrorMsgs() {
    return this._errorMsgs;
  }

  /**
   * Get the set of modified OR created files
   */
  public Set<File> getModifiedFiles() {
    return this._modifiedFiles;
  }

  /**
   * Get the set of removed files
   */
  public Set<File> getRemovedFiles() {
    return this._removedFiles;
  }

  @Override
  public boolean equals(final Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    final ValidationReport that = (ValidationReport) o;
    return _status == that._status
        && _removedFiles.equals(that._removedFiles)
        && _modifiedFiles.equals(that._modifiedFiles)
        && _infoMsgs.equals(that._infoMsgs)
        && _warningMsgs.equals(that._warningMsgs)
        && _errorMsgs.equals(that._errorMsgs);
  }

  @Override
  public int hashCode() {
    return Objects.hash(_status, _removedFiles, _modifiedFiles, _infoMsgs, _warningMsgs,
        _errorMsgs);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ValidationStatus.java
package azkaban.project.validator; /** * Status of the ValidationReport. It also represents the severity of each rule. The order of * severity for the status is PASS < WARN < ERROR. */ public enum ValidationStatus { PASS("PASS"), WARN("WARN"), ERROR("ERROR"); private final String _status; private ValidationStatus(final String status) { this._status = status; } @Override public String toString() { return this._status; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ValidatorClassLoader.java
package azkaban.project.validator;

import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Vector;
import java.util.jar.JarFile;
import sun.net.www.protocol.jar.JarURLConnection;

/**
 * Workaround for jdk 6 disgrace with open jar files &amp; native libs, which is a reason of
 * unrefreshable classloader.
 *
 * <p>This loader's {@link #close()} force-closes every JAR file the loader has opened (via
 * reflection into {@code URLClassLoader} internals), evicts those JARs from the JDK's shared
 * jar-file-factory caches, and finalizes any native libraries — so that the JAR files on disk can
 * be replaced/refreshed and a new loader created.
 *
 * <p>NOTE(review): this relies on private JDK fields ({@code URLClassLoader.ucp},
 * {@code ClassLoader.nativeLibraries}) and on the {@code sun.net.www.protocol.jar} package;
 * presumably it only works on the old JDKs it was written for — confirm before running on a
 * modern JDK.
 */
public class ValidatorClassLoader extends URLClassLoader {

  // Names of JAR files this loader closed in closeClassLoader(); used by
  // cleanupJarFileFactory() to evict the matching entries from the JDK caches.
  protected HashSet<String> setJarFileNames2Close = new HashSet<>();

  public ValidatorClassLoader(final URL[] urls, final ClassLoader parent) {
    super(urls, parent);
  }

  public ValidatorClassLoader(final URL[] urls) {
    super(urls);
  }

  /**
   * Releases every resource held by this loader: closes its JARs, finalizes native libraries,
   * then purges the JDK jar-file-factory caches. Any reflection/IO failure is rethrown as the
   * unchecked {@link ValidatorManagerException}.
   */
  @Override
  public void close() throws ValidatorManagerException {
    this.setJarFileNames2Close.clear();
    closeClassLoader(this);
    finalizeNativeLibs(this);
    cleanupJarFileFactory();
  }

  /**
   * cleanup jar file factory cache
   *
   * <p>Reflectively reads {@code JarURLConnection.factory} and evicts from its {@code fileCache}
   * and {@code urlCache} every {@link JarFile} whose name was recorded in
   * {@link #setJarFileNames2Close}, closing the JarFile as it goes.
   *
   * @return true if at least one cache was found and swept, false otherwise
   * @throws ValidatorManagerException wrapping any reflection or IO failure
   */
  public boolean cleanupJarFileFactory() throws ValidatorManagerException {
    boolean res = false;
    final Class classJarURLConnection = JarURLConnection.class;
    Field f;
    try {
      f = classJarURLConnection.getDeclaredField("factory");
    } catch (final NoSuchFieldException e) {
      throw new ValidatorManagerException(e);
    }
    if (f == null) {
      return false;
    }
    f.setAccessible(true);
    Object obj;
    try {
      obj = f.get(null);
    } catch (final IllegalAccessException e) {
      throw new ValidatorManagerException(e);
    }
    if (obj == null) {
      return false;
    }
    final Class classJarFileFactory = obj.getClass();
    // Static caches of the (single) JarFileFactory instance.
    HashMap fileCache = null;
    try {
      f = classJarFileFactory.getDeclaredField("fileCache");
      f.setAccessible(true);
      obj = f.get(null);
      if (obj instanceof HashMap) {
        fileCache = (HashMap) obj;
      }
    } catch (NoSuchFieldException | IllegalAccessException e) {
      throw new ValidatorManagerException(e);
    }
    HashMap urlCache = null;
    try {
      f = classJarFileFactory.getDeclaredField("urlCache");
      f.setAccessible(true);
      obj = f.get(null);
      if (obj instanceof HashMap) {
        urlCache = (HashMap) obj;
      }
    } catch (NoSuchFieldException | IllegalAccessException e) {
      throw new ValidatorManagerException(e);
    }
    if (urlCache != null) {
      // Iterate over a clone so removals on the live cache don't break iteration.
      final HashMap urlCacheTmp = (HashMap) urlCache.clone();
      final Iterator it = urlCacheTmp.keySet().iterator();
      while (it.hasNext()) {
        obj = it.next();
        if (!(obj instanceof JarFile)) {
          continue;
        }
        final JarFile jarFile = (JarFile) obj;
        if (this.setJarFileNames2Close.contains(jarFile.getName())) {
          try {
            jarFile.close();
          } catch (final IOException e) {
            throw new ValidatorManagerException(e);
          }
          if (fileCache != null) {
            fileCache.remove(urlCache.get(jarFile));
          }
          urlCache.remove(jarFile);
        }
      }
      res = true;
    } else if (fileCache != null) {
      // Older JDK layout: only fileCache exists; sweep it by key instead.
      final HashMap fileCacheTmp = (HashMap) fileCache.clone();
      final Iterator it = fileCacheTmp.keySet().iterator();
      while (it.hasNext()) {
        final Object key = it.next();
        obj = fileCache.get(key);
        if (!(obj instanceof JarFile)) {
          continue;
        }
        final JarFile jarFile = (JarFile) obj;
        if (this.setJarFileNames2Close.contains(jarFile.getName())) {
          try {
            jarFile.close();
          } catch (final IOException e) {
            throw new ValidatorManagerException(e);
          }
          fileCache.remove(key);
        }
      }
      res = true;
    }
    this.setJarFileNames2Close.clear();
    return res;
  }

  /**
   * close jar files of cl
   *
   * <p>Walks {@code cl}'s private {@code ucp.loaders} list, and for each loader with a
   * {@code jar} field closes that {@link JarFile} and records its name in
   * {@link #setJarFileNames2Close} (consumed later by {@link #cleanupJarFileFactory()}).
   *
   * @param cl the class loader whose JARs should be closed (may be null; then nothing happens)
   * @return true if the loaders list was successfully read, false otherwise
   * @throws ValidatorManagerException wrapping any reflection or IO failure
   */
  public boolean closeClassLoader(final ClassLoader cl) throws ValidatorManagerException {
    boolean res = false;
    if (cl == null) {
      return res;
    }
    final Class classURLClassLoader = URLClassLoader.class;
    Field f = null;
    try {
      f = classURLClassLoader.getDeclaredField("ucp");
    } catch (final NoSuchFieldException e) {
      throw new ValidatorManagerException(e);
    }
    if (f != null) {
      f.setAccessible(true);
      Object obj = null;
      try {
        obj = f.get(cl);
      } catch (final IllegalAccessException e) {
        throw new ValidatorManagerException(e);
      }
      if (obj != null) {
        final Object ucp = obj;
        f = null;
        try {
          f = ucp.getClass().getDeclaredField("loaders");
        } catch (final NoSuchFieldException e) {
          throw new ValidatorManagerException(e);
        }
        if (f != null) {
          f.setAccessible(true);
          ArrayList loaders = null;
          try {
            loaders = (ArrayList) f.get(ucp);
            res = true;
          } catch (final IllegalAccessException e) {
            throw new ValidatorManagerException(e);
          }
          for (int i = 0; loaders != null && i < loaders.size(); i++) {
            obj = loaders.get(i);
            f = null;
            try {
              f = obj.getClass().getDeclaredField("jar");
            } catch (final NoSuchFieldException e) {
              throw new ValidatorManagerException(e);
            }
            if (f != null) {
              f.setAccessible(true);
              try {
                obj = f.get(obj);
              } catch (final IllegalAccessException e) {
                throw new ValidatorManagerException(e);
              }
              if (obj instanceof JarFile) {
                final JarFile jarFile = (JarFile) obj;
                // Remember the name so cleanupJarFileFactory() can evict it from the caches.
                this.setJarFileNames2Close.add(jarFile.getName());
                try {
                  jarFile.close();
                } catch (final IOException e) {
                  throw new ValidatorManagerException(e);
                }
              }
            }
          }
        }
      }
    }
    return res;
  }

  /**
   * finalize native libraries
   *
   * <p>Reflectively reads {@code ClassLoader.nativeLibraries} of {@code cl} and invokes the
   * private {@code finalize()} of each NativeLibrary entry, which unloads the native library.
   *
   * @param cl the class loader whose native libraries should be finalized
   * @return true if the nativeLibraries vector was found and processed, false otherwise
   * @throws ValidatorManagerException wrapping any reflection failure
   */
  public boolean finalizeNativeLibs(final ClassLoader cl) throws ValidatorManagerException {
    boolean res = false;
    final Class classClassLoader = ClassLoader.class;
    java.lang.reflect.Field nativeLibraries = null;
    try {
      nativeLibraries = classClassLoader.getDeclaredField("nativeLibraries");
    } catch (final NoSuchFieldException e) {
      throw new ValidatorManagerException(e);
    }
    if (nativeLibraries == null) {
      return res;
    }
    nativeLibraries.setAccessible(true);
    Object obj = null;
    try {
      obj = nativeLibraries.get(cl);
    } catch (final IllegalAccessException e) {
      throw new ValidatorManagerException(e);
    }
    if (!(obj instanceof Vector)) {
      return res;
    }
    res = true;
    final Vector java_lang_ClassLoader_NativeLibrary = (Vector) obj;
    for (final Object lib : java_lang_ClassLoader_NativeLibrary) {
      java.lang.reflect.Method finalize = null;
      try {
        finalize = lib.getClass().getDeclaredMethod("finalize", new Class[0]);
      } catch (final NoSuchMethodException e) {
        throw new ValidatorManagerException(e);
      }
      if (finalize != null) {
        finalize.setAccessible(true);
        try {
          finalize.invoke(lib, new Object[0]);
        } catch (final IllegalAccessException e) {
          throw new ValidatorManagerException(e);
        } catch (final InvocationTargetException e) {
          throw new ValidatorManagerException(e);
        }
      }
    }
    return res;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ValidatorConfigs.java
package azkaban.project.validator;

/**
 * Configuration keys (and their defaults) for the project-validator subsystem. Pure constant
 * holder; not instantiable.
 */
public class ValidatorConfigs {

  /**
   * Key for the config param specifying the directory containing validator JAR files
   **/
  public static final String VALIDATOR_PLUGIN_DIR = "project.validators.dir";

  /**
   * Default validator directory
   **/
  public static final String DEFAULT_VALIDATOR_DIR = "validators";

  /**
   * Key for the config param specifying the location of validator xml configuration file, no
   * default value
   **/
  public static final String XML_FILE_PARAM = "project.validators.xml.file";

  /**
   * Key for the config param indicating whether the user choose to turn on the auto-fix feature
   **/
  public static final String CUSTOM_AUTO_FIX_FLAG_PARAM = "project.validators.fix.flag";

  /**
   * Default custom auto fix flag. Turn auto-fix feature on by default.
   **/
  public static final Boolean DEFAULT_CUSTOM_AUTO_FIX_FLAG = true;

  /**
   * Key for the config param indicating whether to show auto-fix related UI to the user
   **/
  public static final String VALIDATOR_AUTO_FIX_PROMPT_FLAG_PARAM =
      "project.validators.fix.prompt";

  /**
   * Do not show auto-fix related UI by default
   **/
  public static final Boolean DEFAULT_VALIDATOR_AUTO_FIX_PROMPT_FLAG = false;

  /**
   * Key for the config param specifying the label to be displayed with auto-fix UI
   **/
  public static final String VALIDATOR_AUTO_FIX_PROMPT_LABEL_PARAM =
      "project.validators.fix.label";

  /**
   * Key for the config param specifying the link address with detailed information about auto-fix
   **/
  public static final String VALIDATOR_AUTO_FIX_PROMPT_LINK_PARAM = "project.validators.fix.link";

  private ValidatorConfigs() {
  } // Prevents instantiation
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ValidatorManager.java
package azkaban.project.validator;

import azkaban.project.Project;
import azkaban.utils.Props;
import java.io.File;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;

/**
 * ValidatorManager is responsible for loading the list of validators specified in the Azkaban
 * validator configuration file. Once these validators are loaded, the ValidatorManager will use the
 * registered validators to verify each uploaded project before persisting it.
 */
public interface ValidatorManager {

  /**
   * Load the validators using the given properties. Each validator is also given the specified
   * logger to record any necessary message in the Azkaban log file.
   */
  void loadValidators(Props props, Logger logger);

  /**
   * Validate the given project using the registered list of validators. This method returns a map
   * of {@link ValidationReport} with the key being the validator's name and the value being the
   * {@link ValidationReport} generated by that validator.
   */
  Map<String, ValidationReport> validate(Project project, File projectDir, Props props);

  /**
   * Returns a list of String containing the name of each registered validators.
   */
  List<String> getValidatorsInfo();

  /**
   * Get combined cacheKey for all validators (hash of all hashes)
   */
  String getCacheKey(Project project, File projectDir, Props props);
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/ValidatorManagerException.java
package azkaban.project.validator;

/**
 * Unchecked exception thrown when loading or running project validators fails (e.g. plugin
 * classloading, configuration parsing, or reflection errors).
 */
public class ValidatorManagerException extends RuntimeException {

  private static final long serialVersionUID = 1L;

  public ValidatorManagerException(final String message) {
    super(message);
  }

  public ValidatorManagerException(final Throwable cause) {
    super(cause);
  }

  public ValidatorManagerException(final String message, final Throwable cause) {
    super(message, cause);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/project/validator/XmlValidatorManager.java
package azkaban.project.validator;

import azkaban.project.Project;
import azkaban.utils.HashUtils;
import azkaban.utils.Props;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;

/**
 * Xml implementation of the ValidatorManager.
 *
 * <p>Looks for the property project.validators.xml.file in the azkaban properties.
 *
 * <p>The xml to be in the following form:
 * <pre>{@code
 * <azkaban-validators>
 *   <validator classname="validator class name">
 *     <!-- optional configurations for each individual validator -->
 *     <property key="validator property key" value="validator property value" />
 *     ...
 *   </validator>
 * </azkaban-validators>
 * }</pre>
 */
public class XmlValidatorManager implements ValidatorManager {

  // XML tag/attribute names recognized in the validator configuration file.
  public static final String VALIDATOR_TAG = "validator";
  public static final String CLASSNAME_ATTR = "classname";
  public static final String ITEM_TAG = "property";
  private static final Logger logger = Logger.getLogger(XmlValidatorManager.class);
  // Class loader over the JARs found in the validator plugin directory.
  private ValidatorClassLoader validatorLoader;
  private final String validatorDirPath;
  // Validator name -> instance; LinkedHashMap so validators run in registration order.
  private Map<String, ProjectValidator> validators;

  /**
   * Load the validator plugins from the validator directory (default being validators/) into the
   * validator ClassLoader. This enables creating instances of these validators in the
   * loadValidators() method.
   */
  // Todo jamiesjc: guicify XmlValidatorManager class
  public XmlValidatorManager(final Props props) {
    this.validatorDirPath = props
        .getString(ValidatorConfigs.VALIDATOR_PLUGIN_DIR, ValidatorConfigs.DEFAULT_VALIDATOR_DIR);
    final File validatorDir = new File(this.validatorDirPath);
    // A missing plugin dir is tolerated: only built-in/XML-configured classes on the parent
    // classpath would then be loadable.
    if (!validatorDir.canRead() || !validatorDir.isDirectory()) {
      logger.warn("Validator directory " + this.validatorDirPath
          + " does not exist or is not a directory.");
    }

    // Initialize the class loader.
    initClassLoader();

    // Load the validators specified in the xml file.
    try {
      loadValidators(props, logger);
    } catch (final Exception e) {
      logger.error("Cannot load all the validators.");
      throw new ValidatorManagerException(e);
    }
  }

  // Builds validatorLoader from every *.jar directly inside the plugin directory
  // (non-recursive; an unreadable/missing dir yields a loader with no extra URLs).
  private void initClassLoader() {
    final File validatorDir = new File(this.validatorDirPath);
    final List<URL> resources = new ArrayList<>();
    try {
      if (validatorDir.canRead() && validatorDir.isDirectory()) {
        for (final File f : validatorDir.listFiles()) {
          if (f.getName().endsWith(".jar")) {
            resources.add(f.toURI().toURL());
          }
        }
      }
    } catch (final MalformedURLException e) {
      throw new ValidatorManagerException(e);
    }
    validatorLoader = new ValidatorClassLoader(resources.toArray(new URL[resources.size()]));
  }

  /**
   * Creates instances of the validators, passing in any props that are global and not project
   * specific. These validator instances are global and will be used for all projects.
   *
   * <p>Silently registers no validators if the XML config key or file is absent; malformed XML
   * is fatal ({@link ValidatorManagerException}).
   *
   * {@inheritDoc}
   *
   * @see azkaban.project.validator.ValidatorManager#loadValidators(azkaban.utils.Props,
   * org.apache.log4j.Logger)
   */
  @Override
  public void loadValidators(final Props props, final Logger log) {
    this.validators = new LinkedHashMap<>();
    if (!props.containsKey(ValidatorConfigs.XML_FILE_PARAM)) {
      logger.warn(
          "Azkaban properties file does not contain the key " + ValidatorConfigs.XML_FILE_PARAM);
      return;
    }
    final String xmlPath = props.get(ValidatorConfigs.XML_FILE_PARAM);
    final File file = new File(xmlPath);
    if (!file.exists()) {
      logger.error("Azkaban validator configuration file " + xmlPath + " does not exist.");
      return;
    }

    // Creating the document builder to parse xml.
    final DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder builder = null;
    try {
      builder = docBuilderFactory.newDocumentBuilder();
    } catch (final ParserConfigurationException e) {
      throw new ValidatorManagerException(
          "Exception while parsing validator xml. Document builder not created.", e);
    }

    Document doc = null;
    try {
      doc = builder.parse(file);
    } catch (final SAXException e) {
      throw new ValidatorManagerException("Exception while parsing " + xmlPath + ". Invalid XML.",
          e);
    } catch (final IOException e) {
      throw new ValidatorManagerException("Exception while parsing " + xmlPath
          + ". Error reading file.", e);
    }

    // Expect <azkaban-validators> as the single top-level element; register one validator per
    // <validator> child.
    final NodeList tagList = doc.getChildNodes();
    final Node azkabanValidators = tagList.item(0);

    final NodeList azkabanValidatorsList = azkabanValidators.getChildNodes();
    for (int i = 0; i < azkabanValidatorsList.getLength(); ++i) {
      final Node node = azkabanValidatorsList.item(i);
      if (node.getNodeType() == Node.ELEMENT_NODE) {
        if (node.getNodeName().equals(VALIDATOR_TAG)) {
          parseValidatorTag(node, props, log);
        }
      }
    }
  }

  // Registers a single <validator> element: folds its <property> children into the shared props,
  // then instantiates the class named by the 'classname' attribute via a (Logger) constructor.
  private void parseValidatorTag(final Node node, final Props props, final Logger log) {
    final NamedNodeMap validatorAttrMap = node.getAttributes();
    final Node classNameAttr = validatorAttrMap.getNamedItem(CLASSNAME_ATTR);
    if (classNameAttr == null) {
      throw new ValidatorManagerException(
          "Error loading validator. The validator 'classname' attribute doesn't exist");
    }
    final NodeList keyValueItemsList = node.getChildNodes();
    for (int i = 0; i < keyValueItemsList.getLength(); i++) {
      final Node keyValuePair = keyValueItemsList.item(i);
      if (keyValuePair.getNodeName().equals(ITEM_TAG)) {
        parseItemTag(keyValuePair, props);
      }
    }
    final String className = classNameAttr.getNodeValue();
    try {
      // Attempt to instantiate original ProjectValidator
      final Class<? extends ProjectValidator> validatorClass =
          (Class<? extends ProjectValidator>) validatorLoader.loadClass(className);
      final Constructor<?> validatorConstructor = validatorClass.getConstructor(Logger.class);
      final ProjectValidator validator = (ProjectValidator) validatorConstructor.newInstance(log);
      validator.initialize(props);
      this.validators.put(validator.getValidatorName(), validator);
      logger.info("Added validator " + className + " to list of validators.");
    } catch (final Exception e) {
      logger.error("Could not instantiate ProjectValidator " + className);
      throw new ValidatorManagerException(e);
    }
  }

  // NOTE(review): this writes into the caller-supplied Props, so validator <property> entries
  // become visible to all subsequently-loaded validators as well.
  private void parseItemTag(final Node node, final Props props) {
    final NamedNodeMap keyValueMap = node.getAttributes();
    final Node keyAttr = keyValueMap.getNamedItem("key");
    final Node valueAttr = keyValueMap.getNamedItem("value");
    if (keyAttr == null || valueAttr == null) {
      throw new ValidatorManagerException("Error loading validator key/value "
          + "pair. The 'key' or 'value' attribute doesn't exist");
    }
    props.put(keyAttr.getNodeValue(), valueAttr.getNodeValue());
  }

  /**
   * Gets a SHA1 hash of the combined cache keys for all loaded validators. Validators that are
   * not {@link ProjectValidatorCacheable} contribute nothing to the key.
   *
   * @see azkaban.project.validator.ProjectValidatorCacheable#getCacheKey(azkaban.project.Project,
   * java.io.File, azkaban.utils.Props)
   */
  @Override
  public String getCacheKey(final Project project, final File projectDir, final Props props) {
    final Props nonNullProps = props == null ? new Props() : props;
    StringBuilder compoundedKey = new StringBuilder();
    for (final Entry<String, ProjectValidator> validator : this.validators.entrySet()) {
      try {
        // Attempt to cast to ProjectValidatorCacheable
        ProjectValidatorCacheable cacheableValidator =
            (ProjectValidatorCacheable) validator.getValue();
        compoundedKey.append(cacheableValidator.getCacheKey(project, projectDir, nonNullProps));
      } catch (ClassCastException e) {
        // Swallow this error - the validator must not have been a cacheable validator
      }
    }
    return HashUtils.SHA1.getHashStr(compoundedKey.toString());
  }

  /**
   * Validates the project with all loaded validators.
   *
   * @see azkaban.project.validator.ProjectValidator#validateProject(azkaban.project.Project,
   * java.io.File, azkaban.utils.Props)
   */
  @Override
  public Map<String, ValidationReport> validate(final Project project, final File projectDir,
      final Props additionalProps) {
    final Props nonNullAdditionalProps = additionalProps == null ? new Props() : additionalProps;
    final Map<String, ValidationReport> reports = new LinkedHashMap<>();

    for (final Entry<String, ProjectValidator> validator : this.validators.entrySet()) {
      reports.put(validator.getKey(),
          validator.getValue().validateProject(project, projectDir, nonNullAdditionalProps));
      logger.info("Validation status of validator " + validator.getKey() + " is "
          + reports.get(validator.getKey()).getStatus());
    }
    return reports;
  }

  @Override
  public List<String> getValidatorsInfo() {
    final List<String> info = new ArrayList<>();
    for (final String key : this.validators.keySet()) {
      info.add(key);
    }
    return info;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/AbstractRampPolicy.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.ramppolicy; import azkaban.executor.ExecutableFlow; import azkaban.executor.ExecutableRamp; import azkaban.utils.Props; /** * Abstract Ramp Policy */ public abstract class AbstractRampPolicy implements RampPolicy { protected volatile Props sysProps; protected volatile Props privateProps; protected AbstractRampPolicy(final Props sysProps, final Props privateProps) { this.sysProps = sysProps; this.privateProps = privateProps; } @Override public boolean check(ExecutableFlow flow, ExecutableRamp executableRamp) { preprocess(executableRamp); if (executableRamp.isNotTestable()) { return false; // filter out inactive or paused executable ramp flow } return isRampTestEnabled(flow, executableRamp); } protected boolean isRampTestEnabled(ExecutableFlow flow, ExecutableRamp executableRamp) { return false; } protected void preprocess(ExecutableRamp executableRamp) { } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/FullRampPolicy.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.ramppolicy;

import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableRamp;
import azkaban.utils.Props;

/**
 * Full Ramp Policy enables the ramp test for every flow: as long as the ramp itself is testable
 * (not inactive/paused — see AbstractRampPolicy.check), the flow is always admitted.
 */
public final class FullRampPolicy extends AbstractRampPolicy {

  public FullRampPolicy(Props sysProps, Props privateProps) {
    super(sysProps, privateProps);
  }

  // Unconditionally admit the flow into the ramp.
  @Override
  protected boolean isRampTestEnabled(ExecutableFlow flow, ExecutableRamp executableRamp) {
    return true;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/NoopRampPolicy.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.ramppolicy;

import azkaban.utils.Props;

/**
 * Noop Ramp Policy is a Dummy Ramp Policy which does not apply any ramp upon the job flow.
 *
 * <p>It inherits AbstractRampPolicy's default isRampTestEnabled(), which returns false, so no
 * flow is ever admitted into the ramp.
 */
public final class NoopRampPolicy extends AbstractRampPolicy {

  public NoopRampPolicy(Props sysProps, Props privateProps) {
    super(sysProps, privateProps);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/RampPolicy.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.ramppolicy;

import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableRamp;

/**
 * Raw Ramp Policy interface.
 *
 * A ramp policy is a defined strategy for ramping a service.
 */
public interface RampPolicy {

  /**
   * Run the ramp policy. In general this method can only be applied once. Must either succeed or
   * throw an exception.
   *
   * @param flow executable flow
   * @param executableRamp executed ramp
   * @return if the flow is qualified for ramp, returns TRUE.
   * @throws Exception if the policy cannot be evaluated
   */
  boolean check(
      ExecutableFlow flow,
      ExecutableRamp executableRamp
  ) throws Exception;
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/RampPolicyExecutionException.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.ramppolicy; /** * Defined Exception Type for running Ramp Policy */ public class RampPolicyExecutionException extends RuntimeException { private static final long serialVersionUID = 1L; public RampPolicyExecutionException(final String message) { super(message); } public RampPolicyExecutionException(final Throwable cause) { super(cause); } public RampPolicyExecutionException(final String message, final Throwable cause) { super(message, cause); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/RampPolicyManager.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.ramppolicy;

import azkaban.Constants;
import azkaban.utils.DIUtils;
import azkaban.utils.FileIOUtils;
import azkaban.utils.Props;
import azkaban.utils.PropsUtils;
import azkaban.utils.Utils;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Loads and manages ramp-policy plugins.
 *
 * <p>Default (built-in) policies are always registered; additional policies are discovered under
 * {@code pluginsDir}, each in its own sub-directory containing a {@code conf} file (runtime props)
 * and a {@code sysconf} file (load props with classpath and {@code ramppolicy.class}). Each plugin
 * gets its own class loader. The active {@link RampPolicyPluginSet} is swapped atomically under
 * {@code synchronized (this)} so a failed reload never replaces a working set.
 */
public class RampPolicyManager {

  private static final Logger LOGGER = LoggerFactory.getLogger(RampPolicyManager.class);

  // Directory that holds one sub-directory per ramp-policy plugin; may be null (defaults only).
  private final String pluginsDir;
  private final ClassLoader parentClassLoader;
  private final Props globalProperties;
  private RampPolicyPluginSet pluginSet;

  public RampPolicyManager(final String pluginsDir, final Props globalProperties,
      final ClassLoader parentClassLoader) {
    this.pluginsDir = pluginsDir;
    this.globalProperties = globalProperties;
    this.parentClassLoader = parentClassLoader;
    // NOTE(review): calling the public (overridable) loadPlugins() from the constructor is unsafe
    // if this class is ever subclassed; kept for interface compatibility.
    loadPlugins();
  }

  /**
   * (Re)loads the full plugin set: built-in policies first, then any plugins found in
   * {@code pluginsDir}. On success the new set atomically replaces the current one; on failure the
   * previous set stays active.
   *
   * @throws RampPolicyManagerException if any plugin fails to load
   */
  public void loadPlugins() throws RampPolicyManagerException {
    final RampPolicyPluginSet plugins = new RampPolicyPluginSet();

    loadDefaultTypes(plugins);

    if (this.pluginsDir != null) {
      final File pluginsDir = new File(this.pluginsDir);
      if (FileIOUtils.isValidDirectory(pluginsDir)) {
        try {
          LOGGER.info(
              "Ramp policy plugins directory set. Loading extra ramp policies from " + pluginsDir);
          loadPluginRampPolicies(plugins, pluginsDir);
        } catch (final Exception e) {
          // Log at error level (was info) — this aborts the reload.
          LOGGER.error("Ramp Policy Plugins failed to load. " + e.getCause(), e);
          throw new RampPolicyManagerException(e);
        }
      }
    }

    // Swap the plugin set. If an exception was thrown above, the old set remains in place.
    synchronized (this) {
      this.pluginSet = plugins;
    }
  }

  // Registers the built-in policies: NOOP, FULL, and the three "Simple" variants.
  private void loadDefaultTypes(final RampPolicyPluginSet plugins)
      throws RampPolicyManagerException {
    LOGGER.info("Loading plugin default ramp policies");
    plugins.addPluginClass(NoopRampPolicy.class.getSimpleName(), NoopRampPolicy.class);
    plugins.addPluginClass(FullRampPolicy.class.getSimpleName(), FullRampPolicy.class);
    plugins.addPluginClass(SimpleAutoRampPolicy.class.getSimpleName(), SimpleAutoRampPolicy.class);
    plugins.addPluginClass(
        SimpleQuickRampPolicy.class.getSimpleName(), SimpleQuickRampPolicy.class);
    plugins.addPluginClass(SimpleRampPolicy.class.getSimpleName(), SimpleRampPolicy.class);
  }

  // Loads the shared props and then every plugin sub-directory under pluginsDir.
  private void loadPluginRampPolicies(final RampPolicyPluginSet plugins, final File pluginsDir)
      throws RampPolicyManagerException {
    // Common runtime properties shared by every ramp policy.
    try {
      plugins.setCommonPluginProps(
          new Props(this.globalProperties,
              new File(pluginsDir, Constants.PluginManager.COMMONCONFFILE)));
    } catch (final IOException e) {
      // FIX: pass the exception as cause (was string-concatenated, losing the stack trace).
      throw new RampPolicyManagerException("Failed to load common plugin job properties", e);
    }

    // Common load-time properties shared by every plugin loader.
    try {
      plugins.setCommonPluginLoadProps(
          new Props(null, new File(pluginsDir, Constants.PluginManager.COMMONSYSCONFFILE)));
    } catch (final IOException e) {
      throw new RampPolicyManagerException("Failed to load common plugin loader properties", e);
    }

    // One plugin per sub-directory. FIX: listFiles() can return null on I/O error — guard it.
    final File[] pluginDirs = pluginsDir.listFiles();
    if (pluginDirs == null) {
      LOGGER.warn("Unable to list ramp policy plugin directory " + pluginsDir);
      return;
    }
    for (final File pluginDir : pluginDirs) {
      if (FileIOUtils.isValidDirectory(pluginDir)) {
        try {
          loadRampPolicies(pluginDir, plugins);
        } catch (final Exception e) {
          LOGGER.error("Failed to load ramp policy " + pluginDir.getName() + e.getMessage(), e);
          throw new RampPolicyManagerException(e);
        }
      }
    }
  }

  // Loads a single plugin: props, load-props, class loader, policy class, and a smoke test.
  private void loadRampPolicies(final File pluginDir, final RampPolicyPluginSet plugins)
      throws RampPolicyManagerException {
    // The directory name is the ramp-policy name.
    final String rampPolicyName = pluginDir.getName();
    LOGGER.info("Loading plugin " + rampPolicyName);

    final File pluginPropsFile = new File(pluginDir, Constants.PluginManager.CONFFILE);
    final File pluginLoadPropsFile = new File(pluginDir, Constants.PluginManager.SYSCONFFILE);

    if (!pluginPropsFile.exists()) {
      LOGGER.info("Plugin load props file " + pluginPropsFile + " not found.");
      return;
    }

    Props pluginProps = null;
    Props pluginLoadProps = null;
    try {
      pluginProps = new Props(plugins.getCommonPluginProps(), pluginPropsFile);
      // "plugin.dir" lets both plugin.properties and user code locate the plugin on disk.
      pluginProps.put("plugin.dir", pluginDir.getAbsolutePath());
      plugins.addPluginProps(rampPolicyName, pluginProps);

      pluginLoadProps = new Props(plugins.getCommonPluginLoadProps(), pluginLoadPropsFile);
      pluginLoadProps.put("plugin.dir", pluginDir.getAbsolutePath());
      pluginLoadProps = PropsUtils.resolveProps(pluginLoadProps);

      plugins.addPluginLoadProps(rampPolicyName, pluginLoadProps);
    } catch (final Exception e) {
      LOGGER.error("pluginLoadProps to help with debugging: " + pluginLoadProps);
      throw new RampPolicyManagerException(
          "Failed to get ramp policy properties" + e.getMessage(), e);
    }

    final ClassLoader rampPolicyClassLoader =
        loadRampPolicyClassLoader(pluginDir, rampPolicyName, plugins);

    final String rampPolicyClass = pluginLoadProps.get("ramppolicy.class");
    Class<? extends RampPolicy> clazz = null;
    try {
      // Safe by contract: "ramppolicy.class" must name a RampPolicy implementation; the
      // constructor smoke test below fails fast otherwise.
      @SuppressWarnings("unchecked")
      final Class<? extends RampPolicy> loaded =
          (Class<? extends RampPolicy>) rampPolicyClassLoader.loadClass(rampPolicyClass);
      clazz = loaded;
      plugins.addPluginClass(rampPolicyName, clazz);
    } catch (final ClassNotFoundException e) {
      throw new RampPolicyManagerException(e);
    }

    // Smoke-test the plugin: verify a (sysProps, props) constructor exists and runs.
    LOGGER.info("Verifying ramp policy plugin " + rampPolicyName);
    try {
      final Props fakeSysProps = new Props(pluginLoadProps);
      final Props fakeProps = new Props(pluginProps);
      Utils.callConstructor(clazz, fakeSysProps, fakeProps);
    } catch (final Throwable t) {
      LOGGER.info("RampPolicy " + rampPolicyName + " failed test!", t);
      throw new RampPolicyExecutionException(t);
    }

    LOGGER.info("Loaded ramp policy " + rampPolicyName + " " + rampPolicyClass);
  }

  /**
   * Creates a ClassLoader holding all plugin resources (jars/conf dirs) listed in the plugin's
   * load props ({@code ramppolicy.global.classpath}, {@code ramppolicy.classpath},
   * {@code ramppolicy.lib.dir}).
   */
  private ClassLoader loadRampPolicyClassLoader(final File pluginDir, final String rampPolicyName,
      final RampPolicyPluginSet plugins) {
    final Props pluginLoadProps = plugins.getPluginLoadProps(rampPolicyName);
    final List<URL> resources = new ArrayList<>();

    try {
      final List<String> typeGlobalClassPath =
          pluginLoadProps.getStringList("ramppolicy.global.classpath", null, ",");
      final List<String> typeClassPath =
          pluginLoadProps.getStringList("ramppolicy.classpath", null, ",");
      final List<String> ramppolicyLibDirs =
          pluginLoadProps.getStringList("ramppolicy.lib.dir", null, ",");
      resources.addAll(
          DIUtils.loadResources(pluginDir, typeGlobalClassPath, typeClassPath, ramppolicyLibDirs));
    } catch (final MalformedURLException e) {
      throw new RampPolicyManagerException(e);
    }

    // Each ramp policy gets its own class loader so plugin jars cannot clash.
    LOGGER.info(String.format("Classpath for plugin[dir: %s, ramp-policy: %s]: %s",
        pluginDir, rampPolicyName, resources));
    return DIUtils.getClassLoader(resources, this.parentClassLoader);
  }

  /**
   * Constructs a {@link RampPolicy} instance by name, merging the plugin's common props into
   * {@code props} (without overriding caller-supplied keys) before construction.
   *
   * @param rampPolicyName registered policy name (plugin dir name or built-in simple class name)
   * @param props runtime props for this execution; mutated in place with defaults
   * @throws RampPolicyExecutionException if the name is not registered
   */
  public RampPolicy buildRampPolicyExecutor(final String rampPolicyName, Props props) {
    // Read the plugin set through the synchronized getter so we see a consistent snapshot even
    // while loadPlugins() is swapping it.
    final RampPolicyPluginSet pluginSet = getRampPolicyPluginSet();
    final Class<? extends Object> executorClass = pluginSet.getPluginClass(rampPolicyName);
    if (executorClass == null) {
      throw new RampPolicyExecutionException(String.format(
          "Ramp Policy is unrecognized. Could not construct ramp policy [%s]", rampPolicyName));
    }

    // Backfill defaults from the plugin's common props (falling back to the global common props).
    Props commonProps = pluginSet.getPluginCommonProps(rampPolicyName);
    if (commonProps == null) {
      commonProps = pluginSet.getCommonPluginProps();
    }
    if (commonProps != null) {
      for (final String k : commonProps.getKeySet()) {
        if (!props.containsKey(k)) {
          props.put(k, commonProps.get(k));
        }
      }
    }
    props = PropsUtils.resolveProps(props);

    // FIX: was getPluginCommonProps(rampPolicyName), an apparent copy-paste slip — the fallback
    // to getCommonPluginLoadProps() shows the per-plugin LOAD props were intended here.
    Props loadProps = pluginSet.getPluginLoadProps(rampPolicyName);
    if (loadProps != null) {
      loadProps = PropsUtils.resolveProps(loadProps);
    } else {
      loadProps = pluginSet.getCommonPluginLoadProps();
      if (loadProps == null) {
        loadProps = new Props();
      }
    }

    return (RampPolicy) Utils.callConstructor(executorClass, loadProps, props);
  }

  /**
   * Public for test reasons. Will need to move tests to the same package.
   */
  public synchronized RampPolicyPluginSet getRampPolicyPluginSet() {
    return this.pluginSet;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/RampPolicyManagerException.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.ramppolicy; /** * Defined Exception Type for Ramp Policy Manager */ public class RampPolicyManagerException extends RuntimeException { private static final long serialVersionUID = 1L; public RampPolicyManagerException(final String message) { super(message); } public RampPolicyManagerException(final Throwable cause) { super(cause); } public RampPolicyManagerException(final String message, final Throwable cause) { super(message, cause); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/RampPolicyPluginSet.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.ramppolicy;

import azkaban.utils.Props;
import java.util.HashMap;
import java.util.Map;

/**
 * Container for ramp-policy plugins: the policy {@code Class} objects keyed by name, the props
 * used to load each plugin, and the default runtime props handed to each plugin.
 *
 * <p>Not thread safe — population is expected to be driven exclusively by the
 * {@code RampPolicyManager}, which swaps whole instances atomically.
 */
public class RampPolicyPluginSet {

  // Per-policy registrations, all keyed by the policy's name.
  private final Map<String, Class<? extends RampPolicy>> policyClassesByName;
  private final Map<String, Props> runtimePropsByName;
  private final Map<String, Props> loadPropsByName;

  // Shared defaults used when a policy has no entry of its own.
  private Props commonProps;
  private Props commonLoadProps;

  /** Creates an empty plugin set. */
  public RampPolicyPluginSet() {
    this.policyClassesByName = new HashMap<>();
    this.runtimePropsByName = new HashMap<>();
    this.loadPropsByName = new HashMap<>();
  }

  /** Shallow-copies another plugin set (maps are copied; Props instances are shared). */
  public RampPolicyPluginSet(final RampPolicyPluginSet clone) {
    this.policyClassesByName = new HashMap<>(clone.policyClassesByName);
    this.runtimePropsByName = new HashMap<>(clone.runtimePropsByName);
    this.loadPropsByName = new HashMap<>(clone.loadPropsByName);
    this.commonProps = clone.commonProps;
    this.commonLoadProps = clone.commonLoadProps;
  }

  /** Returns the runtime properties shared by every ramp policy. */
  public Props getCommonPluginProps() {
    return this.commonProps;
  }

  /** Sets the runtime properties shared by every ramp policy. */
  public void setCommonPluginProps(final Props commonProps) {
    this.commonProps = commonProps;
  }

  /** Returns the load-time properties shared by every plugin. */
  public Props getCommonPluginLoadProps() {
    return this.commonLoadProps;
  }

  /** Sets the load-time properties shared by every plugin. */
  public void setCommonPluginLoadProps(final Props commonLoadProps) {
    this.commonLoadProps = commonLoadProps;
  }

  /** Returns the props used to set up and load the named plugin, or null if unregistered. */
  public Props getPluginLoadProps(final String rampPolicyName) {
    return this.loadPropsByName.get(rampPolicyName);
  }

  /** Returns the default runtime props handed to the named plugin, or null if unregistered. */
  public Props getPluginCommonProps(final String rampPolicyName) {
    return this.runtimePropsByName.get(rampPolicyName);
  }

  /** Returns the registered policy class for the name, or null if unregistered. */
  public Class<? extends RampPolicy> getPluginClass(final String rampPolicyName) {
    return this.policyClassesByName.get(rampPolicyName);
  }

  /** Registers a policy class under the given name. */
  public void addPluginClass(final String rampPolicyName,
      final Class<? extends RampPolicy> rampPolicyClass) {
    this.policyClassesByName.put(rampPolicyName, rampPolicyClass);
  }

  /**
   * Registers default runtime props for a policy. A null {@code props} is silently ignored.
   *
   * @param rampPolicyName ramp policy name
   * @param props nullable props
   */
  public void addPluginProps(final String rampPolicyName, final Props props) {
    if (props == null) {
      return;
    }
    this.runtimePropsByName.put(rampPolicyName, props);
  }

  /**
   * Registers load-time props for a policy. A null {@code props} is silently ignored.
   *
   * @param rampPolicyName ramp policy name
   * @param props nullable props
   */
  public void addPluginLoadProps(final String rampPolicyName, final Props props) {
    if (props == null) {
      return;
    }
    this.loadPropsByName.put(rampPolicyName, props);
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/SimpleAutoRampPolicy.java
/*
 * Copyright 2019 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.ramppolicy;

import azkaban.executor.ExecutableFlow;
import azkaban.executor.ExecutableRamp;
import azkaban.utils.Props;
import azkaban.utils.TimeUtils;
import com.google.common.collect.ImmutableList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Simple Auto Ramp Policy divided into 5 stages, with percentage boundaries taken from
 * RAMP_STAGE_RESCALE_TABLE:
 * stage 1: below 5%
 * stage 2: below 25%
 * stage 3: below 50%
 * stage 4: below 75%
 * stage 5: 100%
 *
 * NOTE(review): the original javadoc listed stage 2 as 20%, but the rescale table below uses 25 —
 * confirm which value is intended (SimpleQuickRampPolicy's table does match its javadoc).
 *
 * Unlike the other simple policies, this one also auto-advances the ramp stage over time in
 * {@link #preprocess}, once enough days have passed per AUTO_RAMP_INTERVAL_TABLE.
 */
public class SimpleAutoRampPolicy extends SimpleRampPolicy {

  private static final int MAX_RAMP_STAGE = 5;

  // Percentage boundaries mapping a flow's ramp-percentage id to a stage (see getRampStage).
  private static final ImmutableList<Integer> RAMP_STAGE_RESCALE_TABLE =
      ImmutableList.<Integer>builder()
          .add(5, 25, 50, 75)
          .build();

  // Minimum number of days a ramp must sit at stage i (1-based) before auto-advancing.
  private static final ImmutableList<Integer> AUTO_RAMP_INTERVAL_TABLE =
      ImmutableList.<Integer>builder()
          .add(1, 2, 3, 4)
          .build();

  private static final Logger LOGGER = LoggerFactory.getLogger(SimpleAutoRampPolicy.class);

  public SimpleAutoRampPolicy(Props sysProps, Props privateProps) {
    super(sysProps, privateProps);
  }

  @Override
  protected int getMaxRampStage() {
    return MAX_RAMP_STAGE;
  }

  /**
   * Maps the flow's ramp-percentage id to a 1-based stage: the first table boundary strictly
   * greater than the percentage determines the stage; anything at or above the last boundary
   * lands in the final stage.
   */
  @Override
  protected int getRampStage(ExecutableFlow flow) {
    int percentage = flow.getRampPercentageId();
    for(int i = 0; i < RAMP_STAGE_RESCALE_TABLE.size(); i++) {
      if (percentage < RAMP_STAGE_RESCALE_TABLE.get(i)) {
        return (i + 1);
      }
    }
    return MAX_RAMP_STAGE;
  }

  /**
   * Auto-advances the ramp one step when it has spent at least
   * AUTO_RAMP_INTERVAL_TABLE[stage - 1] days since its start time and has not yet reached the
   * final stage. Any failure is logged and swallowed so preprocessing never blocks execution.
   *
   * NOTE(review): "escaped"/"daysEscapedOver" presumably means days *elapsed* since the start
   * time — confirm against TimeUtils. Also note rampUp() is passed maxStage; whether that is a
   * cap or a target stage cannot be determined from this file.
   */
  @Override
  protected void preprocess(ExecutableRamp executableRamp) {
    int escapedDays = TimeUtils.daysEscapedOver(executableRamp.getStartTime());
    int rampStage = executableRamp.getStage();
    int maxStage = getMaxRampStage();
    if (rampStage == 0) {
      // The ramp has not started yet, so auto-ramp must not be triggered.
      return;
    }
    try {
      if (escapedDays >= AUTO_RAMP_INTERVAL_TABLE.get(rampStage - 1)) {
        if (rampStage < maxStage) {
          // Ramp up
          executableRamp.rampUp(maxStage);
          // (log format string has an unbalanced "(" — left untouched; it is runtime output)
          LOGGER.info("[AUTO RAMP UP] (rampId = {}, current Stage = {}, new Stage = {}, timeStamp = {}",
              executableRamp.getId(), rampStage, executableRamp.getStage(),
              executableRamp.getLastUpdatedTime());
        }
      }
    } catch (Exception e) {
      LOGGER.error("[AUTO RAMP ERROR] (rampId = {}, ramStage = {}, message = {}",
          executableRamp.getId(), rampStage, e.getMessage());
    }
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/SimpleQuickRampPolicy.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.ramppolicy; import azkaban.executor.ExecutableFlow; import azkaban.utils.Props; import com.google.common.collect.ImmutableList; /** * Simple Auto Ramp Policy will be divided to 4 stages * stage 1: 5% * stage 2: 20% * stage 3: 50% * stage 4: 100% */ public class SimpleQuickRampPolicy extends SimpleRampPolicy { private static final int MAX_RAMP_STAGE = 4; private static final ImmutableList<Integer> RAMP_STAGE_RESCALE_TABLE = ImmutableList.<Integer>builder() .add(5, 20, 50) .build(); public SimpleQuickRampPolicy(Props sysProps, Props privateProps) { super(sysProps, privateProps); } @Override protected int getMaxRampStage() { return MAX_RAMP_STAGE; } @Override protected int getRampStage(ExecutableFlow flow) { int percentage = flow.getRampPercentageId(); for(int i = 0; i < RAMP_STAGE_RESCALE_TABLE.size(); i++) { if (percentage < RAMP_STAGE_RESCALE_TABLE.get(i)) { return (i + 1); } } return MAX_RAMP_STAGE; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/ramppolicy/SimpleRampPolicy.java
package azkaban.ramppolicy; import azkaban.executor.ExecutableFlow; import azkaban.executor.ExecutableRamp; import azkaban.utils.Props; /** * Define a simple ramp policy to ramp by percentage */ public class SimpleRampPolicy extends AbstractRampPolicy { private static final int MAX_RAMP_STAGE = 100; public SimpleRampPolicy(Props sysProps, Props privateProps) { super(sysProps, privateProps); } @Override protected boolean isRampTestEnabled(ExecutableFlow flow, ExecutableRamp executableRamp) { int rampStage = executableRamp.getStage(); // scaled from 0 - 100 to represent the ramp percentage if (rampStage >= getMaxRampStage()) { return true; } return (getRampStage(flow) <= rampStage); } protected int getMaxRampStage() { return MAX_RAMP_STAGE; } protected int getRampStage(ExecutableFlow flow) { return flow.getRampPercentageId() + 1; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/scheduler/Schedule.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.scheduler;

import azkaban.executor.ExecutionOptions;
import azkaban.utils.Pair;
import azkaban.utils.TimeUtils;
import azkaban.utils.Utils;
import java.util.Date;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadablePeriod;
import org.quartz.CronExpression;

/**
 * A flow's schedule: project/flow identity, timing (first/end/next execution times), recurrence
 * (either a joda {@link ReadablePeriod} or a Quartz cron expression — at most one is non-null for
 * a recurring schedule), and the {@link ExecutionOptions} to launch with.
 */
public class Schedule {

  private final int projectId;
  private final String projectName;
  private final String flowName;
  private final long firstSchedTime;
  private final long endSchedTime;
  private final DateTimeZone timezone;
  private final long lastModifyTime;
  private final ReadablePeriod period;
  private final String submitUser;
  private final String status;
  private final long submitTime;
  private final String cronExpression;
  // Always true: occurrences whose time has already passed are skipped, never back-filled.
  private final boolean skipPastOccurrences = true;
  private int scheduleId;
  private long nextExecTime;
  private ExecutionOptions executionOptions;

  /**
   * @param period recurrence period, or null when non-recurring / cron-based
   * @param cronExpression Quartz cron string, or null when non-recurring / period-based
   */
  public Schedule(final int scheduleId,
      final int projectId,
      final String projectName,
      final String flowName,
      final String status,
      final long firstSchedTime,
      final long endSchedTime,
      final DateTimeZone timezone,
      final ReadablePeriod period,
      final long lastModifyTime,
      final long nextExecTime,
      final long submitTime,
      final String submitUser,
      final ExecutionOptions executionOptions,
      final String cronExpression) {
    this.scheduleId = scheduleId;
    this.projectId = projectId;
    this.projectName = projectName;
    this.flowName = flowName;
    this.firstSchedTime = firstSchedTime;
    this.endSchedTime = endSchedTime;
    this.timezone = timezone;
    this.lastModifyTime = lastModifyTime;
    this.period = period;
    this.nextExecTime = nextExecTime;
    this.submitUser = submitUser;
    this.status = status;
    this.submitTime = submitTime;
    this.executionOptions = executionOptions;
    this.cronExpression = cronExpression;
  }

  public ExecutionOptions getExecutionOptions() {
    return this.executionOptions;
  }

  public void setFlowOptions(final ExecutionOptions executionOptions) {
    this.executionOptions = executionOptions;
  }

  /** Human-readable name: {@code project.flow (projectId)}. */
  public String getScheduleName() {
    return this.projectName + "." + this.flowName + " (" + this.projectId + ")";
  }

  @Override
  public String toString() {
    final String underlying =
        this.projectName + "." + this.flowName + " (" + this.projectId + ")"
            + " to be run at (starting) " + new DateTime(this.firstSchedTime).toDateTimeISO();
    if (this.period == null && this.cronExpression == null) {
      return underlying + " non-recurring";
    } else if (this.cronExpression != null) {
      return underlying + " with CronExpression {" + this.cronExpression + "} and timezone "
          + timezone.getID();
    } else {
      // FIX: message previously read "precurring" — a typo for "recurring".
      return underlying + " with recurring period of " + TimeUtils.createPeriodString(this.period);
    }
  }

  /** Identity key used by the scheduler's maps: (projectId, flowName). */
  public Pair<Integer, String> getScheduleIdentityPair() {
    return new Pair<>(getProjectId(), getFlowName());
  }

  public int getScheduleId() {
    return this.scheduleId;
  }

  public void setScheduleId(final int scheduleId) {
    this.scheduleId = scheduleId;
  }

  public int getProjectId() {
    return this.projectId;
  }

  public String getProjectName() {
    return this.projectName;
  }

  public String getFlowName() {
    return this.flowName;
  }

  public long getFirstSchedTime() {
    return this.firstSchedTime;
  }

  public DateTimeZone getTimezone() {
    return this.timezone;
  }

  public long getLastModifyTime() {
    return this.lastModifyTime;
  }

  public ReadablePeriod getPeriod() {
    return this.period;
  }

  public long getNextExecTime() {
    return this.nextExecTime;
  }

  public void setNextExecTime(final long nextExecTime) {
    this.nextExecTime = nextExecTime;
  }

  public String getSubmitUser() {
    return this.submitUser;
  }

  public String getStatus() {
    return this.status;
  }

  public long getSubmitTime() {
    return this.submitTime;
  }

  public String getCronExpression() {
    return this.cronExpression;
  }

  /**
   * Advances {@code nextExecTime} to the next future occurrence if it has already passed.
   *
   * @return true if the schedule still has a future execution time (either it was already in the
   *     future or it was advanced); false if the schedule is non-recurring and expired
   */
  public boolean updateTime() {
    if (new DateTime(this.nextExecTime).isAfterNow()) {
      return true;
    }

    if (this.cronExpression != null) {
      final DateTime nextTime = getNextCronRuntime(
          this.nextExecTime, this.timezone,
          Utils.parseCronExpression(this.cronExpression, this.timezone));
      this.nextExecTime = nextTime.getMillis();
      return true;
    }

    if (this.period != null) {
      final DateTime nextTime = getNextRuntime(this.nextExecTime, this.timezone, this.period);
      this.nextExecTime = nextTime.getMillis();
      return true;
    }

    return false;
  }

  // Steps the schedule time forward by whole periods until it lands strictly in the future.
  // Bails out after 100000 increments to avoid spinning forever on a degenerate period.
  private DateTime getNextRuntime(final long scheduleTime, final DateTimeZone timezone,
      final ReadablePeriod period) {
    final DateTime now = new DateTime();
    DateTime date = new DateTime(scheduleTime).withZone(timezone);
    int count = 0;
    while (!now.isBefore(date)) {
      if (count > 100000) {
        throw new IllegalStateException(
            "100000 increments of period did not get to present time.");
      }
      if (period == null) {
        break;
      } else {
        date = date.plus(period);
      }
      count += 1;
    }
    return date;
  }

  /**
   * @param scheduleTime represents the time when Schedule Servlet receives the Cron Schedule API
   *     call.
   * @param timezone is always UTC (after 3.1.0)
   * @return the First Scheduled DateTime to run this flow.
   */
  private DateTime getNextCronRuntime(final long scheduleTime, final DateTimeZone timezone,
      final CronExpression ce) {
    Date date = new DateTime(scheduleTime).withZone(timezone).toDate();
    if (ce != null) {
      date = ce.getNextValidTimeAfter(date);
    }
    return new DateTime(date);
  }

  public boolean isRecurring() {
    return this.period != null || this.cronExpression != null;
  }

  public boolean skipPastOccurrences() {
    return this.skipPastOccurrences;
  }

  public long getEndSchedTime() {
    return this.endSchedTime;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/scheduler/ScheduleLoader.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.scheduler;

import java.util.List;

/**
 * Persistence contract for flow schedules, used by {@link ScheduleManager} to store, update, and
 * reload {@link Schedule} instances.
 */
public interface ScheduleLoader {

  /** Persists a newly created schedule. */
  void insertSchedule(Schedule s) throws ScheduleManagerException;

  /** Persists changes to an existing schedule. */
  void updateSchedule(Schedule s) throws ScheduleManagerException;

  /** Deletes the schedule from the backing store. */
  void removeSchedule(Schedule s) throws ScheduleManagerException;

  /** Persists only the schedule's next-execution time. */
  void updateNextExecTime(Schedule s) throws ScheduleManagerException;

  /** Returns schedules changed since the last load, so callers can refresh their caches. */
  List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException;
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/scheduler/ScheduleManager.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.scheduler;

import azkaban.executor.ExecutionOptions;
import azkaban.trigger.TriggerAgent;
import azkaban.trigger.TriggerStatus;
import azkaban.utils.Pair;
import azkaban.utils.Props;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.joda.time.DateTimeZone;
import org.joda.time.ReadablePeriod;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;

/**
 * The ScheduleManager stores and executes the schedule. It uses a single thread instead and waits
 * until correct loading time for the flow. It will not remove the flow from the schedule when it
 * is run, which can potentially allow the flow to and overlap each other.
 *
 * TODO kunkun-tang: When new AZ quartz Scheduler comes, we will remove this class.
 */
public class ScheduleManager implements TriggerAgent {

  public static final String SIMPLE_TIME_TRIGGER = "SimpleTimeTrigger";
  private static final Logger logger = Logger.getLogger(ScheduleManager.class);

  // Formatter used only for log messages.
  private final DateTimeFormatter dateFormat =
      DateTimeFormat.forPattern("MM-dd-yyyy HH:mm:ss:SSS");
  private final ScheduleLoader loader;

  // In-memory caches of the persisted schedules, keyed two ways.
  private final Map<Integer, Schedule> scheduleIDMap = new LinkedHashMap<>();
  private final Map<Pair<Integer, String>, Schedule> scheduleIdentityPairMap =
      new LinkedHashMap<>();

  /**
   * Give the schedule manager a loader class that will properly load the schedule.
   */
  @Inject
  public ScheduleManager(final ScheduleLoader loader) {
    this.loader = loader;
  }

  // Since ScheduleManager was already replaced by TriggerManager, many methods like start are
  // never used.
  @Deprecated
  @Override
  public void start() throws ScheduleManagerException {
  }

  // Refreshes the local caches from the loader, dropping expired schedules.
  // Only do this when using external runner.
  private synchronized void updateLocal() throws ScheduleManagerException {
    final List<Schedule> updates = this.loader.loadUpdatedSchedules();
    for (final Schedule s : updates) {
      if (s.getStatus().equals(TriggerStatus.EXPIRED.toString())) {
        onScheduleExpire(s);
      } else {
        internalSchedule(s);
      }
    }
  }

  private void onScheduleExpire(final Schedule s) {
    removeSchedule(s);
  }

  /**
   * Shutdowns the scheduler thread. After shutdown, it may not be safe to use it again.
   */
  @Override
  public void shutdown() {
  }

  /**
   * Retrieves a copy of the list of schedules.
   */
  public synchronized List<Schedule> getSchedules()
      throws ScheduleManagerException {
    updateLocal();
    return new ArrayList<>(this.scheduleIDMap.values());
  }

  /**
   * Returns the scheduled flow for the flow name.
   *
   * NOTE(review): this method (and the int overload below) reads the shared map outside any
   * synchronized block, unlike getSchedules()/removeSchedule() — confirm whether that is safe
   * given the callers, or whether these should be synchronized too.
   */
  public Schedule getSchedule(final int projectId, final String flowId)
      throws ScheduleManagerException {
    updateLocal();
    return this.scheduleIdentityPairMap.get(new Pair<>(projectId, flowId));
  }

  /**
   * Returns the scheduled flow for the scheduleId.
   *
   * @param scheduleId Schedule ID
   */
  public Schedule getSchedule(final int scheduleId) throws ScheduleManagerException {
    updateLocal();
    return this.scheduleIDMap.get(scheduleId);
  }

  /**
   * Removes the flow from the schedule if it exists.
   */
  public synchronized void removeSchedule(final Schedule sched) {
    final Pair<Integer, String> identityPairMap = sched.getScheduleIdentityPair();

    final Schedule schedule = this.scheduleIdentityPairMap.get(identityPairMap);
    if (schedule != null) {
      this.scheduleIdentityPairMap.remove(identityPairMap);
    }
    this.scheduleIDMap.remove(sched.getScheduleId());

    try {
      this.loader.removeSchedule(sched);
    } catch (final ScheduleManagerException e) {
      // FIX: was logger.error(e), which logs only the throwable's toString — pass a message and
      // the throwable so the stack trace is preserved.
      logger.error("Failed to remove schedule " + sched.getScheduleName(), e);
    }
  }

  /** Creates, logs, and inserts a period-based (or non-recurring) schedule. */
  public Schedule scheduleFlow(final int scheduleId,
      final int projectId,
      final String projectName,
      final String flowName,
      final String status,
      final long firstSchedTime,
      final long endSchedTime,
      final DateTimeZone timezone,
      final ReadablePeriod period,
      final long lastModifyTime,
      final long nextExecTime,
      final long submitTime,
      final String submitUser,
      final ExecutionOptions execOptions) {
    final Schedule sched = new Schedule(scheduleId, projectId, projectName, flowName, status,
        firstSchedTime, endSchedTime, timezone, period, lastModifyTime, nextExecTime,
        submitTime, submitUser, execOptions, null);
    logger.info("Scheduling flow '" + sched.getScheduleName() + "' for "
        + this.dateFormat.print(firstSchedTime) + " with a period of "
        + (period == null ? "(non-recurring)" : period));

    insertSchedule(sched);
    return sched;
  }

  /** Creates, logs, and inserts a cron-based schedule. */
  public Schedule cronScheduleFlow(final int scheduleId,
      final int projectId,
      final String projectName,
      final String flowName,
      final String status,
      final long firstSchedTime,
      final long endSchedTime,
      final DateTimeZone timezone,
      final long lastModifyTime,
      final long nextExecTime,
      final long submitTime,
      final String submitUser,
      final ExecutionOptions execOptions,
      final String cronExpression) {
    final Schedule sched = new Schedule(scheduleId, projectId, projectName, flowName, status,
        firstSchedTime, endSchedTime, timezone, null, lastModifyTime, nextExecTime,
        submitTime, submitUser, execOptions, cronExpression);
    logger.info("Scheduling flow '" + sched.getScheduleName() + "' for "
        + this.dateFormat.print(firstSchedTime) + " cron Expression = " + cronExpression);

    insertSchedule(sched);
    return sched;
  }

  /**
   * Schedules the flow, but doesn't save the schedule afterwards.
   */
  private synchronized void internalSchedule(final Schedule s) {
    this.scheduleIDMap.put(s.getScheduleId(), s);
    this.scheduleIdentityPairMap.put(s.getScheduleIdentityPair(), s);
  }

  /**
   * Adds a flow to the schedule. If a schedule with the same (projectId, flowName) identity
   * already exists, this updates it in place (reusing its schedule id); otherwise a new row is
   * inserted. Non-recurring schedules whose time has already passed are rejected with an error
   * log.
   */
  public synchronized void insertSchedule(final Schedule s) {
    final Schedule exist = this.scheduleIdentityPairMap.get(s.getScheduleIdentityPair());
    if (s.updateTime()) {
      try {
        if (exist == null) {
          this.loader.insertSchedule(s);
          internalSchedule(s);
        } else {
          s.setScheduleId(exist.getScheduleId());
          this.loader.updateSchedule(s);
          internalSchedule(s);
        }
      } catch (final ScheduleManagerException e) {
        // FIX: was logger.error(e) — preserve the stack trace with a message + throwable.
        logger.error("Failed to insert schedule " + s.getScheduleName(), e);
      }
    } else {
      logger.error("The provided schedule is non-recurring and the scheduled time already passed. "
          + s.getScheduleName());
    }
  }

  @Override
  public void loadTriggerFromProps(final Props props) throws ScheduleManagerException {
    throw new ScheduleManagerException("create " + getTriggerSource()
        + " from json not supported yet");
  }

  @Override
  public String getTriggerSource() {
    return SIMPLE_TIME_TRIGGER;
  }
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/scheduler/ScheduleManagerException.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.scheduler; public class ScheduleManagerException extends Exception { private static final long serialVersionUID = 1L; public ScheduleManagerException(final String message) { super(message); } public ScheduleManagerException(final String message, final Throwable cause) { super(message, cause); } public ScheduleManagerException(final Exception e) { super(e); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/scheduler/TriggerBasedScheduleLoader.java
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.scheduler; import azkaban.Constants; import azkaban.trigger.Condition; import azkaban.trigger.ConditionChecker; import azkaban.trigger.Trigger; import azkaban.trigger.TriggerAction; import azkaban.trigger.TriggerManager; import azkaban.trigger.TriggerManagerAdapter; import azkaban.trigger.TriggerManagerException; import azkaban.trigger.builtin.BasicTimeChecker; import azkaban.trigger.builtin.ExecuteFlowAction; import javax.inject.Inject; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.log4j.Logger; public class TriggerBasedScheduleLoader implements ScheduleLoader { private static final Logger logger = Logger .getLogger(TriggerBasedScheduleLoader.class); private final TriggerManagerAdapter triggerManager; private final String triggerSource; private long lastUpdateTime = -1; @Inject public TriggerBasedScheduleLoader(final TriggerManager triggerManager) { this.triggerManager = triggerManager; this.triggerSource = ScheduleManager.SIMPLE_TIME_TRIGGER; } private Trigger scheduleToTrigger(final Schedule s) { final Condition triggerCondition = createTriggerCondition(s); final Condition expireCondition = createExpireCondition(s); final List<TriggerAction> actions = createActions(s); final Trigger t = new Trigger.TriggerBuilder(s.getSubmitUser(), this.triggerSource, triggerCondition, expireCondition, actions) 
.setSubmitTime(s.getSubmitTime()) .setLastModifyTime(s.getLastModifyTime()) .setId(s.getScheduleId()) .build(); if (s.isRecurring()) { t.setResetOnTrigger(true); } else { t.setResetOnTrigger(false); } return t; } private List<TriggerAction> createActions(final Schedule s) { final List<TriggerAction> actions = new ArrayList<>(); final ExecuteFlowAction executeAct = new ExecuteFlowAction("executeFlowAction", s.getProjectId(), s.getProjectName(), s.getFlowName(), s.getSubmitUser(), s.getExecutionOptions()); actions.add(executeAct); return actions; } private Condition createTriggerCondition(final Schedule s) { final Map<String, ConditionChecker> checkers = new HashMap<>(); final ConditionChecker checker = new BasicTimeChecker("BasicTimeChecker_1", s.getFirstSchedTime(), s.getTimezone(), s.isRecurring(), s.skipPastOccurrences(), s.getPeriod(), s.getCronExpression()); checkers.put(checker.getId(), checker); final String expr = checker.getId() + ".eval()"; final Condition cond = new Condition(checkers, expr); return cond; } private Condition createExpireCondition(final Schedule s) { final Map<String, ConditionChecker> checkers = new HashMap<>(); final ConditionChecker checker = new BasicTimeChecker("EndTimeChecker_1", s.getFirstSchedTime(), s.getTimezone(), s.getEndSchedTime(), false, false, null, null); checkers.put(checker.getId(), checker); final String expr = checker.getId() + ".eval()"; return new Condition(checkers, expr); } @Override public void insertSchedule(final Schedule s) throws ScheduleManagerException { final Trigger t = scheduleToTrigger(s); try { this.triggerManager.insertTrigger(t, t.getSubmitUser()); s.setScheduleId(t.getTriggerId()); } catch (final TriggerManagerException e) { throw new ScheduleManagerException("Failed to insert new schedule!", e); } } @Override public void updateSchedule(final Schedule s) throws ScheduleManagerException { final Trigger t = scheduleToTrigger(s); try { this.triggerManager.updateTrigger(t, t.getSubmitUser()); } catch 
(final TriggerManagerException e) { throw new ScheduleManagerException("Failed to update schedule!", e); } } private Schedule triggerToSchedule(final Trigger t) throws ScheduleManagerException { final BasicTimeChecker triggerTimeChecker = getBasicTimeChecker( t.getTriggerCondition().getCheckers()); final BasicTimeChecker endTimeChecker = getEndTimeChecker(t); final List<TriggerAction> actions = t.getActions(); ExecuteFlowAction act = null; for (final TriggerAction action : actions) { if (action.getType().equals(ExecuteFlowAction.type)) { act = (ExecuteFlowAction) action; break; } } if (triggerTimeChecker != null && act != null) { return new Schedule(t.getTriggerId(), act.getProjectId(), act.getProjectName(), act.getFlowName(), t.getStatus().toString(), triggerTimeChecker.getFirstCheckTime(), endTimeChecker == null ? Constants.DEFAULT_SCHEDULE_END_EPOCH_TIME : endTimeChecker.getNextCheckTime(), triggerTimeChecker.getTimeZone(), triggerTimeChecker.getPeriod(), t.getLastModifyTime(), triggerTimeChecker.getNextCheckTime(), t.getSubmitTime(), t.getSubmitUser(), act.getExecutionOptions(), triggerTimeChecker.getCronExpression()); } else { logger.error("Failed to parse schedule from trigger!"); throw new ScheduleManagerException( "Failed to parse schedule from trigger!"); } } // expirecheckers or triggerCheckers only have BasicTimeChecker today. This should be refactored in future. 
private BasicTimeChecker getBasicTimeChecker(final Map<String, ConditionChecker> checkers) { for (final ConditionChecker checker : checkers.values()) { if (checker.getType().equals(BasicTimeChecker.type)) { return (BasicTimeChecker) checker; } } return null; } private BasicTimeChecker getEndTimeChecker(final Trigger t) { if (t.getExpireCondition().getExpression().contains("EndTimeChecker")) { return getBasicTimeChecker(t.getExpireCondition().getCheckers()); } return null; } @Override public void removeSchedule(final Schedule s) throws ScheduleManagerException { try { this.triggerManager.removeTrigger(s.getScheduleId(), s.getSubmitUser()); } catch (final TriggerManagerException e) { throw new ScheduleManagerException(e.getMessage()); } } @Override public void updateNextExecTime(final Schedule s) throws ScheduleManagerException { } @Override public synchronized List<Schedule> loadUpdatedSchedules() throws ScheduleManagerException { final List<Trigger> triggers; try { triggers = this.triggerManager.getTriggerUpdates(this.triggerSource, this.lastUpdateTime); } catch (final TriggerManagerException e) { e.printStackTrace(); throw new ScheduleManagerException(e); } final List<Schedule> schedules = new ArrayList<>(); for (final Trigger t : triggers) { this.lastUpdateTime = Math.max(this.lastUpdateTime, t.getLastModifyTime()); final Schedule s = triggerToSchedule(t); schedules.add(s); logger.info("loaded schedule for " + s.getProjectName() + " (project_ID: " + s.getProjectId() + ")"); } return schedules; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/AbstractServiceServlet.java
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.server; import azkaban.Constants; import java.io.IOException; import java.io.OutputStream; import javax.servlet.ServletConfig; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.codehaus.jackson.map.ObjectMapper; public class AbstractServiceServlet extends HttpServlet { public static final String JSON_MIME_TYPE = "application/json"; private static final long serialVersionUID = 1L; private AzkabanServer application; @Override public void init(final ServletConfig config) throws ServletException { this.application = (AzkabanServer) config.getServletContext() .getAttribute(Constants.AZKABAN_SERVLET_CONTEXT_KEY); if (this.application == null) { throw new IllegalStateException( "No batch application is defined in the servlet context!"); } } protected void writeJSON(final HttpServletResponse resp, final Object obj) throws IOException { resp.setContentType(JSON_MIME_TYPE); final ObjectMapper mapper = new ObjectMapper(); final OutputStream stream = resp.getOutputStream(); mapper.writeValue(stream, obj); } public boolean hasParam(final HttpServletRequest request, final String param) { return request.getParameter(param) != null; } public String getParam(final HttpServletRequest request, final String name) throws ServletException { final String p = 
request.getParameter(name); if (p == null) { throw new ServletException("Missing required parameter '" + name + "'."); } else { return p; } } public String getParam(final HttpServletRequest request, final String name, final String defaultVal) { final String p = request.getParameter(name); if (p == null) { return defaultVal; } return p; } public int getIntParam(final HttpServletRequest request, final String name) throws ServletException { final String p = getParam(request, name); return Integer.parseInt(p); } public int getIntParam(final HttpServletRequest request, final String name, final int defaultVal) { if (hasParam(request, name)) { try { return getIntParam(request, name); } catch (final Exception e) { return defaultVal; } } return defaultVal; } public long getLongParam(final HttpServletRequest request, final String name) throws ServletException { final String p = getParam(request, name); return Long.parseLong(p); } public long getLongParam(final HttpServletRequest request, final String name, final long defaultVal) { if (hasParam(request, name)) { try { return getLongParam(request, name); } catch (final Exception e) { return defaultVal; } } return defaultVal; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/AzkabanServer.java
/*
 * Copyright 2012 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package azkaban.server;

import static azkaban.Constants.DEFAULT_PORT_NUMBER;
import static azkaban.Constants.DEFAULT_SSL_PORT_NUMBER;

import azkaban.Constants;
import azkaban.server.session.SessionCache;
import azkaban.user.UserManager;
import azkaban.utils.Props;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Arrays;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.log4j.Logger;
import org.apache.velocity.app.VelocityEngine;

/**
 * Base class for Azkaban server processes. Provides static helpers to locate and load
 * the azkaban configuration (from -c/--conf or AZKABAN_HOME) and declares the accessors
 * every concrete server must expose.
 */
public abstract class AzkabanServer {

  private static final Logger logger = Logger.getLogger(AzkabanServer.class);
  // Cached props; only set by the single-argument loadProps overload.
  private static Props azkabanProperties = null;

  /** Loads the server props from command-line args and caches them for getAzkabanProperties(). */
  public static Props loadProps(final String[] args) {
    azkabanProperties = loadProps(args, new OptionParser());
    return azkabanProperties;
  }

  /** Returns the props cached by loadProps(String[]), or null if never loaded. */
  public static Props getAzkabanProperties() {
    return azkabanProperties;
  }

  /**
   * Loads the server props using the given parser. Prefers the -c/--conf directory from
   * args; otherwise falls back to $AZKABAN_HOME/conf. Returns null if no valid config
   * directory is found. Note: this overload does NOT update the static cache.
   */
  public static Props loadProps(final String[] args, final OptionParser parser) {
    final OptionSpec<String> configDirectory = parser.acceptsAll(
        Arrays.asList("c", "conf"), "The conf directory for Azkaban.")
        .withRequiredArg()
        .describedAs("conf")
        .ofType(String.class);

    // Grabbing the azkaban settings from the conf directory.
    Props azkabanSettings = null;
    final OptionSet options = parser.parse(args);
    if (options.has(configDirectory)) {
      final String path = options.valueOf(configDirectory);
      logger.info("Loading azkaban settings file from " + path);
      final File dir = new File(path);
      if (!dir.exists()) {
        logger.error("Conf directory " + path + " doesn't exist.");
      } else if (!dir.isDirectory()) {
        logger.error("Conf directory " + path + " isn't a directory.");
      } else {
        azkabanSettings = loadAzkabanConfigurationFromDirectory(dir);
      }
    } else {
      logger.info("Conf parameter not set, attempting to get value from AZKABAN_HOME env.");
      azkabanSettings = loadConfigurationFromAzkabanHome();
    }
    if (azkabanSettings != null) {
      updateDerivedConfigs(azkabanSettings);
    }
    return azkabanSettings;
  }

  /**
   * Derives server.hostname / server.port / server.useSSL from the jetty settings so
   * downstream consumers read a single canonical set of keys.
   */
  private static void updateDerivedConfigs(final Props azkabanSettings) {
    final boolean isSslEnabled = azkabanSettings.getBoolean("jetty.use.ssl", true);
    // Pick the SSL port or the plain port depending on whether SSL is enabled.
    final int port = isSslEnabled
        ? azkabanSettings.getInt("jetty.ssl.port", DEFAULT_SSL_PORT_NUMBER)
        : azkabanSettings.getInt("jetty.port", DEFAULT_PORT_NUMBER);

    // setting stats configuration for connectors
    final String hostname = azkabanSettings.getString("jetty.hostname", "localhost");
    azkabanSettings.put("server.hostname", hostname);
    azkabanSettings.put("server.port", port);
    azkabanSettings.put("server.useSSL", String.valueOf(isSslEnabled));
  }

  /**
   * Loads props from a conf directory. The optional private properties file is loaded
   * first so that the public properties file layers on top of it (private values act as
   * the parent props). Returns null if neither file could be read.
   */
  public static Props loadAzkabanConfigurationFromDirectory(final File dir) {
    final File azkabanPrivatePropsFile =
        new File(dir, Constants.AZKABAN_PRIVATE_PROPERTIES_FILE);
    final File azkabanPropsFile = new File(dir, Constants.AZKABAN_PROPERTIES_FILE);

    Props props = null;
    try {
      // This is purely optional
      if (azkabanPrivatePropsFile.exists() && azkabanPrivatePropsFile.isFile()) {
        logger.info("Loading azkaban private properties file");
        props = new Props(null, azkabanPrivatePropsFile);
      }
      if (azkabanPropsFile.exists() && azkabanPropsFile.isFile()) {
        logger.info("Loading azkaban properties file");
        props = new Props(props, azkabanPropsFile);
      }
    } catch (final FileNotFoundException e) {
      logger.error("File not found. Could not load azkaban config file", e);
    } catch (final IOException e) {
      logger.error("File found, but error reading. Could not load azkaban config file", e);
    }
    return props;
  }

  /**
   * Loads the Azkaban property file from the AZKABAN_HOME conf directory
   *
   * @return Props instance, or null if AZKABAN_HOME is unset or unreadable
   */
  private static Props loadConfigurationFromAzkabanHome() {
    final String azkabanHome = System.getenv("AZKABAN_HOME");

    if (azkabanHome == null) {
      logger.error("AZKABAN_HOME not set. Will try default.");
      return null;
    }

    if (!new File(azkabanHome).isDirectory() || !new File(azkabanHome).canRead()) {
      logger.error(azkabanHome + " is not a readable directory.");
      return null;
    }

    final File confPath = new File(azkabanHome, Constants.DEFAULT_CONF_PATH);
    if (!confPath.exists() || !confPath.isDirectory() || !confPath.canRead()) {
      logger.error(azkabanHome + " does not contain a readable conf directory.");
      return null;
    }

    return loadAzkabanConfigurationFromDirectory(confPath);
  }

  /** Returns the props this server was configured with. */
  public abstract Props getServerProps();

  /** Returns the web session cache. */
  public abstract SessionCache getSessionCache();

  /** Returns the Velocity template engine. */
  public abstract VelocityEngine getVelocityEngine();

  /** Returns the user manager used for authentication and authorization. */
  public abstract UserManager getUserManager();
}
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/HttpRequestUtils.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.server; import azkaban.executor.DisabledJob; import azkaban.executor.ExecutionOptions; import azkaban.executor.ExecutionOptions.FailureAction; import azkaban.executor.ExecutorManagerException; import azkaban.executor.mail.DefaultMailCreator; import azkaban.user.Permission; import azkaban.user.Permission.Type; import azkaban.user.Role; import azkaban.user.User; import azkaban.user.UserManager; import azkaban.utils.JSONUtils; import java.io.BufferedReader; import java.io.IOException; import java.util.Arrays; import java.util.Enumeration; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.apache.commons.lang.StringUtils; public class HttpRequestUtils { public static ExecutionOptions parseFlowOptions(final HttpServletRequest req) throws ServletException { final ExecutionOptions execOptions = new ExecutionOptions(); if (hasParam(req, "failureAction")) { final String option = getParam(req, "failureAction"); if (option.equals("finishCurrent")) { execOptions.setFailureAction(FailureAction.FINISH_CURRENTLY_RUNNING); } else if (option.equals("cancelImmediately")) { execOptions.setFailureAction(FailureAction.CANCEL_ALL); } else if (option.equals("finishPossible")) { execOptions.setFailureAction(FailureAction.FINISH_ALL_POSSIBLE); } } if (hasParam(req, "failureEmailsOverride")) 
{ final boolean override = getBooleanParam(req, "failureEmailsOverride", false); execOptions.setFailureEmailsOverridden(override); } if (hasParam(req, "successEmailsOverride")) { final boolean override = getBooleanParam(req, "successEmailsOverride", false); execOptions.setSuccessEmailsOverridden(override); } if (hasParam(req, "failureEmails")) { final String emails = getParam(req, "failureEmails"); if (!emails.isEmpty()) { final String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+"); execOptions.setFailureEmails(Arrays.asList(emailSplit)); } } if (hasParam(req, "successEmails")) { final String emails = getParam(req, "successEmails"); if (!emails.isEmpty()) { final String[] emailSplit = emails.split("\\s*,\\s*|\\s*;\\s*|\\s+"); execOptions.setSuccessEmails(Arrays.asList(emailSplit)); } } if (hasParam(req, "notifyFailureFirst")) { execOptions.setNotifyOnFirstFailure(Boolean.parseBoolean(getParam(req, "notifyFailureFirst"))); } if (hasParam(req, "notifyFailureLast")) { execOptions.setNotifyOnLastFailure(Boolean.parseBoolean(getParam(req, "notifyFailureLast"))); } String concurrentOption = getParam(req, "concurrentOption", "skip"); execOptions.setConcurrentOption(concurrentOption); if (concurrentOption.equals("pipeline")) { final int pipelineLevel = getIntParam(req, "pipelineLevel"); execOptions.setPipelineLevel(pipelineLevel); } else if (concurrentOption.equals("queue")) { // Not yet implemented final int queueLevel = getIntParam(req, "queueLevel", 1); execOptions.setPipelineLevel(queueLevel); } String mailCreator = DefaultMailCreator.DEFAULT_MAIL_CREATOR; if (hasParam(req, "mailCreator")) { mailCreator = getParam(req, "mailCreator"); execOptions.setMailCreator(mailCreator); } final Map<String, String> flowParamGroup = getParamGroup(req, "flowOverride"); execOptions.addAllFlowParameters(flowParamGroup); if (hasParam(req, "disabled")) { final String disabled = getParam(req, "disabled"); if (!disabled.isEmpty()) { // TODO edlu: see if it's possible to pass in 
the new format final List<DisabledJob> disabledList = DisabledJob.fromDeprecatedObjectList((List < Object >) JSONUtils .parseJSONFromStringQuiet(disabled)); execOptions.setDisabledJobs(disabledList); } } return execOptions; } /** * <pre> * Remove following flow param if submitting user is not an Azkaban admin * FLOW_PRIORITY * USE_EXECUTOR * @param userManager * @param options * @param user * </pre> */ public static void filterAdminOnlyFlowParams(final UserManager userManager, final ExecutionOptions options, final User user) throws ExecutorManagerException { if (options == null || options.getFlowParameters() == null) { return; } final Map<String, String> params = options.getFlowParameters(); // is azkaban Admin if (!hasPermission(userManager, user, Type.ADMIN)) { params.remove(ExecutionOptions.FLOW_PRIORITY); params.remove(ExecutionOptions.USE_EXECUTOR); } else { validateIntegerParam(params, ExecutionOptions.FLOW_PRIORITY); validateIntegerParam(params, ExecutionOptions.USE_EXECUTOR); } } /** * parse a string as number and throws exception if parsed value is not a valid integer * * @throws ExecutorManagerException if paramName is not a valid integer */ public static boolean validateIntegerParam(final Map<String, String> params, final String paramName) throws ExecutorManagerException { if (params != null && params.containsKey(paramName) && !StringUtils.isNumeric(params.get(paramName))) { throw new ExecutorManagerException(paramName + " should be an integer"); } return true; } /** * returns true if user has access of type */ public static boolean hasPermission(final UserManager userManager, final User user, final Permission.Type type) { for (final String roleName : user.getRoles()) { final Role role = userManager.getRole(roleName); if (role.getPermission().isPermissionSet(type) || role.getPermission().isPermissionSet(Permission.Type.ADMIN)) { return true; } } return false; } /** * Checks for the existance of the parameter in the request */ public static boolean 
hasParam(final HttpServletRequest request, final String param) { return request.getParameter(param) != null; } /** * Retrieves the param from the http servlet request. Will throw an exception if not found */ public static String getParam(final HttpServletRequest request, final String name) throws ServletException { final String p = request.getParameter(name); if (p == null) { throw new ServletException("Missing required parameter '" + name + "'."); } else { return p; } } /** * Retrieves the param from the http servlet request. */ public static String getParam(final HttpServletRequest request, final String name, final String defaultVal) { final String p = request.getParameter(name); if (p == null) { return defaultVal; } return p; } /** * Returns the param and parses it into an int. Will throw an exception if not found, or a parse * error if the type is incorrect. */ public static int getIntParam(final HttpServletRequest request, final String name) throws ServletException { final String p = getParam(request, name); return Integer.parseInt(p); } public static int getIntParam(final HttpServletRequest request, final String name, final int defaultVal) { if (hasParam(request, name)) { try { return getIntParam(request, name); } catch (final Exception e) { return defaultVal; } } return defaultVal; } public static boolean getBooleanParam(final HttpServletRequest request, final String name) throws ServletException { final String p = getParam(request, name); return Boolean.parseBoolean(p); } public static boolean getBooleanParam(final HttpServletRequest request, final String name, final boolean defaultVal) { if (hasParam(request, name)) { try { return getBooleanParam(request, name); } catch (final Exception e) { return defaultVal; } } return defaultVal; } public static long getLongParam(final HttpServletRequest request, final String name) throws ServletException { final String p = getParam(request, name); return Long.valueOf(p); } public static long getLongParam(final 
HttpServletRequest request, final String name, final long defaultVal) { if (hasParam(request, name)) { try { return getLongParam(request, name); } catch (final Exception e) { return defaultVal; } } return defaultVal; } public static Map<String, String> getParamGroup(final HttpServletRequest request, final String groupName) throws ServletException { final Enumeration<String> enumerate = request.getParameterNames(); final String matchString = groupName + "["; final HashMap<String, String> groupParam = new HashMap<>(); while (enumerate.hasMoreElements()) { final String str = (String) enumerate.nextElement(); if (str.startsWith(matchString)) { groupParam.put(str.substring(matchString.length(), str.length() - 1), request.getParameter(str)); } } return groupParam; } public static Object getJsonBody(final HttpServletRequest request) throws ServletException { try { return JSONUtils.parseJSONFromString(getBody(request)); } catch (IOException e) { throw new ServletException("HTTP Request JSON Body cannot be parsed.", e); } } public static String getBody(final HttpServletRequest request) throws ServletException { try { StringBuffer stringBuffer = new StringBuffer(); String line = null; BufferedReader reader = request.getReader(); while ((line = reader.readLine()) != null) stringBuffer.append(line); return stringBuffer.toString(); } catch (Exception e) { throw new ServletException("HTTP Request Body cannot be parsed.", e); } } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/IMBeanRegistrable.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.server; /** * Interface for MBean Registration */ public interface IMBeanRegistrable { /** * Get MBeanRegistrationManager Instance */ MBeanRegistrationManager getMBeanRegistrationManager(); /** * Function to configure MBean Server */ void configureMBeanServer(); }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/MBeanRegistrationManager.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.server; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.TreeMap; import javax.management.MBeanAttributeInfo; import javax.management.MBeanInfo; import javax.management.MBeanServer; import javax.management.ObjectName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A class to manager MBean Registration tasks */ public class MBeanRegistrationManager { private static final Logger logger = LoggerFactory.getLogger(MBeanRegistrationManager.class); private List<ObjectName> registeredMBeans = new ArrayList<>(); private MBeanServer mbeanServer = null; public void registerMBean(final String name, final Object mbean) { final Class<?> mbeanClass = mbean.getClass(); final ObjectName mbeanName; try { mbeanName = new ObjectName(mbeanClass.getName() + ":name=" + name); getMbeanServer().registerMBean(mbean, mbeanName); logger.info("Bean " + mbeanClass.getCanonicalName() + " registered."); this.registeredMBeans.add(mbeanName); } catch (final Exception e) { logger.error("Error registering mbean " + mbeanClass.getCanonicalName(), e); } } /** * Get Platform MBeanServer Instance * @return platform MBeanServer Instance */ public MBeanServer getMbeanServer() { return (mbeanServer == null) ? 
ManagementFactory.getPlatformMBeanServer() : mbeanServer; } /** * Close all registered MBeans */ public void closeMBeans() { try { for (final ObjectName name : registeredMBeans) { getMbeanServer().unregisterMBean(name); logger.info("Jmx MBean " + name.getCanonicalName() + " unregistered."); } } catch (final Exception e) { logger.error("Failed to cleanup MBeanServer", e); } } /** * Get MBean Names * @return list of MBean Names */ public List<ObjectName> getMBeanNames() { return registeredMBeans; } /** * Get MBeanInfo * @param name mbean Name * @return MBeanInfo */ public MBeanInfo getMBeanInfo(final ObjectName name) { try { return getMbeanServer().getMBeanInfo(name); } catch (final Exception e) { logger.error("Load MBean Information Failure", e); return null; } } /** * Get MBean Attribute * @param name mbean name * @param attribute attribute name * @return object of MBean Attribute */ public Object getMBeanAttribute(final ObjectName name, final String attribute) { try { return getMbeanServer().getAttribute(name, attribute); } catch (final Exception e) { logger.error( "Retrieve MBeanServer attribute Failure. " + "ObjectName = " + name.toString() + ", " + "attribute = " + attribute, e); return null; } } /** * Get MBean Result * @param mbeanName mbeanName * @return Map of MBean */ public Map<String, Object> getMBeanResult(final String mbeanName) { final Map<String, Object> ret = new HashMap<>(); try { final ObjectName name = new ObjectName(mbeanName); final MBeanInfo info = getMBeanInfo(name); final MBeanAttributeInfo[] mbeanAttrs = info.getAttributes(); final Map<String, Object> attributes = new TreeMap<>(); for (final MBeanAttributeInfo attrInfo : mbeanAttrs) { final Object obj = getMBeanAttribute(name, attrInfo.getName()); attributes.put(attrInfo.getName(), obj); } ret.put("attributes", attributes); } catch (final Exception e) { logger.error("Invalid MBean Name. 
name = " + mbeanName, e); ret.put("error", "'" + mbeanName + "' is not a valid mBean name"); } return ret; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/session/Session.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.server.session; import azkaban.user.User; import java.util.HashMap; import java.util.Map; /** * Container for the session, mapping session id to user in map */ public class Session { private final User user; private final String sessionId; private final String ip; private final Map<String, Object> sessionData = new HashMap<>(); /** * Constructor for the session */ public Session(final String sessionId, final User user, final String ip) { this.user = user; this.sessionId = sessionId; this.ip = ip; } /** * Returns the User object */ public User getUser() { return this.user; } /** * Returns the sessionId */ public String getSessionId() { return this.sessionId; } public String getIp() { return this.ip; } public void setSessionData(final String key, final Object value) { this.sessionData.put(key, value); } public Object getSessionData(final String key) { return this.sessionData.get(key); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/server/session/SessionCache.java
/* * Copyright 2012 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.server.session; import azkaban.Constants.ConfigurationKeys; import azkaban.user.User; import azkaban.utils.Props; import azkaban.utils.UndefinedPropertyException; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import javax.inject.Inject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Cache for web session. * * The following global Azkaban properties are used: * <ul> * <li>{@code max.num.sessions} - number of live sessions that Azkaban handles, default is 10000 * <li>{@code session.time.to.live} - number of milliseconds before the session expires, * default 36000000 ms, i.e. 10 hours. * </ul> */ public class SessionCache { private static final int MAX_NUM_SESSIONS = 10000; private static final long DEFAULT_SESSION_TIME_TO_LIVE = 10 * 60 * 60 * 1000L; // 10 hours private final Cache<String, Session> cache; private final long effectiveSessionTimeToLive; private final Optional<Long> maxNumberOfSessionsPerIpPerUser; private static final Logger log = LoggerFactory.getLogger(SessionCache.class); /** * Constructor taking global props. 
*/ @Inject public SessionCache(final Props props) { this.effectiveSessionTimeToLive = props.getLong(ConfigurationKeys.SESSION_TIME_TO_LIVE, DEFAULT_SESSION_TIME_TO_LIVE); Long maxNumberOfSessions; try { maxNumberOfSessions = props.getLong(ConfigurationKeys.MAX_SESSION_NUMBER_PER_IP_PER_USER); } catch (final UndefinedPropertyException exception) { maxNumberOfSessions = null; log.warn("{} is not configured. The number of sessions per IP per user is not being capped.", ConfigurationKeys.MAX_SESSION_NUMBER_PER_IP_PER_USER); } this.maxNumberOfSessionsPerIpPerUser = Optional.ofNullable(maxNumberOfSessions); this.cache = CacheBuilder.newBuilder() .maximumSize(props.getInt("max.num.sessions", MAX_NUM_SESSIONS)) .expireAfterAccess(this.effectiveSessionTimeToLive, TimeUnit.MILLISECONDS) .build(); } public Optional<Long> getMaxNumberOfSessionsPerIpPerUser() { return this.maxNumberOfSessionsPerIpPerUser; } /** * Returns the cached session using the session id. */ public Session getSession(final String sessionId) { final Session elem = this.cache.getIfPresent(sessionId); return elem; } /** * Returns the approximate number of sessions currently be kept. */ public long getSessionCount() { return this.cache.size(); } /** * @return <code>true</code> The number of cached session sharing the same IP and user equals * or greater than the allowed limit if defined; * <code>false</code> otherwise. */ private boolean isViolatingMaxNumberOfSessionPerIpPerUser(final Session session) { if (this.maxNumberOfSessionsPerIpPerUser.isPresent()) { final int duplicateSessionCount = this.getSessionCountByUserByIP(session.getUser(), session.getIp()); return duplicateSessionCount >= this.maxNumberOfSessionsPerIpPerUser.get(); } return false; } /** * Return the number of sessions sharing the given user and ip. 
*/ private int getSessionCountByUserByIP(final User user, final String ip) { // first search for duplicate sessions with the given IP final Set<Session> sessionsWithSameIP = this.findSessionsByIP(ip); // then search for duplicate sessions with the given user int duplicateSessionCount = 0; for (final Session sessionByIP : sessionsWithSameIP) { if (sessionByIP.getUser().equals(user)) { duplicateSessionCount++; } } return duplicateSessionCount; } /** * Adds a session to the cache. * @return <code>true</code> Session is successfully added while not violating the duplicate * IP and user check; * <code>false</code> otherwise. */ public boolean addSession(final Session session) { if (isViolatingMaxNumberOfSessionPerIpPerUser(session)) { return false; } this.cache.put(session.getSessionId(), session); return true; } /** * Removes the session from the cache. */ public void removeSession(final String id) { this.cache.invalidate(id); } /** * Returns sessions whose IP equals to the given IP. */ public Set<Session> findSessionsByIP(final String ip) { final Set<Session> ret = new HashSet<>(); final Map<String, Session> cacheSnapshot = this.cache.asMap(); for (final Entry<String, Session> entry : cacheSnapshot.entrySet()) { if (entry.getValue().getIp().equals(ip)) { ret.add(entry.getValue()); } } return ret; } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/sla/SlaAction.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.sla; /** * SLA actions. */ public enum SlaAction { ALERT, KILL }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/sla/SlaOption.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.sla; import azkaban.executor.ExecutableFlow; import azkaban.sla.SlaType.ComponentType; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; /** * SLA option, which can be associated with a flow or job. 
*/ public class SlaOption { public static final String ALERT_TYPE_EMAIL = "email"; public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow"; public static final String ACTION_ALERT = "SlaAlert"; public static final String ACTION_KILL_JOB = "SlaKillJob"; public static final String WEB_ID = "id"; public static final String WEB_DURATION = "duration"; public static final String WEB_STATUS = "rule"; public static final String WEB_ACTIONS = "actions"; public static final String WEB_ACTION_EMAIL = "EMAIL"; public static final String WEB_ACTION_KILL = "KILL"; private static final char MINUTE_DURATION_UNIT = 'm'; private static final char INVALID_DURATION_UNIT = 'n'; private static final DateTimeFormatter fmt = DateTimeFormat .forPattern("MM/dd, YYYY HH:mm"); final private SlaType type; final private String flowName; final private String jobName; final private Duration duration; final private Set<SlaAction> actions; final private ImmutableList<String> emails; /** * Constructor. * * @param type the SLA type. * @param flowName The name of the flow. * @param jobName The name of the job, if the SLA is for a job. * @param duration The duration (time to wait before the SLA would take effect). * @param actions actions to take for the SLA. * @param emails list of emails to send an alert to, for the SLA. 
*/ public SlaOption(final SlaType type, String flowName, String jobName, Duration duration, Set<SlaAction> actions, List<String> emails) { Preconditions.checkNotNull(type, "type is null"); Preconditions.checkNotNull(actions, "actions is null"); Preconditions.checkState(actions.size() > 0, "An action must be specified for the SLA"); this.type = type; this.flowName = Preconditions.checkNotNull(flowName, "flowName is null"); this.jobName = jobName; this.duration = Preconditions.checkNotNull(duration, "duration is null"); this.actions = ImmutableSet.copyOf(actions); if (emails == null) { this.emails = ImmutableList.of(); } else { this.emails = ImmutableList.copyOf(emails); } } /** * Construct the SLA Option from the original SlaOption, which has been renamed to * {@link SlaOptionDeprecated}. */ public SlaOption(SlaOptionDeprecated slaOption) { String type = slaOption.getType(); switch (type) { case SlaOptionDeprecated.TYPE_FLOW_FINISH: this.type = SlaType.FLOW_FINISH; break; case SlaOptionDeprecated.TYPE_FLOW_SUCCEED: this.type = SlaType.FLOW_SUCCEED; break; case SlaOptionDeprecated.TYPE_JOB_FINISH: this.type = SlaType.JOB_FINISH; break; case SlaOptionDeprecated.TYPE_JOB_SUCCEED: this.type = SlaType.JOB_SUCCEED; break; default: throw new IllegalArgumentException("Unrecognized type " + type); } this.flowName = (String) slaOption.getInfo().get(SlaOptionDeprecated.INFO_FLOW_NAME); this.jobName = (String) slaOption.getInfo().get(SlaOptionDeprecated.INFO_JOB_NAME); this.duration = parseDuration((String) slaOption.getInfo().get(SlaOptionDeprecated .INFO_DURATION)); Set<SlaAction> actions = new HashSet<>(); for (String action : slaOption.getActions()) { switch (action) { case SlaOptionDeprecated.ACTION_ALERT: actions.add(SlaAction.ALERT); break; case SlaOptionDeprecated.ACTION_CANCEL_FLOW: case SlaOptionDeprecated.ACTION_KILL_JOB: actions.add(SlaAction.KILL); break; } } this.actions = ImmutableSet.copyOf(actions); this.emails = ImmutableList.copyOf( (List<String>) 
slaOption.getInfo().get(SlaOptionDeprecated.INFO_EMAIL_LIST) ); } public static List<Object> convertToObjects(List<SlaOption> slaOptions) { if (slaOptions != null) { final List<Object> slaOptionsObject = new ArrayList<>(); for (final SlaOption sla : slaOptions) { if (sla == null) continue; slaOptionsObject.add(sla.toObject()); } return slaOptionsObject; } return null; } private Duration parseDuration(final String durationStr) { final char durationUnit = durationStr.charAt(durationStr.length() - 1); if (durationStr.equals("null") || durationUnit == INVALID_DURATION_UNIT) { return null; } if (durationUnit != MINUTE_DURATION_UNIT) { throw new IllegalArgumentException("Invalid SLA duration unit '" + durationUnit); } final int durationInt = Integer.parseInt(durationStr.substring(0, durationStr.length() - 1)); return Duration.ofMinutes(durationInt); } private String durationToString(Duration duration) { return Long.toString(duration.toMinutes()) + MINUTE_DURATION_UNIT; } public SlaType getType() { return type; } public String getJobName() { return jobName; } public Duration getDuration() { return duration; } public boolean hasAlert() { return actions.contains(SlaAction.ALERT); } public boolean hasKill() { return actions.contains(SlaAction.KILL); } public String getFlowName() { return flowName; } public List<String> getEmails() { return emails; } /** * Check the SlaType's ComponentType of this SlaOption's * * @param componentType component Type * @return true/false */ public boolean isComponentType (SlaType.ComponentType componentType) { return this.type.getComponent() == componentType; } /** * Convert the SLA option to the original JSON format, used by {@link SlaOptionDeprecated}. * * @return the JSON format for {@link SlaOptionDeprecated}. 
*/ public Map<String, Object> toObject() { final List<String> slaActions = new ArrayList<>(); final Map<String, Object> slaInfo = new HashMap<>(); slaInfo.put(SlaOptionDeprecated.INFO_FLOW_NAME, this.flowName); if (hasAlert()) { slaActions.add(SlaOptionDeprecated.ACTION_ALERT); slaInfo.put(SlaOptionDeprecated.ALERT_TYPE, ALERT_TYPE_EMAIL); } if (hasKill()) { if (this.type.getComponent() == ComponentType.FLOW) { slaActions.add(SlaOptionDeprecated.ACTION_CANCEL_FLOW); } else { // JOB slaActions.add(SlaOptionDeprecated.ACTION_KILL_JOB); } } if (this.type.getComponent() == ComponentType.JOB) { slaInfo.put(SlaOptionDeprecated.INFO_JOB_NAME, this.jobName); } String slaType; switch (this.type) { case FLOW_FINISH: slaType = SlaOptionDeprecated.TYPE_FLOW_FINISH; break; case FLOW_SUCCEED: slaType = SlaOptionDeprecated.TYPE_FLOW_SUCCEED; break; case JOB_FINISH: slaType = SlaOptionDeprecated.TYPE_JOB_FINISH; break; case JOB_SUCCEED: slaType = SlaOptionDeprecated.TYPE_JOB_SUCCEED; break; default: throw new IllegalStateException("unsupported SLA type " + this.type.getName()); } slaInfo.put(SlaOptionDeprecated.INFO_DURATION, durationToString(this.duration)); slaInfo.put(SlaOptionDeprecated.INFO_EMAIL_LIST, emails); SlaOptionDeprecated slaOption = new SlaOptionDeprecated(slaType, slaActions, slaInfo); return slaOption.toObject(); } /** * Convert the original JSON format, used by {@link SlaOptionDeprecated}, to an SLA option. * * @param json the original JSON format for {@link SlaOptionDeprecated}. * @return the SLA option. */ public static SlaOption fromObject(Object json) { return new SlaOption(SlaOptionDeprecated.fromObject(json)); } /** * @return the web object representation for the SLA option. 
*/ public Object toWebObject() { final HashMap<String, Object> slaObj = new HashMap<>(); if (this.type.getComponent() == SlaType.ComponentType.FLOW) { slaObj.put(WEB_ID, ""); } else { slaObj.put(WEB_ID, this.jobName); } slaObj.put(WEB_DURATION, durationToString(this.duration)); slaObj.put(WEB_STATUS, this.type.getStatus().toString()); final List<String> actionsObj = new ArrayList<>(); if (hasAlert()) { actionsObj.add(WEB_ACTION_EMAIL); } if (hasKill()) { actionsObj.add(WEB_ACTION_KILL); } slaObj.put(WEB_ACTIONS, actionsObj); return slaObj; } /** * Construct the message for the SLA. * * @param flow the executable flow. * @return the SLA message. */ public String createSlaMessage(final ExecutableFlow flow) { final int execId = flow.getExecutionId(); final String durationStr = durationToString(this.duration); switch (this.type.getComponent()) { case FLOW: final String basicinfo = "SLA Alert: Your flow " + this.flowName + " failed to " + this.type.getStatus() + " within " + durationStr + "<br/>"; final String expected = "Here are details : <br/>" + "Flow " + this.flowName + " in execution " + execId + " is expected to FINISH within " + durationStr + " from " + fmt.print(new DateTime(flow.getStartTime())) + "<br/>"; final String actual = "Actual flow status is " + flow.getStatus(); return basicinfo + expected + actual; case JOB: return "SLA Alert: Your job " + this.jobName + " failed to " + this.type.getStatus() + " within " + durationStr + " in execution " + execId; default: return "Unrecognized SLA component type " + this.type.getComponent(); } } /** * @param options a list of SLA options. * @return the job level SLA options. */ public static List<SlaOption> getJobLevelSLAOptions(List<SlaOption> options) { return filterSLAOptionsByComponentType(options, ComponentType.JOB); } /** * @param options a list of SLA options. * @return the flow level SLA options. 
*/ public static List<SlaOption> getFlowLevelSLAOptions(List<SlaOption> options) { return filterSLAOptionsByComponentType(options, ComponentType.FLOW); } private static List<SlaOption> filterSLAOptionsByComponentType( List<SlaOption> options, ComponentType componentType) { return options.stream() .filter(option -> option.isComponentType(componentType)) .collect(Collectors.toList()); } /** * Builder for {@link SlaOption}. */ public static class SlaOptionBuilder { final private SlaType type; final private String flowName; private String jobName = null; final private Duration duration; private Set<SlaAction> actions; private ImmutableList<String> emails = null; public SlaOptionBuilder(SlaType type, String flowName, Duration duration) { this.type = type; this.flowName = flowName; this.duration = duration; this.actions = new HashSet<>(); } public SlaOptionBuilder setJobName(String jobName) { this.jobName = jobName; return this; } public SlaOptionBuilder setAlert() { actions.add(SlaAction.ALERT); return this; } public SlaOptionBuilder setKill() { actions.add(SlaAction.KILL); return this; } public SlaOptionBuilder setActions(Set<SlaAction> actions) { this.actions.addAll(actions); return this; } public SlaOptionBuilder setEmails(List<String> emails) { this.emails = ImmutableList.copyOf(emails); return this; } public SlaOption createSlaOption() { return new SlaOption(type, flowName, jobName, duration, actions, emails); } } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/sla/SlaOptionDeprecated.java
/* * Copyright 2014 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.sla; import azkaban.executor.ExecutableFlow; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * The original version of SlaOption. This has been replaced with a newer version, but is * being kept for backward compatibility, for reading and writing the original version from * the database. 
*/ public class SlaOptionDeprecated { public static final String TYPE_FLOW_FINISH = "FlowFinish"; public static final String TYPE_FLOW_SUCCEED = "FlowSucceed"; public static final String TYPE_JOB_FINISH = "JobFinish"; public static final String TYPE_JOB_SUCCEED = "JobSucceed"; public static final String INFO_DURATION = "Duration"; public static final String INFO_FLOW_NAME = "FlowName"; public static final String INFO_JOB_NAME = "JobName"; public static final String INFO_EMAIL_LIST = "EmailList"; // always alert public static final String ALERT_TYPE = "SlaAlertType"; public static final String ACTION_CANCEL_FLOW = "SlaCancelFlow"; public static final String ACTION_ALERT = "SlaAlert"; public static final String ACTION_KILL_JOB = "SlaKillJob"; private static final DateTimeFormatter fmt = DateTimeFormat .forPattern("MM/dd, YYYY HH:mm"); private String type; private Map<String, Object> info; private List<String> actions; public SlaOptionDeprecated(final String type, final List<String> actions, final Map<String, Object> info) { this.type = type; this.info = info; this.actions = actions; } public static SlaOptionDeprecated fromObject(final Object object) { final HashMap<String, Object> slaObj = (HashMap<String, Object>) object; final String type = (String) slaObj.get("type"); final List<String> actions = (List<String>) slaObj.get("actions"); final Map<String, Object> info = (Map<String, Object>) slaObj.get("info"); return new SlaOptionDeprecated(type, actions, info); } public String getType() { return this.type; } public void setType(final String type) { this.type = type; } public Map<String, Object> getInfo() { return this.info; } public void setInfo(final Map<String, Object> info) { this.info = info; } public List<String> getActions() { return this.actions; } public void setActions(final List<String> actions) { this.actions = actions; } public Map<String, Object> toObject() { final HashMap<String, Object> slaObj = new HashMap<>(); slaObj.put("type", this.type); 
slaObj.put("info", this.info); slaObj.put("actions", this.actions); return slaObj; } @Override public String toString() { return "Sla of " + getType() + getInfo() + getActions(); } }
0
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban
java-sources/ai/databand/azkaban/azkaban-common/3.90.0/azkaban/sla/SlaType.java
/* * Copyright 2019 LinkedIn Corp. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package azkaban.sla; /** * SLA type -- if SLA is for a flow or job that has succeeded or finished. */ public enum SlaType { FLOW_FINISH("FlowFinish", ComponentType.FLOW, StatusType.FINISH), FLOW_SUCCEED("FlowSucceed", ComponentType.FLOW, StatusType.SUCCEED), JOB_FINISH("JobFinish", ComponentType.JOB, StatusType.FINISH), JOB_SUCCEED("JobSucceed", ComponentType.JOB, StatusType.SUCCEED); /** * The component the SLA is for: a flow or job. */ public enum ComponentType { FLOW, JOB } /** * The status the SLA is for: finish or succeed. */ public enum StatusType { FINISH, SUCCEED } final private String name; final private ComponentType component; final private StatusType status; /** * Constructor. * * @param name the SLA type name. * @param component The component the SLA is for, either flow or job. * @param status the status the SLA is for, either succeed or finish. */ SlaType(String name, ComponentType component, StatusType status) { this.name = name; this.component = component; this.status = status; } public String getName() { return name; } public ComponentType getComponent() { return component; } public StatusType getStatus() { return status; } }