index      int64
repo_id    string
file_path  string
content    string
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/AzkabanFlowSharedProps.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import ai.databand.config.NormalizedProps;
import ai.databand.config.PropertiesSource;
import azkaban.execapp.FlowRunner;
import azkaban.utils.Props;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/**
 * Shared properties loader.
 * Azkaban jobs may be configured using "shared properties" files stored in a flow directory.
 * Those properties are not available at runtime during flow start.
 * This class has to access a private field via reflection to load those properties.
 */
public class AzkabanFlowSharedProps implements PropertiesSource {

    private static final Logger LOG = LoggerFactory.getLogger(AzkabanFlowSharedProps.class);

    private final Map<String, String> props;

    public AzkabanFlowSharedProps(FlowRunner flowRunner) {
        Map<String, String> azkabanProps = new HashMap<>();
        try {
            Field sharedPropsField = flowRunner.getClass().getDeclaredField("sharedProps");
            if (!sharedPropsField.isAccessible()) {
                sharedPropsField.setAccessible(true);
            }
            Map<String, Props> sharedProps = (Map) sharedPropsField.get(flowRunner);
            for (Props nextProps : sharedProps.values()) {
                azkabanProps.putAll(nextProps.getFlattened());
            }
        } catch (IllegalAccessException | NoSuchFieldException e) {
            LOG.error("Unable to load shared properties from the Azkaban Flow", e);
        }
        props = new NormalizedProps(azkabanProps).values();
    }

    @Override
    public Map<String, String> values() {
        return props;
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
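For reference, the PropertiesSource contract can be inferred from the two methods this class overrides: values() returning the full map and getValue(key) returning an Optional. A minimal sketch of another implementation, backed by the process environment for illustration; EnvBackedProps is a hypothetical name, not part of the agent:

import ai.databand.config.PropertiesSource;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class EnvBackedProps implements PropertiesSource {

    // copy the environment once; PropertiesSource implementations above are also immutable snapshots
    private final Map<String, String> props = new HashMap<>(System.getenv());

    @Override
    public Map<String, String> values() {
        return props;
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}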
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/AzkabanProps.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import ai.databand.config.JavaOpts;
import ai.databand.config.PropertiesSource;
import ai.databand.config.SimpleProps;
import azkaban.server.AzkabanServer;

import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

public class AzkabanProps implements PropertiesSource {

    private final Map<String, String> props;

    public AzkabanProps() {
        this(new SimpleProps());
    }

    public AzkabanProps(PropertiesSource parent) {
        props = new HashMap<>(parent.values());
        JavaOpts javaOpts = new JavaOpts(AzkabanServer.getAzkabanProperties().getFlattened());
        props.putAll(javaOpts.values());
    }

    @Override
    public Map<String, String> values() {
        return props;
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/DbndEventReporter.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import ai.databand.azkaban.events.DefaultEvent;
import ai.databand.azkaban.events.FlowFinishedEvent;
import ai.databand.azkaban.events.FlowStartedEvent;
import ai.databand.azkaban.events.JobFinishedEvent;
import ai.databand.azkaban.events.JobStartedEvent;
import azkaban.event.Event;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.Map;

public class DbndEventReporter {

    private static final Logger LOG = LoggerFactory.getLogger(DbndEventReporter.class);

    private final Map<Event.Type, Class<? extends AzkabanEvent>> events;

    public DbndEventReporter() {
        events = new HashMap<>(1);
        events.put(Event.Type.FLOW_STARTED, FlowStartedEvent.class);
        events.put(Event.Type.FLOW_FINISHED, FlowFinishedEvent.class);
        events.put(Event.Type.JOB_STARTED, JobStartedEvent.class);
        events.put(Event.Type.JOB_FINISHED, JobFinishedEvent.class);
    }

    public boolean report(Event reportedEvent) {
        try {
            Class<? extends AzkabanEvent> type = events.getOrDefault(reportedEvent.getType(), DefaultEvent.class);
            AzkabanEvent event = type.getConstructor(Event.class).newInstance(reportedEvent);
            event.track();
        } catch (Throwable e) {
            LOG.error("Unable to track event", e);
        }
        return true;
    }
}
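The reporter dispatches by looking up a handler class for the event type and instantiating it reflectively through its Event constructor, falling back to the no-op DefaultEvent. A minimal sketch of driving it directly from a listener, assuming Azkaban's azkaban.event.EventListener interface exposes handleEvent(Event) (which is the method the agent instruments); DbndEventListener is a hypothetical name:

package ai.databand.azkaban;

import azkaban.event.Event;
import azkaban.event.EventListener;

public class DbndEventListener implements EventListener {

    private final DbndEventReporter reporter = new DbndEventReporter();

    @Override
    public void handleEvent(Event event) {
        // report() never throws: failures are logged and swallowed inside the reporter
        reporter.report(event);
    }
}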
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/agent/AzkabanTransformer.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.agent;

import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtMethod;
import javassist.LoaderClassPath;
import javassist.bytecode.MethodInfo;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.instrument.ClassFileTransformer;
import java.security.ProtectionDomain;
import java.util.Optional;

public class AzkabanTransformer implements ClassFileTransformer {

    private static final String TASK_ANNOTATION = "ai.databand.annotations.Task";

    private final boolean isVerbose;

    public AzkabanTransformer(boolean isVerbose) {
        this.isVerbose = isVerbose;
    }

    public byte[] transform(ClassLoader loader,
                            String className,
                            Class classBeingRedefined,
                            ProtectionDomain protectionDomain,
                            byte[] classfileBuffer) {
        ClassPool cp = ClassPool.getDefault();
        cp.appendClassPath(new LoaderClassPath(loader));
        Optional<CtClass> ctOpt = classInScope(cp, className, classfileBuffer);
        if (!ctOpt.isPresent()) {
            return null;
        }
        try {
            CtClass ct = ctOpt.get();
            System.out.printf("Instrumenting class %s%n", className);
            CtMethod[] declaredMethods = ct.getDeclaredMethods();
            for (CtMethod method : declaredMethods) {
                // instrument the event handler so every Azkaban event is reported to Databand
                MethodInfo methodInfo = method.getMethodInfo();
                if (method.getName().contains("handleEvent")) {
                    if (isVerbose) {
                        System.out.printf("Instrumenting method %s%n", methodInfo.getName());
                    }
                    method.insertBefore("{ new ai.databand.azkaban.DbndEventReporter().report($1); }");
                    break;
                }
            }
            return ct.toBytecode();
        } catch (RuntimeException e) {
            if (e.getMessage() != null && e.getMessage().contains("frozen")) {
                return null;
            }
        } catch (Throwable e) {
            e.printStackTrace();
        }
        return classfileBuffer;
    }

    protected Optional<CtClass> classInScope(ClassPool cp, String className, byte[] classfileBuffer) {
        if (!className.equalsIgnoreCase("azkaban/execapp/FlowRunnerManager")
            && !className.contains("azkaban/execapp/FlowRunner$JobRunnerEventListener")) {
            return Optional.empty();
        }
        try (InputStream is = new ByteArrayInputStream(classfileBuffer)) {
            CtClass ct = cp.makeClass(is);
            return Optional.of(ct);
        } catch (IOException e) {
            return Optional.empty();
        }
    }
}
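The key call here is javassist's insertBefore(), where "$1" is javassist source-level syntax for the instrumented method's first argument. A self-contained sketch of the same mechanic outside an agent; InsertBeforeDemo and its nested Target class are hypothetical:

import javassist.ClassPool;
import javassist.CtClass;
import javassist.CtMethod;

public class InsertBeforeDemo {

    public static class Target {
        public void handleEvent(String event) {
            System.out.println("handling " + event);
        }
    }

    public static void main(String[] args) throws Exception {
        ClassPool cp = ClassPool.getDefault();
        CtClass ct = cp.get("InsertBeforeDemo$Target");
        CtMethod m = ct.getDeclaredMethod("handleEvent");
        // prepend a statement that receives the original first argument
        m.insertBefore("{ System.out.println(\"before: \" + $1); }");
        byte[] patched = ct.toBytecode(); // these bytes are what transform() returns
        System.out.printf("patched %d bytes%n", patched.length);
    }
}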
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/agent/DbndAgent.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.agent;

import ai.databand.config.DbndAgentConfig;

import java.io.IOException;
import java.io.InputStream;
import java.lang.instrument.Instrumentation;
import java.util.Properties;

public class DbndAgent {

    public static void premain(String agentArgs, Instrumentation inst) {
        Properties props = new Properties();
        try (InputStream input = DbndAgent.class.getClassLoader().getResourceAsStream("application.properties")) {
            props.load(input);
        } catch (IOException e) {
            // shouldn't occur
            e.printStackTrace();
        }
        System.out.printf("Starting Databand v%s for Azkaban!%n", props.getProperty("version"));
        // this is a workaround for the spark-submit case:
        // for some reason CallSite is not loaded during the instrumentation phase, so we have to load it beforehand
        try {
            Class.forName("java.lang.invoke.CallSite");
        } catch (Throwable e) {
            e.printStackTrace();
        }
        DbndAgentConfig config = new DbndAgentConfig(agentArgs);
        inst.addTransformer(new AzkabanTransformer(config.isVerbose()));
    }
}
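For context, this follows the standard java.lang.instrument contract: the class named in the jar's Premain-Class manifest attribute has its premain(String, Instrumentation) invoked before main(), and transformers it registers see every class loaded afterwards. A minimal skeleton of that contract; MinimalAgent is a hypothetical name:

import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.Instrumentation;
import java.security.ProtectionDomain;

public class MinimalAgent {

    public static void premain(String agentArgs, Instrumentation inst) {
        inst.addTransformer(new ClassFileTransformer() {
            @Override
            public byte[] transform(ClassLoader loader, String className,
                                    Class<?> classBeingRedefined,
                                    ProtectionDomain protectionDomain,
                                    byte[] classfileBuffer) {
                return null; // null leaves the class unchanged
            }
        });
    }
}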
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/events/DefaultEvent.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.events;

import ai.databand.azkaban.AzkabanEvent;
import azkaban.event.Event;

public class DefaultEvent implements AzkabanEvent {

    public DefaultEvent(Event event) {
        // do nothing
    }

    @Override
    public void track() {
        // do nothing
    }
}
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/events/FlowFinishedEvent.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.events;

import ai.databand.azkaban.AgentAzkabanFlow;
import ai.databand.azkaban.AzkabanEvent;
import ai.databand.azkaban.AzkabanFlow;
import ai.databand.azkaban.AzkabanProps;
import ai.databand.config.DbndConfig;
import ai.databand.config.Env;
import ai.databand.config.JavaOpts;
import azkaban.event.Event;

public class FlowFinishedEvent implements AzkabanEvent {

    private final AzkabanEvent origin;

    public FlowFinishedEvent(Event event) {
        DbndConfig config = new DbndConfig(
            new Env(
                new JavaOpts(
                    new AzkabanProps()
                )
            )
        );
        FlowRunnerContext flowCtx = new FlowRunnerContext(event, config);
        AzkabanFlow flow = new AgentAzkabanFlow(config, flowCtx);
        this.origin = new FlowFinished(config, flowCtx.taskContext(), flow);
    }

    @Override
    public void track() {
        origin.track();
    }
}
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/events/FlowRunnerContext.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.events;

import ai.databand.azkaban.AgentAzkabanLinks;
import ai.databand.azkaban.links.AzkabanLinks;
import ai.databand.config.DbndConfig;
import ai.databand.schema.AzkabanTaskContext;
import azkaban.event.Event;
import azkaban.execapp.FlowRunner;
import azkaban.executor.ExecutableFlow;
import azkaban.flow.CommonJobProperties;
import azkaban.flow.Flow;
import azkaban.project.JdbcProjectLoader;
import azkaban.project.Project;
import azkaban.project.ProjectManager;
import azkaban.server.AzkabanServer;
import azkaban.utils.Props;

public class FlowRunnerContext {

    private final ExecutableFlow executableFlow;
    private final FlowRunner flowRunner;
    private final Flow flowDef;
    private final String flowId;
    private final String projectName;
    private final String flowUuid;
    private final String executionId;
    private final String startTime;
    private final String pipelineName;
    private final DbndConfig config;

    public FlowRunnerContext(Event event, DbndConfig config) {
        this.flowRunner = (FlowRunner) event.getRunner();
        this.executableFlow = flowRunner.getExecutableFlow();
        this.flowId = executableFlow.getId();
        this.projectName = executableFlow.getProjectName();
        this.flowUuid = executableFlow.getInputProps().get(CommonJobProperties.FLOW_UUID);
        this.executionId = String.valueOf(executableFlow.getExecutionId());
        this.startTime = String.valueOf(event.getTime());
        ProjectManager projectManager = new ProjectManager(new JdbcProjectLoader(AzkabanServer.getAzkabanProperties()), AzkabanServer.getAzkabanProperties());
        Project project = projectManager.getProject(projectName);
        this.flowDef = project.getFlow(flowId);
        this.pipelineName = String.format("%s__%s", projectName, flowId);
        this.config = config;
    }

    public ExecutableFlow executableFlow() {
        return executableFlow;
    }

    public FlowRunner flowRunner() {
        return flowRunner;
    }

    public String startTime() {
        return startTime;
    }

    public AzkabanTaskContext taskContext() {
        return new AzkabanTaskContext(projectName, flowId, flowUuid, executionId, "", config);
    }

    public AzkabanLinks links() {
        return new AgentAzkabanLinks(executableFlow);
    }

    public Flow flowDef() {
        return flowDef;
    }

    public String pipelineName() {
        return pipelineName;
    }

    public String envName() {
        Props systemProps = AzkabanServer.getAzkabanProperties();
        return String.format("%s: %s", systemProps.get("azkaban.name"), systemProps.get("azkaban.label"));
    }

    public boolean isTrack() {
        return false;
    }
}
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/events/FlowStartedEvent.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.events;

import ai.databand.azkaban.AgentAzkabanFlow;
import ai.databand.azkaban.AzkabanEvent;
import ai.databand.azkaban.AzkabanFlow;
import ai.databand.azkaban.AzkabanFlowSharedProps;
import ai.databand.azkaban.AzkabanProps;
import ai.databand.config.DbndConfig;
import ai.databand.config.Env;
import ai.databand.config.JavaOpts;
import azkaban.event.Event;
import azkaban.execapp.FlowRunner;

public class FlowStartedEvent implements AzkabanEvent {

    private final AzkabanEvent origin;

    public FlowStartedEvent(Event event) {
        DbndConfig config = new DbndConfig(
            new Env(
                new JavaOpts(
                    new AzkabanProps(
                        new AzkabanFlowSharedProps((FlowRunner) event.getRunner())
                    )
                )
            )
        );
        FlowRunnerContext flowCtx = new FlowRunnerContext(event, config);
        AzkabanFlow flow = new AgentAzkabanFlow(config, flowCtx);
        this.origin = new FlowStarted(config, flowCtx.taskContext(), flow);
    }

    @Override
    public void track() {
        origin.track();
    }
}
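The nested constructors form a decorator chain of configuration sources: AzkabanProps's own constructor shows the pattern (copy the parent's map, then overlay its keys with putAll), so the outermost wrapper wins on conflicts. A sketch of the chain in isolation, assuming Env and JavaOpts implement the same PropertiesSource interface, which their composition here suggests but does not prove; ConfigLayeringDemo is a hypothetical name:

import ai.databand.azkaban.AzkabanFlowSharedProps;
import ai.databand.azkaban.AzkabanProps;
import ai.databand.config.Env;
import ai.databand.config.JavaOpts;
import ai.databand.config.PropertiesSource;
import azkaban.execapp.FlowRunner;

public class ConfigLayeringDemo {

    // assumption: Env and JavaOpts implement PropertiesSource, as their use above suggests
    public static PropertiesSource layeredSources(FlowRunner flowRunner) {
        return new Env(                                        // environment variables
            new JavaOpts(                                      // -D JVM options
                new AzkabanProps(                              // Azkaban server properties
                    new AzkabanFlowSharedProps(flowRunner)))); // flow "shared properties" files
    }
}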
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/events/JobFinishedEvent.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.events;

import ai.databand.azkaban.AgentAzkabanJob;
import ai.databand.azkaban.AzkabanEvent;
import ai.databand.azkaban.AzkabanJob;
import ai.databand.azkaban.AzkabanProps;
import ai.databand.config.DbndConfig;
import ai.databand.config.Env;
import ai.databand.config.JavaOpts;
import ai.databand.schema.AzkabanTaskContext;
import azkaban.event.Event;
import azkaban.execapp.JobRunner;
import azkaban.executor.ExecutableFlow;
import azkaban.flow.CommonJobProperties;

public class JobFinishedEvent implements AzkabanEvent {

    private final AzkabanEvent origin;

    public JobFinishedEvent(Event event) {
        if (!(event.getRunner() instanceof JobRunner)) {
            origin = new EmptyEvent();
            return;
        }
        DbndConfig config = new DbndConfig(
            new Env(
                new JavaOpts(
                    new AzkabanProps()
                )
            )
        );
        JobRunner jobRunner = (JobRunner) event.getRunner();
        ExecutableFlow executableFlow = jobRunner.getNode().getExecutableFlow();
        String flowName = executableFlow.getId();
        String projectName = executableFlow.getProjectName();
        String flowUuid = executableFlow.getInputProps().get(CommonJobProperties.FLOW_UUID);
        String executionId = String.valueOf(executableFlow.getExecutionId());
        String startTime = String.valueOf(event.getTime());
        AzkabanTaskContext azCtx = new AzkabanTaskContext(projectName, flowName, flowUuid, executionId, jobRunner.getNode().getId(), config);
        AzkabanJob job = new AgentAzkabanJob(
            config,
            jobRunner,
            startTime
        );
        this.origin = new JobFinished(config, azCtx, job, "Job failed");
    }

    @Override
    public void track() {
        origin.track();
    }
}
0
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban
java-sources/ai/databand/dbnd-azkaban-agent/1.0.28.1/ai/databand/azkaban/events/JobStartedEvent.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban.events;

import ai.databand.azkaban.AzkabanEvent;
import ai.databand.azkaban.AzkabanProps;
import ai.databand.config.DbndConfig;
import ai.databand.config.Env;
import ai.databand.config.JavaOpts;
import ai.databand.schema.AzkabanTaskContext;
import azkaban.event.Event;
import azkaban.execapp.JobRunner;
import azkaban.executor.ExecutableFlow;
import azkaban.flow.CommonJobProperties;

public class JobStartedEvent implements AzkabanEvent {

    private final AzkabanEvent origin;

    public JobStartedEvent(Event event) {
        if (!(event.getRunner() instanceof JobRunner)) {
            origin = new EmptyEvent();
            return;
        }
        DbndConfig config = new DbndConfig(
            new Env(
                new JavaOpts(
                    new AzkabanProps()
                )
            )
        );
        JobRunner jobRunner = (JobRunner) event.getRunner();
        ExecutableFlow executableFlow = jobRunner.getNode().getExecutableFlow();
        String flowName = executableFlow.getId();
        String projectName = executableFlow.getProjectName();
        String flowUuid = executableFlow.getInputProps().get(CommonJobProperties.FLOW_UUID);
        String executionId = String.valueOf(executableFlow.getExecutionId());
        AzkabanTaskContext azCtx = new AzkabanTaskContext(projectName, flowName, flowUuid, executionId, jobRunner.getNode().getId(), config);
        this.origin = new JobStarted(config, azCtx);
    }

    @Override
    public void track() {
        origin.track();
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/ApiWithTokenBuilder.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand;

import ai.databand.config.DbndConfig;
import ai.databand.config.DbndSparkConf;
import ai.databand.config.Env;
import ai.databand.config.JavaOpts;
import ai.databand.config.SimpleProps;
import ai.databand.schema.auth.CreateTokenReq;
import ai.databand.schema.auth.CreateTokenRes;
import ai.databand.schema.auth.LoginReq;
import ai.databand.schema.auth.LoginRes;
import okhttp3.Headers;
import retrofit2.Response;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;

/**
 * This api builder is used in unit tests.
 * It has to be placed here, because okhttp3 packages have to be relocated to avoid conflicts with Spark distributions.
 */
public class ApiWithTokenBuilder {

    public DbndApi api() throws IOException {
        DbndConfig config = new DbndConfig();
        DbndClient dbnd = new DbndClient(config);
        DbndApi api = dbnd.api();

        Response<Void> csrfRes = api.csrfToken().execute();
        Optional<String> optionalCookie = csrfRes.headers().values("set-cookie").stream().filter(cookieStr -> cookieStr.contains("X-CSRF-TOKEN")).findFirst();
        String csrfToken = optionalCookie.orElseThrow(NullPointerException::new).split(";")[0].split("=")[1];
        String cookie = Objects.requireNonNull(csrfRes.headers().get("set-cookie")).concat(";");

        Response<LoginRes> loginRes = api.login(new LoginReq(), cookie, csrfToken).execute();
        Headers headers = loginRes.headers();
        cookie = Objects.requireNonNull(headers.get("set-cookie")).concat(";");

        Response<CreateTokenRes> tokenRes = api.createPersonalAccessToken(new CreateTokenReq(), cookie, csrfToken).execute();
        CreateTokenRes tokenResBody = tokenRes.body();
        Objects.requireNonNull(tokenResBody, "Token response body should not be empty");
        String token = tokenResBody.getToken();

        String finalCookie = cookie;
        Map<String, String> tokens = new HashMap<String, String>() {{
            put(DbndPropertyNames.DBND__CORE__DATABAND_ACCESS_TOKEN, token);
            put(DbndPropertyNames.DBND__CSRF_TOKEN, csrfToken);
            put(DbndPropertyNames.DBND__SESSION_COOKIE, finalCookie);
        }};

        DbndConfig configWithToken = new DbndConfig(new DbndSparkConf(
            new Env(
                new JavaOpts(
                    new SimpleProps(tokens)
                )
            )
        ));
        return new DbndClient(configWithToken).api();
    }
}
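A minimal usage sketch for tests, assuming a reachable Databand server configured through DbndConfig's usual sources; ApiSmokeTest is a hypothetical name and the call will fail without a live server:

import ai.databand.ApiWithTokenBuilder;
import ai.databand.DbndApi;
import ai.databand.schema.GetRunsResponse;
import retrofit2.Response;

import java.io.IOException;

public class ApiSmokeTest {

    public static void main(String[] args) throws IOException {
        // performs the full csrf -> login -> personal-access-token handshake shown above
        DbndApi api = new ApiWithTokenBuilder().api();
        // a null filter simply omits the query parameter
        Response<GetRunsResponse> res = api.runs(null).execute();
        System.out.println("HTTP " + res.code());
    }
}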
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndApi.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand;

import ai.databand.schema.AddTaskRuns;
import ai.databand.schema.DatasetOperationRes;
import ai.databand.schema.GetRunsResponse;
import ai.databand.schema.InitRun;
import ai.databand.schema.Job;
import ai.databand.schema.LogDatasets;
import ai.databand.schema.LogMetric;
import ai.databand.schema.LogMetrics;
import ai.databand.schema.LogTargets;
import ai.databand.schema.MetricsForAlertsResponse;
import ai.databand.schema.PaginatedData;
import ai.databand.schema.SaveExternalLinks;
import ai.databand.schema.SaveTaskRunLog;
import ai.databand.schema.SetRunState;
import ai.databand.schema.TaskFullGraph;
import ai.databand.schema.TaskRunAttemptLog;
import ai.databand.schema.Tasks;
import ai.databand.schema.TasksMetricsRequest;
import ai.databand.schema.TasksMetricsResponse;
import ai.databand.schema.UpdateTaskRunAttempts;
import ai.databand.schema.auth.CreateTokenReq;
import ai.databand.schema.auth.CreateTokenRes;
import ai.databand.schema.auth.LoginReq;
import ai.databand.schema.auth.LoginRes;
import ai.databand.schema.tasks.GetTasksReq;
import retrofit2.Call;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.POST;
import retrofit2.http.Path;
import retrofit2.http.Query;

import java.util.List;

public interface DbndApi {

    @POST("/api/v1/tracking/init_run")
    Call<Void> initRun(@Body InitRun data);

    @POST("/api/v1/tracking/add_task_runs")
    Call<Void> addTaskRuns(@Body AddTaskRuns data);

    @POST("/api/v1/tracking/log_metric")
    Call<Void> logMetric(@Body LogMetric data);

    @POST("/api/v1/tracking/log_metrics")
    Call<Void> logMetrics(@Body LogMetrics data);

    @POST("/api/v1/tracking/save_task_run_log")
    Call<Void> saveTaskRunLog(@Body SaveTaskRunLog data);

    @POST("/api/v1/tracking/log_targets")
    Call<Void> logTargets(@Body LogTargets data);

    @POST("/api/v1/tracking/log_datasets")
    Call<Void> logDatasets(@Body LogDatasets data);

    @POST("/api/v1/tracking/set_run_state")
    Call<Void> setRunState(@Body SetRunState data);

    @POST("/api/v1/tracking/update_task_run_attempts")
    Call<Void> updateTaskRunAttempts(@Body UpdateTaskRunAttempts data);

    @POST("/api/v1/tracking/save_external_links")
    Call<Void> saveExternalLinks(@Body SaveExternalLinks data);

    @POST("/api/v1/auth/login")
    Call<LoginRes> login(@Body LoginReq data, @Header("Cookie") String cookie, @Header("X-CSRF-Token") String csrfToken);

    @GET("/api/v1/auth/csrf")
    Call<Void> csrfToken();

    @GET("/api/v1/task/full-graph")
    Call<TaskFullGraph> taskFullGraph(@Query("job_name") String jobName, @Query("run_uid") String runUid);

    @GET("/api/v1/runs")
    Call<GetRunsResponse> runs(@Query("filter") String filter);

    @POST("/api/v1/task/tasks-metrics")
    Call<TasksMetricsResponse> tasksMetrics(@Body TasksMetricsRequest data);

    @GET("/api/v1/jobs")
    Call<PaginatedData<Job>> jobs(@Query("sort") String filter);

    @POST("/api/v1/task/tasks")
    Call<Tasks> tasks(@Body GetTasksReq taskUids);

    @GET("/api/v1/task/tasks-logs")
    Call<List<TaskRunAttemptLog>> logs(@Query("attempt_id") Integer taskRunAttemptId);

    @GET("/api/v1/metrics/for_alerts")
    Call<MetricsForAlertsResponse> metricsForAlerts(@Query("filter") String filter);

    @GET("/api/v1/runs/{run_uid}/operations")
    Call<List<DatasetOperationRes>> operations(@Path("run_uid") String runUid);

    @POST("/api/v1/auth/personal_access_token")
    Call<CreateTokenRes> createPersonalAccessToken(@Body CreateTokenReq req, @Header("Cookie") String cookie, @Header("X-CSRF-Token") String csrfToken);
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndApiBuilder.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand;

import ai.databand.config.DbndConfig;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.PropertyNamingStrategy;
import okhttp3.OkHttpClient;
import okhttp3.Protocol;
import okhttp3.Request;
import okhttp3.Response;
import retrofit2.Retrofit;
import retrofit2.converter.jackson.JacksonConverterFactory;

import java.util.Collections;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

public class DbndApiBuilder {

    private final DbndConfig config;

    public DbndApiBuilder(DbndConfig config) {
        this.config = config;
    }

    public DbndApi build() {
        ObjectMapper objectMapper = new ObjectMapper();
        try {
            objectMapper.setPropertyNamingStrategy(PropertyNamingStrategy.SNAKE_CASE);
        } catch (java.lang.NoSuchFieldError e) {
            // jackson 2.6 used
            objectMapper.setPropertyNamingStrategy(PropertyNamingStrategy.CAMEL_CASE_TO_LOWER_CASE_WITH_UNDERSCORES);
        }

        OkHttpClient.Builder clientBuilder = new OkHttpClient.Builder()
            .readTimeout(60, TimeUnit.SECONDS)
            .connectTimeout(60, TimeUnit.SECONDS)
            .writeTimeout(60, TimeUnit.SECONDS)
            // we will handle redirects manually
            .followRedirects(false)
            .followSslRedirects(false)
            // enforce HTTP 1 to avoid threads hanging, see https://github.com/square/okhttp/issues/4029
            .protocols(Collections.singletonList(Protocol.HTTP_1_1))
            .retryOnConnectionFailure(true);

        /*
         * If a personal access token is enabled, we add the corresponding header to each API call.
         * The access token should be passed in the "Authorization" header.
         */
        if (config.personalAccessToken().isPresent()) {
            clientBuilder.addInterceptor(
                chain -> {
                    Request origin = chain.request();
                    Request withAuth = origin
                        .newBuilder()
                        .addHeader("Authorization", String.format("Bearer %s", config.personalAccessToken().get()))
                        .addHeader("X-Databand-Trace-ID", config.getTraceId())
                        .addHeader("X-Request-ID", UUID.randomUUID().toString())
                        .build();
                    return chain.proceed(withAuth);
                }
            );
        }

        /*
         * If a CSRF token is enabled, we add the corresponding header to each API call.
         * The CSRF token should be passed in the "X-CSRF-Token" header.
         */
        if (config.csrfToken().isPresent()) {
            clientBuilder.addInterceptor(
                chain -> {
                    Request origin = chain.request();
                    Request withCsrf = origin
                        .newBuilder()
                        .addHeader("X-CSRF-Token", config.csrfToken().get())
                        .build();
                    return chain.proceed(withCsrf);
                }
            );
        }

        /*
         * If a session cookie is enabled, we add the corresponding header to each API call.
         * The session cookie should be passed in the "Cookie" header.
         */
        if (config.sessionCookie().isPresent()) {
            clientBuilder.addInterceptor(
                chain -> {
                    Request origin = chain.request();
                    Request withSessionCookie = origin
                        .newBuilder()
                        .addHeader("Cookie", config.sessionCookie().get())
                        .build();
                    return chain.proceed(withSessionCookie);
                }
            );
        }

        /*
         * OkHttp doesn't do proper redirects on 301: https://github.com/square/okhttp/issues/6627
         * This interceptor introduces a workaround: if a request is being redirected, we handle it manually.
         */
        clientBuilder.addInterceptor(
            chain -> {
                Request origin = chain.request();
                Response response = chain.proceed(origin);
                if (!response.isRedirect()) {
                    return response;
                }
                String newLocation = response.header("Location");
                if (newLocation == null) {
                    return response;
                }
                Request withNewLocation = origin
                    .newBuilder()
                    .url(newLocation)
                    .build();
                return chain.proceed(withNewLocation);
            }
        );

        // disabled until we figure out a way to upgrade the okio library
        // if (config.isVerbose()) {
        //     HttpLoggingInterceptor loggingInterceptor = new HttpLoggingInterceptor();
        //     loggingInterceptor.setLevel(HttpLoggingInterceptor.Level.BODY);
        //     clientBuilder.addInterceptor(loggingInterceptor);
        // }

        OkHttpClient client = clientBuilder.build();

        Retrofit.Builder builder = new Retrofit.Builder()
            .client(client)
            .baseUrl(config.databandUrl())
            .addConverterFactory(JacksonConverterFactory.create(objectMapper));

        return builder.build().create(DbndApi.class);
    }
}
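The redirect workaround can also be written as a named OkHttp interceptor. A sketch (ManualRedirectInterceptor is a hypothetical name), with one extra precaution the lambda above omits: closing the first response body before retrying, which releases the pooled connection:

import okhttp3.Interceptor;
import okhttp3.Request;
import okhttp3.Response;

import java.io.IOException;

public class ManualRedirectInterceptor implements Interceptor {

    @Override
    public Response intercept(Interceptor.Chain chain) throws IOException {
        Request origin = chain.request();
        Response response = chain.proceed(origin);
        if (!response.isRedirect()) {
            return response;
        }
        String newLocation = response.header("Location");
        if (newLocation == null) {
            return response;
        }
        response.close(); // release the first connection before following the redirect
        Request withNewLocation = origin.newBuilder().url(newLocation).build();
        return chain.proceed(withNewLocation);
    }
}

It would be registered the same way as the lambdas above: clientBuilder.addInterceptor(new ManualRedirectInterceptor()).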
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndAppLog.java
/*
 * © Copyright Databand.ai, an IBM Company 2024-2025
 */

package ai.databand;

import java.io.PrintStream;
import java.lang.management.ManagementFactory;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class DbndAppLog {

    private final org.slf4j.Logger LOG;

    public DbndAppLog(final org.slf4j.Logger log4j) {
        this.LOG = log4j;
    }

    //public final static String LOG_PREFIX = "[[==DBND==]] "; // make it clearly visible in Spark logs to not confuse it with regular Spark execution logs
    public final static String LOG_PREFIX = "";

    private static String getClassName(final StackTraceElement ste) {
        String stFullClassName = ste.getClassName();
        return stFullClassName.substring(stFullClassName.lastIndexOf('.') + 1);
    }

    private static void printf(final org.slf4j.event.Level lvl, final String msg, final Object... args) {
        final LocalDateTime dt = LocalDateTime.now();
        final String timeStamp = dt.format(DateTimeFormatter.ofPattern("yy/MM/dd HH:mm:ss"));
        final StackTraceElement[] st = Thread.currentThread().getStackTrace();
        final StackTraceElement ste = st[3];
        String stClassName = getClassName(ste);
        if (stClassName.equals(DbndAppLog.class.getSimpleName())) {
            stClassName = getClassName(st[4]);
        }
        final PrintStream outOrErr = lvl == org.slf4j.event.Level.ERROR ? System.err : System.out;
        final String logInfos = String.format("%s %s %s: %sstdout: ", timeStamp, lvl, stClassName, LOG_PREFIX);
        outOrErr.printf(logInfos + String.format(msg, args));
    }

    public static void printfln(final org.slf4j.event.Level lvl, final String msg, final Object... args) {
        printf(lvl, msg + "%n", args);
    }

    public void info(final String msg, final Object... args) {
        LOG.info(DbndAppLog.LOG_PREFIX + msg, args);
    }

    public void jvmInfo(final String msg, final Object... args) {
        LOG.info(DbndAppLog.LOG_PREFIX + "[" + ManagementFactory.getRuntimeMXBean().getName() + "] " + msg, args);
    }

    public void warn(final String msg, final Object... args) {
        LOG.warn(DbndAppLog.LOG_PREFIX + msg, args);
    }

    public void error(final String msg, final Object... args) {
        LOG.error(DbndAppLog.LOG_PREFIX + msg, args);
    }

    public void verbose(final String msg, final Object... args) {
        if (DbndWrapper.instance().config().isVerbose()) {
            LOG.info(DbndAppLog.LOG_PREFIX + "v " + msg, args);
        }
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndClient.java
/*
 * © Copyright Databand.ai, an IBM Company 2022-2024
 */

package ai.databand;

import ai.databand.config.DbndConfig;
import ai.databand.id.Uuid5;
import ai.databand.schema.AddTaskRuns;
import ai.databand.schema.AirflowTaskContext;
import ai.databand.schema.ErrorInfo;
import ai.databand.schema.InitRun;
import ai.databand.schema.InitRunArgs;
import ai.databand.schema.LogDataset;
import ai.databand.schema.LogDatasets;
import ai.databand.schema.LogMetric;
import ai.databand.schema.LogMetrics;
import ai.databand.schema.LogTarget;
import ai.databand.schema.LogTargets;
import ai.databand.schema.Metric;
import ai.databand.schema.NewRunInfo;
import ai.databand.schema.RootRun;
import ai.databand.schema.SaveExternalLinks;
import ai.databand.schema.SaveTaskRunLog;
import ai.databand.schema.SetRunState;
import ai.databand.schema.TaskDefinition;
import ai.databand.schema.TaskRun;
import ai.databand.schema.TaskRunAttemptUpdate;
import ai.databand.schema.TaskRunEnv;
import ai.databand.schema.TaskRunsInfo;
import ai.databand.schema.TrackingSource;
import ai.databand.schema.UpdateTaskRunAttempts;
import org.slf4j.LoggerFactory;
import retrofit2.Call;
import retrofit2.Response;

import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.ConnectException;
import java.net.URLEncoder;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * DBND tracking API client.
 */
public class DbndClient {

    private static final DbndAppLog LOG = new DbndAppLog(LoggerFactory.getLogger(DbndClient.class));

    private final DbndApi dbnd;
    private final DbndConfig config;

    public DbndClient(DbndConfig dbndConfig) {
        config = dbndConfig;
        dbnd = new DbndApiBuilder(dbndConfig).build();
    }

    /**
     * Init new DBND run.
     *
     * @param jobName            job name
     * @param runId              run id
     * @param user               user
     * @param runName            run name
     * @param taskRunsInfo       task runs info
     * @param airflowTaskContext airflow task context
     * @param root               root run definition
     * @return runUid of created run
     */
    public String initRun(String jobName,
                          String runId,
                          String user,
                          String runName,
                          TaskRunsInfo taskRunsInfo,
                          AirflowTaskContext airflowTaskContext,
                          RootRun root) {
        return this.initRun(jobName, runId, user, runName, taskRunsInfo, airflowTaskContext, root, null, null, null);
    }

    /**
     * Init new DBND run.
     *
     * @param jobName            job name
     * @param runId              run id
     * @param user               user
     * @param runName            run name
     * @param taskRunsInfo       task runs info
     * @param airflowTaskContext airflow task context
     * @param root               root run definition
     * @param source             tracking source: "airflow" or "azkaban"
     * @param trackingSource     tracking source definition
     * @return runUid of created run
     */
    public String initRun(String jobName,
                          String runId,
                          String user,
                          String runName,
                          TaskRunsInfo taskRunsInfo,
                          AirflowTaskContext airflowTaskContext,
                          RootRun root,
                          String source,
                          TrackingSource trackingSource,
                          String projectName) {
        ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC);
        String runUid = new Uuid5("RUN_UID", runId).toString();
        String driverTaskUid = new Uuid5("DRIVER_TASK", runId).toString();
        String taskRunEnvUid = new Uuid5("TASK_RUN_ENV_UID", runId).toString();
        String userCodeVersion = new Uuid5("USER_CODE_VERSION", runId).toString();
        String machine = "";
        String databandVersion = "";
        String env = "local";
        String cloudType = "local";
        RootRun rootRun = root == null
            ? new RootRun("", null, runUid, null)
            : root;
        InitRun data = new InitRun(
            new InitRunArgs(
                runUid,
                rootRun.getRootRunUid(),
                driverTaskUid,
                new NewRunInfo(
                    null,
                    env,
                    null,
                    "jvm",
                    now,
                    now,
                    false,
                    "jvm",
                    false,
                    runUid,
                    cloudType,
                    "",
                    runUid,
                    jobName,
                    user,
                    null,
                    runName,
                    "RUNNING",
                    now,
                    rootRun,
                    projectName
                ),
                new TaskRunEnv(
                    "None",
                    taskRunEnvUid,
                    user,
                    userCodeVersion,
                    machine,
                    "",
                    now,
                    databandVersion,
                    "/",
                    true
                ),
                taskRunsInfo,
                airflowTaskContext,
                source,
                trackingSource
            )
        );
        Call<Void> call = dbnd.initRun(data);
        Optional<Object> res = safeExecuteVoid(call);
        String projectNameOrDefault = projectName == null ? "default" : projectName;
        if (res.isPresent()) {
            LOG.info("[root_run_uid: {}, pipeline_name: {}, run_name: {}, project_name: {}]", runUid, jobName, runName, projectNameOrDefault);
            String runLink = getDbndRunLink(jobName, runUid);
            if (!runLink.isEmpty()) {
                LOG.info("Dbnd started tracking, the run can be viewed at: {}", runLink);
            }
            return runUid;
        } else {
            LOG.error("[root_run_uid: {}, pipeline_name: {}, run_name: {}, project_name: {}] init_run HTTP request to tracker failed", runUid, jobName, runName, projectNameOrDefault);
        }
        throw new RuntimeException("Unable to init run because HTTP request to the tracker failed. "
            + "Check messages above for error details. "
            + "Your run will continue but it won't be tracked by Databand");
    }

    /**
     * Add task runs (method executions).
     *
     * @param rootRunUid      root run uid to add tasks to
     * @param runId           run id
     * @param taskRuns        task graph
     * @param taskDefinitions task definitions list
     * @param parentChildMap  parent-child map
     * @param upstreamsMap    upstreams map
     * @return TaskRunsInfo object
     */
    public TaskRunsInfo addTaskRuns(String rootRunUid,
                                    String runId,
                                    List<TaskRun> taskRuns,
                                    List<TaskDefinition> taskDefinitions,
                                    List<List<String>> parentChildMap,
                                    List<List<String>> upstreamsMap) {
        String taskRunEnvUid = new Uuid5("TASK_RUN_ENV_UID", runId).toString();
        TaskRunsInfo taskRunsInfo = new TaskRunsInfo(
            taskRunEnvUid,
            parentChildMap,
            rootRunUid,
            taskRuns,
            Collections.emptyList(),
            rootRunUid,
            upstreamsMap,
            true,
            taskDefinitions,
            null
        );
        Call<Void> call = dbnd.addTaskRuns(new AddTaskRuns(taskRunsInfo));
        Optional<Object> res = safeExecuteVoid(call);
        if (res.isPresent()) {
            for (TaskRun task : taskRuns) {
                LOG.info("[task_run_uid: {}, task_name: {}] task created", task.getTaskRunUid(), task.getName());
            }
        } else {
            LOG.error("[root_run_uid: {}] unable to add tasks", rootRunUid);
        }
        return taskRunsInfo;
    }

    /**
     * Update task run attempts with given state.
     *
     * @param taskRunUid        task run UID
     * @param taskRunAttemptUid task run attempt UID
     * @param state             state: RUNNING, FAILED, SUCCESS
     * @param errorInfo         error details in case of failure
     * @param startDate         task start date, required for proper task duration calculation
     */
    public void updateTaskRunAttempt(String taskRunUid, String taskRunAttemptUid, String state, ErrorInfo errorInfo, ZonedDateTime startDate) {
        updateTaskRunAttempt(taskRunUid, taskRunAttemptUid, state, errorInfo, startDate, null);
    }

    /**
     * Update task run attempts with given state.
     *
     * @param taskRunUid        task run UID
     * @param taskRunAttemptUid task run attempt UID
     * @param state             state: RUNNING, FAILED, SUCCESS
     * @param errorInfo         error details in case of failure
     * @param startDate         task start date, required for proper task duration calculation
     * @param linksDict         external links, e.g. Airflow or Azkaban run, Spark History server
     */
    public void updateTaskRunAttempt(String taskRunUid, String taskRunAttemptUid, String state, ErrorInfo errorInfo, ZonedDateTime startDate, Map<String, String> linksDict) {
        UpdateTaskRunAttempts taskRunAttempts = new UpdateTaskRunAttempts(
            Collections.singletonList(
                new TaskRunAttemptUpdate(
                    taskRunUid,
                    taskRunAttemptUid,
                    state,
                    ZonedDateTime.now(ZoneOffset.UTC),
                    startDate,
                    errorInfo,
                    linksDict
                )
            )
        );
        Call<Void> call = dbnd.updateTaskRunAttempts(taskRunAttempts);
        Optional<Object> res = safeExecuteVoid(call);
        if (res.isPresent()) {
            LOG.info("[task_run_uid: {}, task_run_attempt_uid: {}] task updated with state [{}]", taskRunUid, taskRunAttemptUid, state);
        } else {
            LOG.error("[task_run_uid: {}, task_run_attempt_uid: {}] unable to update task with state [{}]", taskRunUid, taskRunAttemptUid, state);
        }
    }

    /**
     * Set run state.
     *
     * @param runUid task run UID
     * @param state  state: RUNNING, FAILED, SUCCESS
     */
    public void setRunState(String runUid, String state) {
        SetRunState data = new SetRunState(
            runUid,
            state,
            ZonedDateTime.now(ZoneOffset.UTC)
        );
        Call<Void> call = dbnd.setRunState(data);
        Optional<Object> res = safeExecuteVoid(call);
        if (res.isPresent()) {
            LOG.info("[root_run_uid: {}] run state set to [{}]", runUid, state);
        } else {
            LOG.error("[root_run_uid: {}] unable to set run state to [{}]", runUid, state);
        }
    }

    /**
     * Log task metric.
     *
     * @param taskRun task run
     * @param key     metric key
     * @param value   metric value
     * @param source  metric source, e.g. "user", "system", "spark"
     */
    public void logMetric(TaskRun taskRun, String key, String value, String source) {
        logMetrics(taskRun, Collections.singletonMap(key, value), source);
    }

    private final static int MAX_METRICS_TO_DISPLAY = 10;

    /**
     * Log task metrics.
     *
     * @param taskRun task run
     * @param metrics metrics map
     * @param source  metrics source, e.g. "user", "system", "spark"
     */
    public void logMetrics(TaskRun taskRun, Map<String, Object> metrics, String source) {
        if (metrics.isEmpty()) {
            return;
        }
        Set<String> metricsKeys = metrics.keySet();
        Collection<String> keysToLog = metricsKeys.size() > MAX_METRICS_TO_DISPLAY
            ? metricsKeys.stream().limit(MAX_METRICS_TO_DISPLAY).collect(Collectors.toList())
            : metricsKeys;
        LOG.info("[task_run_uid: {}, task_name: {}] logging metrics. Total: {}, Keys: {}", taskRun.getTaskRunUid(), taskRun.getName(), metricsKeys.size(), keysToLog);
        List<LogMetric> metricsInfo = metrics.entrySet().stream().map(
            m -> new LogMetric(
                taskRun.getTaskRunAttemptUid(),
                new Metric(
                    m.getKey(),
                    m.getValue(),
                    ZonedDateTime.now(ZoneOffset.UTC)
                ),
                source
            )
        ).collect(Collectors.toList());
        Optional<Object> res = safeExecuteVoid(dbnd.logMetrics(new LogMetrics(metricsInfo)));
        if (res.isPresent()) {
            LOG.info("[task_run_uid: {}, task_name: {}] metrics logged: Total: {}, Keys: {}", taskRun.getTaskRunUid(), taskRun.getName(), metricsKeys.size(), keysToLog);
        } else {
            LOG.error("[task_run_uid: {}, task_name: {}] unable to log metrics", taskRun.getTaskRunUid(), taskRun.getName());
        }
    }

    /**
     * Log task targets.
     *
     * @param taskRun task run
     * @param targets targets to log
     */
    public void logTargets(TaskRun taskRun, List<LogTarget> targets) {
        Optional<Object> res = safeExecuteVoid(dbnd.logTargets(new LogTargets(targets)));
        if (res.isPresent()) {
            LOG.info("[task_run_uid: {}, task_name: {}] targets submitted", taskRun.getTaskRunUid(), taskRun.getName());
        } else {
            LOG.error("[task_run_uid: {}, task_name: {}] unable to submit targets", taskRun.getTaskRunUid(), taskRun.getName());
        }
    }

    /**
     * Log task dataset operations.
     *
     * @param taskRun  task run
     * @param datasets dataset operations to log
     */
    public void logDatasetOperations(TaskRun taskRun, List<LogDataset> datasets) {
        for (LogDataset op : datasets) {
            LOG.info("[task_run_uid: {}, task_name: {}] logging dataset operation {}", taskRun.getTaskRunUid(), taskRun.getName(), op);
        }
        Optional<Object> res = safeExecuteVoid(dbnd.logDatasets(new LogDatasets(datasets)));
        if (res.isPresent()) {
            LOG.info("[task_run_uid: {}, task_name: {}] dataset operations submitted", taskRun.getTaskRunUid(), taskRun.getName());
        } else {
            LOG.error("[task_run_uid: {}, task_name: {}] unable to submit dataset operations", taskRun.getTaskRunUid(), taskRun.getName());
        }
    }

    /**
     * Save task run attempt external link.
     *
     * @param taskRunAttemptUid task run attempt UID
     * @param name              link name, e.g. "Azkaban execution"
     * @param url               link URL
     */
    public void saveExternalLinks(String taskRunAttemptUid, String name, String url) {
        Optional<Object> res = safeExecuteVoid(dbnd.saveExternalLinks(new SaveExternalLinks(taskRunAttemptUid, Collections.singletonMap(name, url))));
        if (res.isPresent()) {
            LOG.info("[task_run_attempt_uid: {}] external link saved", taskRunAttemptUid);
        } else {
            LOG.error("[task_run_attempt_uid: {}] Unable to save external link", taskRunAttemptUid);
        }
    }

    /**
     * Save task run attempt external links.
     *
     * @param taskRunAttemptUid task run attempt UID
     * @param linksDict         links map
     */
    public void saveExternalLinks(String taskRunAttemptUid, Map<String, String> linksDict) {
        Optional<Object> res = safeExecuteVoid(dbnd.saveExternalLinks(new SaveExternalLinks(taskRunAttemptUid, linksDict)));
        if (res.isPresent()) {
            LOG.info("[task_run_attempt_uid: {}] external link saved", taskRunAttemptUid);
        } else {
            LOG.error("[task_run_attempt_uid: {}] Unable to save external link", taskRunAttemptUid);
        }
    }

    /**
     * Save task logs.
     *
     * @param taskRunAttemptUid task run attempt UID
     * @param logBody           log body
     */
    public void saveTaskLog(String taskRunUid, String taskRunAttemptUid, String logBody) {
        if (logBody == null) {
            return;
        }
        LOG.info("[task_run_uid: {}, task_run_attempt_uid: {}] submitting task execution log (from local logger), total log size: {} characters", taskRunUid, taskRunAttemptUid, logBody.length());
        SaveTaskRunLog body = new SaveTaskRunLog(config, taskRunAttemptUid, logBody);
        Optional<Object> res = safeExecuteVoid(dbnd.saveTaskRunLog(body));
        if (res.isPresent()) {
            LOG.info("[task_run_uid: {}, task_run_attempt_uid: {}] task execution log submitted", taskRunUid, taskRunAttemptUid);
        } else {
            LOG.error("[task_run_uid: {}, task_run_attempt_uid: {}] Unable to submit task execution log", taskRunUid, taskRunAttemptUid);
        }
    }

    /**
     * Wrap retrofit exception and response handling for Void calls.
     *
     * @param call prepared Retrofit HTTP call
     * @return execute result wrapped in Optional
     */
    protected Optional<Object> safeExecuteVoid(Call<Void> call) {
        try {
            if (config.isVerbose()) {
                LOG.info("v Executing HTTP request to the Databand tracker, URL of the request: {}", call.request().url());
            }
            Response<Void> res = call.execute();
            if (res.isSuccessful()) {
                return Optional.of(new Object());
            } else {
                String errorBody = res.errorBody().string();
                String errorMsg = String.format("HTTP request to the Databand tracker '%s' failed: %s %s -\n %s", config.databandUrl(), res.code(), res.message(), errorBody);
                LOG.error(errorMsg);
                if (res.code() == 400) {
                    LOG.warn("Check DBND__CORE__DATABAND_ACCESS_TOKEN variable. Looks like the token is missing or wrong");
                } else if (res.code() == 401) {
                    LOG.warn("Not Authorized. Check DBND__CORE__DATABAND_ACCESS_TOKEN variable. Looks like the token is expired");
                } else if (res.code() >= 500) {
                    LOG.warn("There is an internal error at Databand Service. Please open a support ticket.");
                } else {
                    LOG.warn("Make sure Databand tracker is up and running at {}", config.databandUrl());
                }
                return Optional.empty();
            }
        } catch (ConnectException ex) {
            LOG.error("Could not connect to the tracking server at {}. "
                + "Check that the server is available and the Databand tracker is up and running.\n"
                + "Exception: {}", config.databandUrl(), ex.getMessage());
            return Optional.empty();
        } catch (IOException e) {
            LOG.error("HTTP request to the tracking server at {} failed.\nException: {}", config.databandUrl(), e.getMessage());
            return Optional.empty();
        }
    }

    private String getDbndRunLink(String jobName, String rootRunUid) {
        StringBuilder res = new StringBuilder(config.databandUrl());
        res.append("/app/jobs");
        try {
            res.append("/" + URLEncoder.encode(jobName, UTF_8.toString()));
            res.append("/" + URLEncoder.encode(rootRunUid, UTF_8.toString()));
            return res.toString();
        } catch (UnsupportedEncodingException e) {
            // This error might happen when a custom job name can't be encoded as a URI.
            // It may occur when the user has an encoding other than UTF-8 and uses a char that is not valid in UTF-8.
            // We catch the exception to prevent error propagation and return an empty string.
            return "";
        }
    }

    /**
     * API client.
     *
     * @return API client.
     */
    public DbndApi api() {
        return dbnd;
    }
}
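A minimal sketch of the client's happy path (TrackingDemo is a hypothetical name). The TaskRunsInfo positional arguments mirror the 10-arg constructor call visible in addTaskRuns() above; their exact semantics are an assumption, and initRun throws a RuntimeException if the tracker is unreachable:

import ai.databand.DbndClient;
import ai.databand.config.DbndConfig;
import ai.databand.schema.TaskRunsInfo;

import java.util.Collections;

public class TrackingDemo {

    public static void main(String[] args) {
        DbndClient client = new DbndClient(new DbndConfig());
        // empty task-runs payload, shaped like the TaskRunsInfo built in addTaskRuns()
        TaskRunsInfo empty = new TaskRunsInfo(
            "env-uid", Collections.emptyList(), null,
            Collections.emptyList(), Collections.emptyList(), null,
            Collections.emptyList(), true, Collections.emptyList(), null);
        String runUid = client.initRun("my_pipeline", "run-1", "etl_user",
            "nightly", empty, null, null); // no airflow context, default root run
        client.setRunState(runUid, "SUCCESS");
    }
}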
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndLogAppender.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand;

import org.apache.log4j.AppenderSkeleton;
import org.apache.log4j.Logger;
import org.apache.log4j.spi.LoggingEvent;

public class DbndLogAppender extends AppenderSkeleton {

    private final DbndWrapper dbndWrapper;

    public DbndLogAppender(DbndWrapper dbndWrapper) {
        this.dbndWrapper = dbndWrapper;
    }

    public void addAppenders(final String... loggers) {
        Logger rlog = Logger.getRootLogger();
        boolean logAdditivity = rlog.getAdditivity();
        if (!logAdditivity) {
            // Log additivity is "false" on Spark.
            // We need to enable it, otherwise Spark logs are not correctly collected locally.
            rlog.setAdditivity(true);
        }
        if (loggers.length > 0) {
            for (String logger : loggers) {
                Logger.getLogger(logger).addAppender(this);
            }
        } else {
            rlog.addAppender(this);
        }
        // restore original value, no side effects
        rlog.setAdditivity(logAdditivity);
    }

    @Override
    protected void append(LoggingEvent event) {
        appendInternal(event);
    }

    public void appendInternal(LoggingEvent event) {
        dbndWrapper.logTask(event, layout.format(event));
    }

    @Override
    public void close() {
        // do nothing
    }

    @Override
    public boolean requiresLayout() {
        return true;
    }
}
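The appender requires a layout (requiresLayout() returns true), so it must be configured before attaching. A wiring sketch that mirrors how DbndWrapper's constructor installs it further below; AppenderWiring is a hypothetical name:

import ai.databand.DbndLogAppender;
import ai.databand.DbndWrapper;
import org.apache.log4j.Level;
import org.apache.log4j.PatternLayout;

public class AppenderWiring {

    public static void install() {
        DbndLogAppender appender = new DbndLogAppender(DbndWrapper.instance());
        appender.setLayout(new PatternLayout("[%d] {%c{2}} %p - %m%n"));
        appender.setThreshold(Level.INFO);
        appender.activateOptions();
        // attach to specific loggers; calling with no arguments attaches to the root logger
        appender.addAppenders("org.apache.spark", "ai.databand");
    }
}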
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndPropertyNames.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand;

public abstract class DbndPropertyNames {

    /**
     * Databand tracker URL.
     */
    public static final String DBND__CORE__DATABAND_URL = "dbnd.core.databand_url";

    /**
     * Databand tracker personal access token.
     */
    public static final String DBND__CORE__DATABAND_ACCESS_TOKEN = "dbnd.core.databand_access_token";

    /**
     * Databand CSRF token.
     */
    public static final String DBND__CSRF_TOKEN = "dbnd.csrf_token";

    /**
     * Databand session cookie.
     */
    public static final String DBND__SESSION_COOKIE = "dbnd.session_cookie";

    /**
     * Tracking enabled flag.
     */
    public static final String DBND__TRACKING = "dbnd.tracking";

    /**
     * Tracking enabled flag.
     */
    public static final String DBND__TRACKING__ENABLED = "dbnd.tracking.enabled";

    /**
     * Turn on verbose logging for tracking requests.
     */
    public static final String DBND__VERBOSE = "dbnd.verbose";

    /**
     * Turn off rich data preview (dataframes and histograms).
     *
     * @deprecated use DBND__TRACKING__LOG_VALUE_PREVIEW instead
     */
    @Deprecated
    public static final String DBND__TRACKING__DATA_PREVIEW = "dbnd.tracking.data_preview";

    /**
     * Calculate and log value preview. Can be expensive on Spark.
     */
    public static final String DBND__TRACKING__LOG_VALUE_PREVIEW = "dbnd.tracking.log_value_preview";

    /**
     * Max head size of the log file, in bytes, to be sent to the server. Default 32KiB.
     */
    public static final String DBND__LOG__PREVIEW_HEAD_BYTES = "dbnd.log.preview_head_bytes";

    /**
     * Max tail size of the log file, in bytes, to be sent to the server. Default 32KiB.
     */
    public static final String DBND__LOG__PREVIEW_TAIL_BYTES = "dbnd.log.preview_tail_bytes";

    /**
     * Turn on advanced Spark I/O tracking.
     */
    public static final String DBND__SPARK__IO_TRACKING_ENABLED = "dbnd.spark.io_tracking_enabled";

    /**
     * Project name.
     */
    public static final String DBND__TRACKING__PROJECT = "dbnd.tracking.project";

    /**
     * Override job name.
     */
    public static final String DBND__RUN__JOB_NAME = "dbnd.tracking.job";

    /**
     * Override run name.
     */
    public static final String DBND__RUN__NAME = "dbnd.run_info.name";

    /**
     * List of Azkaban projects to sync. If not specified, all projects will be synced.
     */
    public static final String DBND__AZKABAN__SYNC_PROJECTS = "dbnd.azkaban.sync_projects";

    /**
     * List of Azkaban flows to sync. If not specified, all flows will be synced.
     */
    public static final String DBND__AZKABAN__SYNC_FLOWS = "dbnd.azkaban.sync_flows";

    /**
     * Airflow context env variables.
     */
    public static final String AIRFLOW_CTX_UID = "AIRFLOW_CTX_UID";
    public static final String AIRFLOW_CTX_DAG_ID = "AIRFLOW_CTX_DAG_ID";
    public static final String AIRFLOW_CTX_EXECUTION_DATE = "AIRFLOW_CTX_EXECUTION_DATE";
    public static final String AIRFLOW_CTX_TASK_ID = "AIRFLOW_CTX_TASK_ID";
    public static final String AIRFLOW_CTX_TRY_NUMBER = "AIRFLOW_CTX_TRY_NUMBER";

    /**
     * Databand context env variables.
     */
    public static final String DBND_ROOT_RUN_UID = "dbnd_root_run_uid";
    public static final String DBND_PARENT_TASK_RUN_UID = "dbnd_parent_task_run_uid";
    public static final String DBND_PARENT_TASK_RUN_ATTEMPT_UID = "dbnd_parent_task_run_attempt_uid";
    public static final String DBND_TRACE_ID = "dbnd_trace_id";

    /**
     * Databand internal alias.
     */
    public static final String DBND_INTERNAL_ALIAS = "DBND_INTERNAL";
}
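A sketch of reading two of these keys directly from JVM system properties, e.g. set via -Ddbnd.core.databand_url=...; PropsDemo is a hypothetical name, and how the configuration chain (Env, JavaOpts, NormalizedProps) maps environment variables onto these dotted names is not shown in this listing:

import ai.databand.DbndPropertyNames;

public class PropsDemo {

    public static void main(String[] args) {
        String url = System.getProperty(DbndPropertyNames.DBND__CORE__DATABAND_URL, "http://localhost:8080");
        boolean verbose = Boolean.parseBoolean(System.getProperty(DbndPropertyNames.DBND__VERBOSE, "false"));
        System.out.printf("tracker: %s, verbose: %s%n", url, verbose);
    }
}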
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndRun.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand;

import ai.databand.log.HistogramRequest;
import ai.databand.log.LogDatasetRequest;
import ai.databand.schema.ColumnStats;
import ai.databand.schema.DatasetOperationStatus;
import ai.databand.schema.DatasetOperationType;
import ai.databand.schema.TaskRun;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.spark.scheduler.SparkListenerStageCompleted;
import org.apache.spark.sql.Dataset;

import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Optional;

/**
 * DBND run.
 */
public interface DbndRun {

    /**
     * Init run in DBND using pipeline root execution method pointcut.
     */
    void init(Method method, Object[] args);

    /**
     * Start task in the run context.
     */
    void startTask(Method method, Object[] args);

    /**
     * Set task state to 'error'.
     */
    void errorTask(Method method, Throwable error);

    /**
     * Set task state to 'completed'.
     */
    void completeTask(Method method, Object result);

    /**
     * Stop run. Set run state to 'completed'.
     */
    void stop();

    /**
     * Submit driver task metrics when run context was external.
     */
    void stopExternal();

    /**
     * When dataset operations were sent after Spark started to initialize its shutdown sequence,
     * we need to send a stop signal to the run.
     */
    void stopListener();

    /**
     * Stop run. Set run state to 'failed'.
     */
    void error(Throwable error);

    /**
     * Log metric and attach it to the current task.
     */
    void logMetric(String key, Object value);

    /**
     * Log Spark dataframe.
     */
    void logDataframe(String key, Dataset<?> value, HistogramRequest histogramRequest);

    /**
     * Log histogram object.
     */
    void logHistogram(Map<String, Object> histogram);

    /**
     * Log dataset operation.
     */
    void logDatasetOperation(String path,
                             DatasetOperationType type,
                             DatasetOperationStatus status,
                             String valuePreview,
                             String error,
                             List<Long> dataDimensions,
                             Object dataSchema,
                             Boolean withPartition,
                             List<ColumnStats> columnStats,
                             String operationSource);

    /**
     * Log dataset operation with options like preview and schema generation.
     */
    void logDatasetOperation(String path,
                             DatasetOperationType type,
                             DatasetOperationStatus status,
                             Dataset<?> data,
                             Throwable error,
                             LogDatasetRequest params,
                             String operationSource);

    /**
     * Log Deequ result.
     */
    // void logDeequResult(String dfName, AnalyzerContext analyzerContext);

    /**
     * Log metrics batch and attach it to the current task.
     */
    void logMetrics(Map<String, Object> metrics);

    /**
     * Log metrics batch with source.
     */
    void logMetrics(Map<String, Object> metrics, String source);

    /**
     * Save log and attach it to the current task and all parent tasks.
     */
    void saveLog(LoggingEvent event, String formattedEvent);

    /**
     * Save spark metrics.
     */
    void saveSparkMetrics(SparkListenerStageCompleted event);

    /**
     * Extract task name either from method name or annotation value.
     */
    String getTaskName(Method method);

    /**
     * Override driver task run to avoid creating duplicate runs.
     */
    void setDriverTask(TaskRun taskRun);

    /**
     * Retrieve driver task.
     */
    Optional<TaskRun> getDriverTask();
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DbndWrapper.java
/*
 * © Copyright Databand.ai, an IBM Company 2022-2024
 */

package ai.databand;

import ai.databand.config.DbndConfig;
import ai.databand.log.HistogramRequest;
import ai.databand.log.LogDatasetRequest;
import ai.databand.schema.ColumnStats;
import ai.databand.schema.DatabandTaskContext;
import ai.databand.schema.DatasetOperationStatus;
import ai.databand.schema.DatasetOperationType;
import ai.databand.schema.LogDataset;
import ai.databand.schema.TaskRun;
import javassist.ClassPool;
import javassist.Loader;
import org.apache.log4j.Level;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.spark.scheduler.SparkListenerEvent;
import org.apache.spark.scheduler.SparkListenerStageCompleted;
import org.apache.spark.sql.Dataset;
import org.slf4j.LoggerFactory;

import java.lang.reflect.Method;
import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
 * AspectJ wrapper for @Pipeline and @Task annotations.
 */
public class DbndWrapper {

    private static final DbndAppLog LOG = new DbndAppLog(LoggerFactory.getLogger(DbndWrapper.class));

    private DbndClient dbnd;
    private DbndConfig config;

    // state
    private final Set<String> loadedClasses;
    private final Map<String, Method> methodsCache;
    private DbndRun run;
    private boolean pipelineInitialized;
    private final Deque<String> stack;
    private boolean externalContextSet = false;

    /**
     * Indicates that Spark has started its shutdown sequence.
     */
    private boolean isSparkShutdown = false;

    private static final DbndWrapper INSTANCE = new DbndWrapper();

    public static DbndWrapper instance() {
        return INSTANCE;
    }

    public DbndWrapper() {
        initClient();
        methodsCache = new HashMap<>(1);
        stack = new ArrayDeque<>(1);
        loadedClasses = new HashSet<>(1);
        // inject a log4j appender which will capture all log output and send it to the tracker
        String pattern = "[%d] {%c{2}} %p - %m%n";
        DbndLogAppender dbndAppender = new DbndLogAppender(this);
        dbndAppender.setLayout(new PatternLayout(pattern));
        dbndAppender.setThreshold(Level.INFO);
        dbndAppender.activateOptions();
        dbndAppender.addAppenders("org.apache.spark", "org.spark_project", "ai.databand");
    }

    protected void initClient() {
        config = new DbndConfig();
        if (config.isVerbose()) {
            LOG.info("v Parsed Databand properties: {}", config);
            LOG.info("v Executing Spark submit: {}", config.cmd());
        }
        try {
            dbnd = new DbndClient(config);
        } catch (Exception e) {
            dbnd = null;
            LOG.error("Unable to initialize DbndClient, tracking will be disabled. Reason: {}", e.getMessage());
            config.setTrackingEnabled(false);
        }
    }

    public Optional<Class<?>> loadClass(String className) {
        try {
            return Optional.of(Class.forName(className));
        } catch (ClassNotFoundException e) {
            // do nothing, the class loader we've got doesn't have the pipeline class
        }
        try {
            // try to use the Javassist classloader
            return Optional.of(new Loader(ClassPool.getDefault()).loadClass(className));
        } catch (ClassNotFoundException e) {
            // do nothing
        }
        return Optional.empty();
    }

    public void beforePipeline(String className, String methodName, Object[] args) {
        Method method = findMethodByName(methodName, className);
        if (method == null) {
            pipelineInitialized = false;
            return;
        }
        LOG.verbose("Enabled Databand pipeline tracking for class '{}', method '{}' and pushing results to the URL: {}", className, methodName, config.databandUrl());
        getOrCreateRun(method, args);
        pipelineInitialized = true;
    }

    protected Method findMethodByName(String methodName, String classname) {
        if (classname != null && !loadedClasses.contains(classname)) {
            loadMethods(classname);
        }
        String truncated = removeArgsFromMethodName(methodName);
        for (Map.Entry<String, Method> mthd : methodsCache.entrySet()) {
            if (mthd.getKey().contains(truncated)) {
                return mthd.getValue();
            }
        }
        return null;
    }

    /**
     * Removes the arguments part from the string representation of a method name.
     * ai.databand.JavaSparkPipeline.execute(java.lang.String) → ai.databand.JavaSparkPipeline.execute(
     * The opening parenthesis should be present in the result because it is used later in the methods cache lookup.
     */
    protected String removeArgsFromMethodName(String methodName) {
        int parenIndex = methodName.indexOf("(");
        return parenIndex > 0 ? methodName.substring(0, parenIndex + 1) : methodName;
    }

    protected void loadMethods(String classname) {
        Optional<Class<?>> pipelineClass = loadClass(classname);
        if (!pipelineClass.isPresent()) {
            LOG.error("Unable to build method cache for class {} because it can not be loaded", classname);
            pipelineInitialized = false;
            return;
        }
        for (Method mthd : pipelineClass.get().getDeclaredMethods()) {
            String fullMethodName = mthd.toGenericString();
            methodsCache.put(fullMethodName, mthd);
        }
        loadedClasses.add(classname);
    }

    public void afterPipeline() {
        stop();
        cleanup();
    }

    /**
     * This method is useful when stopping pipelines run from Databricks Notebooks.
     * The delay is required because in some cases Listener processing happens after script completion.
     * To handle this, a delay is added so the Listener can process all events before the run is completed.
     */
    public void afterPipeline(int delayInSeconds) {
        LOG.info("Stopping run with delay");
        try {
            TimeUnit.SECONDS.sleep(delayInSeconds);
        } catch (InterruptedException e) {
            // do nothing
        }
        stop();
        cleanup();
        LOG.info("Run stopped");
    }

    /**
     * Convenient wrapper with a proper name for use in Databricks Notebooks instead of the cryptic afterPipeline().
     */
    public void forceStop() {
        afterPipeline(5);
    }

    public void errorPipeline(Throwable error) {
        currentRun().error(error);
        cleanup();
    }

    protected void cleanup() {
        run = null;
        externalContextSet = false;
        methodsCache.clear();
        pipelineInitialized = false;
        loadedClasses.clear();
        initClient();
    }

    public void beforeTask(String className, String methodName, Object[] args) {
        if (!pipelineInitialized) {
            // this is the first task, let's initialize the pipeline
            if (stack.isEmpty()) {
                beforePipeline(className, methodName, args);
                stack.push(methodName);
            } else {
                // main method was loaded by a different classloader
                beforePipeline(className, stack.peek(), args);
            }
            return;
        }
        DbndRun run = currentRun();
        Method method = findMethodByName(methodName, className);
        LOG.info("Running task {}", run.getTaskName(method));
        run.startTask(method, args);
        stack.push(methodName);
    }

    public void afterTask(String methodName, Object result) {
        stack.pop();
        if (stack.isEmpty()) {
            // this was the last task in the stack, i.e. the pipeline
            afterPipeline();
            return;
        }
        DbndRun run = currentRun();
        Method method = findMethodByName(methodName, null);
        run.completeTask(method, result);
        LOG.info("Task {} has been completed!", run.getTaskName(method));
    }

    public void errorTask(String methodName, Throwable error) {
        String poll = stack.pop();
        LOG.info("Task {} returned error!", poll);
        if (stack.isEmpty()) {
            // this was the last task in the stack, i.e. the pipeline
            errorPipeline(error);
            return;
        }
        DbndRun run = currentRun();
        Method method = findMethodByName(methodName, null);
        run.errorTask(method, error);
    }

    public void logTask(LoggingEvent event, String eventStr) {
        DbndRun run = currentRun();
        if (run == null) {
            return;
        }
        run.saveLog(event, eventStr);
    }

    public void logMetric(String key, Object value) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logMetric(key, value);
        LOG.info("Metric logged: [{}: {}]", key, value);
    }

    public void logDatasetOperation(String path,
                                    DatasetOperationType type,
                                    DatasetOperationStatus status,
                                    Dataset<?> data,
                                    Throwable error,
                                    LogDatasetRequest params) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logDatasetOperation(path, type, status, data, error, params, LogDataset.OP_SOURCE_JAVA_MANUAL_LOGGING);
        LOG.info("Dataset Operation [path: {}], [type: {}], [status: {}] logged", path, type, status);
    }

    public void logDatasetOperation(String path,
                                    DatasetOperationType type,
                                    DatasetOperationStatus status,
                                    String valuePreview,
                                    List<Long> dataDimensions,
                                    String dataSchema,
                                    Boolean withPartition,
                                    List<ColumnStats> columnStats,
                                    String operationSource) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logDatasetOperation(path, type, status, valuePreview, null, dataDimensions, dataSchema, withPartition, columnStats, operationSource);
        LOG.info("Dataset Operation [path: '{}'], [type: {}], [status: {}] logged", path, type, status);
        if (isSparkShutdown) {
            // If Spark is in the shutdown sequence, pyspark tracking is already completed.
            // This call ensures the Spark Listener will send a `stop` signal.
            LOG.info("Sending \"SUCCESS\" signal to the task run");
            run.stopListener();
        }
    }

    public void logMetrics(Map<String, Object> metrics) {
        logMetrics(metrics, null);
    }

    public void logMetrics(Map<String, Object> metrics, String source) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logMetrics(metrics, source);
    }

    public void logDataframe(String key, Dataset<?> value, HistogramRequest histogramRequest) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logDataframe(key, value, histogramRequest);
    }

    public void logHistogram(Map<String, Object> histogram) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logHistogram(histogram);
    }

    public void logDataframe(String key, Dataset<?> value, boolean withHistograms) {
        DbndRun run = currentRun();
        if (run == null) {
            run = createAgentlessRun();
        }
        run.logDataframe(key, value, new HistogramRequest(withHistograms));
        LOG.info("Dataframe {} logged", key);
    }

    public void logSpark(SparkListenerEvent event) {
        if (run == null) {
            run = createAgentlessRun();
        }
        if (event instanceof SparkListenerStageCompleted) {
            run.saveSparkMetrics((SparkListenerStageCompleted) event);
            LOG.info("Spark metrics received from SparkListener saved");
        }
    }

    public DbndConfig config() {
        return config;
    }

    // TODO: replace synchronized with a better approach to avoid performance bottlenecks
    private synchronized DbndRun getOrCreateRun(Method method, Object[] args) {
        if (currentRun() == null) {
            initRun(method, args);
        }
        return currentRun();
    }

    private DbndRun createAgentlessRun() {
        // add a JVM shutdown hook so the run will be completed after the Spark job stops;
        // the hook should be added first, because the listener is called asynchronously and Spark can initialize its stop sequence
        if (!config.isTrackingEnabled()) {
            return new NoopDbndRun();
        }
        Runtime.getRuntime().addShutdownHook(new Thread(this::stop));
        // check if we're running inside a databand task context
        if (config.databandTaskContext().isPresent()) {
            // don't init the run from scratch, reuse values
            run = config.isTrackingEnabled() ? new DefaultDbndRun(dbnd, config) : new NoopDbndRun();
            if (!config.isTrackingEnabled()) {
                LOG.info("Databand tracking is not enabled.
Set DBND__TRACKING variable to True if you want to enable it."); } DatabandTaskContext dbndCtx = config.databandTaskContext().get(); TaskRun driverTask = new TaskRun(); driverTask.setRunUid(dbndCtx.getRootRunUid()); driverTask.setTaskRunUid(dbndCtx.getTaskRunUid()); driverTask.setTaskRunAttemptUid(dbndCtx.getTaskRunAttemptUid()); config.airflowContext().ifPresent(ctx -> driverTask.setName(ctx.getTaskId())); run.setDriverTask(driverTask); LOG.info("Reusing existing databand task '{}', taskUid: '{}' ", driverTask.getName(), driverTask.getTaskRunUid()); } else { try { StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace(); StackTraceElement main = null; for (StackTraceElement el : stackTrace) { if (el.getMethodName().equals("main")) { main = el; break; } } if (main == null) { main = stackTrace[stackTrace.length - 1]; } // workaround to prevent class not found exception when scala is using layered classloader // source: https://github.com/sbt/sbt/issues/4760 Class<?> entryPoint = Class.forName(main.getClassName(), true, Thread.currentThread().getContextClassLoader()); for (Method method : entryPoint.getMethods()) { if (method.getName().contains(main.getMethodName())) { Object[] args = new Object[method.getParameterCount()]; Arrays.fill(args, null); beforePipeline(main.getClassName(), method.getName(), args); break; } } } catch (ClassNotFoundException e) { LOG.error("Class not found: {}", e.getMessage()); // do nothing } } if (Objects.isNull(run)) { // in case pipeline is not annotated and class not found exception initializing run with no args getOrCreateRun(null, null); } return run; } protected void stop() { if (run != null) { // if run is null it means it's already stopped and cleanup() was called run.stop(); } } private void setSparkShutdown() { isSparkShutdown = true; } protected DbndRun currentRun() { return run; } private void initRun(Method method, Object[] args) { run = config.isTrackingEnabled() ? new DefaultDbndRun(dbnd, config) : new NoopDbndRun(); if (!config.isTrackingEnabled()) { LOG.info("Databand tracking is not enabled. Set DBND__TRACKING variable to True if you want to enable it."); return; } try { run.init(method, args); } catch (Exception e) { run = new NoopDbndRun(); LOG.error("Unable to init databand tracking: {}", e); } } protected void printStack() { StringBuilder buffer = new StringBuilder(3); Iterator<String> iterator = stack.iterator(); buffer.append('['); while (iterator.hasNext()) { buffer.append(' '); buffer.append(iterator.next()); buffer.append(' '); } buffer.append(']'); LOG.info(buffer.toString()); } /** * Set tracking context from external source. * This allows us to set context externally (for instance when calling pyspark script) and avoid runs duplication. * TODO: since context can be controlled externally in this way, it may sense to start/stop JVM tasks from the Python * * @param runUid * @param taskRunUid * @param taskRunAttemptUid * @param taskName */ public void setExternalTaskContext(String runUid, String taskRunUid, String taskRunAttemptUid, String taskName) { LOG.info("Setting external task context. 
run_uid: {}, task_run_uid: {}, task_run_attempt_uid: {}, task_name: {}", runUid, taskRunUid, taskRunAttemptUid, taskName); // we need to check run_uid before setting external context // if it's the same, we'd skip setting // if it's different it means that this run was executed against the already running spark // and we need to set new context if (externalContextSet && runUid.equals(run.getDriverTask().orElse(new TaskRun()).getRunUid())) { // external context was already set // listener will report all dataset ops to the root task // no need to set context again LOG.info("Skipping external context setting because it was already set for this run"); return; } if (!config.isTrackingEnabled()) { run = new NoopDbndRun(); LOG.info("Attempt to set external task context failed: tracking is not enabled"); return; } if (run == null) { run = new DefaultDbndRun(dbnd, config); // before spark will be stopped we have to submit all saved metrics from the last external task Runtime.getRuntime().addShutdownHook(new Thread(run::stopExternal)); // when pyspark is running, py tracking will complete before Spark will start shutdown sequence // Query Listener will still be working during shutdown. We need to know this because listener // has to send signal to Databand Tracker to recalculate dataset operations Runtime.getRuntime().addShutdownHook(new Thread(this::setSparkShutdown)); } // before setting context we should submit all gathered metrics from a previous context run.stopExternal(); // and then set new context TaskRun task = new TaskRun(); task.setRunUid(runUid); task.setTaskRunUid(taskRunUid); task.setTaskRunAttemptUid(taskRunAttemptUid); task.setName(taskName); run.setDriverTask(task); externalContextSet = true; LOG.info("External task context was set. run_uid: {}, task_run_uid: {}, task_run_attempt_uid: {}, task_name: {}", runUid, taskRunUid, taskRunAttemptUid, taskName); } }
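A minimal usage sketch for the wrapper above: DbndWrapper is a singleton, so manual tracking calls from user code all go through DbndWrapper.instance(). The class name MyJob below is hypothetical and used only for illustration; the calls themselves (instance(), logMetric(), logDataframe(), forceStop()) are the methods defined in the source above.

import ai.databand.DbndWrapper;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

public class MyJob { // hypothetical pipeline class, for illustration only
    public static void run(Dataset<Row> data) {
        DbndWrapper dbnd = DbndWrapper.instance();
        // manual metric logging; an agentless run is created if no run is active
        dbnd.logMetric("rows_processed", data.count());
        // log a dataframe preview without histograms
        dbnd.logDataframe("output_sample", data, false);
        // complete the run explicitly, e.g. from a Databricks notebook
        dbnd.forceStop();
    }
}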
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/DefaultDbndRun.java
/* * © Copyright Databand.ai, an IBM Company 2022-2024 */ package ai.databand; import ai.databand.config.DbndConfig; import ai.databand.id.Sha1Long; import ai.databand.id.Sha1Short; import ai.databand.id.Uuid5; import ai.databand.log.HistogramRequest; import ai.databand.log.LogDatasetRequest; import ai.databand.parameters.DatasetOperationPreview; import ai.databand.parameters.Histogram; import ai.databand.parameters.NullPreview; import ai.databand.parameters.ParametersPreview; import ai.databand.parameters.TaskParameterPreview; import ai.databand.schema.AirflowTaskContext; import ai.databand.schema.AzkabanTaskContext; import ai.databand.schema.ColumnStats; import ai.databand.schema.DatasetOperationStatus; import ai.databand.schema.DatasetOperationType; import ai.databand.schema.ErrorInfo; import ai.databand.schema.LogDataset; import ai.databand.schema.LogTarget; import ai.databand.schema.Pair; import ai.databand.schema.RootRun; import ai.databand.schema.RunAndDefinition; import ai.databand.schema.TaskDefinition; import ai.databand.schema.TaskParamDefinition; import ai.databand.schema.TaskRun; import ai.databand.schema.TaskRunParam; import ai.databand.schema.TaskRunsInfo; import ai.databand.schema.TrackingSource; import ai.databand.spark.SparkColumnStats; import org.apache.log4j.spi.LoggingEvent; import org.apache.spark.scheduler.SparkListenerStageCompleted; import org.apache.spark.scheduler.StageInfo; import org.apache.spark.sql.Dataset; import org.apache.spark.util.AccumulatorV2; import org.apache.spark.util.LongAccumulator; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import scala.collection.Iterator; import java.io.IOException; import java.io.PrintWriter; import java.io.StringWriter; import java.lang.annotation.Annotation; import java.lang.reflect.Method; import java.lang.reflect.Parameter; import java.time.ZoneOffset; import java.time.ZonedDateTime; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Deque; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.UUID; @SuppressWarnings("unchecked") public class DefaultDbndRun implements DbndRun { private static final DbndAppLog LOG = new DbndAppLog(LoggerFactory.getLogger(DefaultDbndRun.class)); private final DbndClient dbnd; private final List<TaskRun> taskRuns; private final List<TaskDefinition> taskDefinitions; private final List<List<String>> parentChildMap; private final List<List<String>> upstreamsMap; private final Deque<TaskRun> stack; // todo: methods cache should be extracted to app-level cache private final Map<Method, List<TaskParamDefinition>> methodsCache; private final Map<Method, TaskRun> methodsRunsCache; private final Map<Method, Integer> methodExecutionCounts; private final ParametersPreview parameters; private final Map<Integer, TaskRun> taskRunOutputs; private String rootRunUid; private String runId; private String jobName; private String driverTaskUid; private TaskRun driverTask; private AirflowTaskContext airflowContext; private AzkabanTaskContext azkabanTaskContext; private final DbndConfig config; public DefaultDbndRun(DbndClient dbndClient, DbndConfig config) { this.dbnd = dbndClient; this.taskRuns = new ArrayList<>(1); this.taskDefinitions = new ArrayList<>(1); this.parentChildMap = new ArrayList<>(1); this.upstreamsMap = new ArrayList<>(1); this.stack = new ArrayDeque<>(1); this.methodsCache = new HashMap<>(1); this.methodsRunsCache = new HashMap<>(1); 
this.methodExecutionCounts = new HashMap<>(1); this.parameters = new ParametersPreview(config.isPreviewEnabled()); this.taskRunOutputs = new HashMap<>(1); this.airflowContext = config.airflowContext().orElse(null); this.azkabanTaskContext = config.azkabanContext().orElse(null); this.config = config; } @Override public void init(Method method, Object[] args) { String annotationValue = getTaskName(method); this.runId = UUID.randomUUID().toString(); String user = System.getProperty("user.name"); String source = "generic_tracking"; TrackingSource trackingSource = null; if (airflowContext != null) { this.jobName = airflowContext.jobName(); source = "airflow_tracking"; trackingSource = new TrackingSource(airflowContext); } else if (azkabanTaskContext != null) { this.jobName = azkabanTaskContext.databandJobName(); trackingSource = azkabanTaskContext.trackingSource(); if (trackingSource != null) { source = "azkaban_tracking"; } } else { this.jobName = annotationValue == null || annotationValue.isEmpty() ? method.getName() : annotationValue; } config.jobName().ifPresent(name -> this.jobName = name); TaskRunsInfo rootRun = buildRootRun(method, args); RootRun root = config.azkabanContext().isPresent() ? config.azkabanContext().get().root() : null; String projectName = config.projectName().orElse(null); this.rootRunUid = dbnd.initRun(jobName, runId, user, config.runName(), rootRun, airflowContext, root, source, trackingSource, projectName); dbnd.setRunState(this.rootRunUid, "RUNNING"); } /** * Builds root run. * * @return */ protected TaskRunsInfo buildRootRun(Method method, Object[] args) { ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); String runUid = new Uuid5("RUN_UID", runId).toString(); driverTaskUid = new Uuid5("DRIVER_TASK", runId).toString(); String taskRunEnvUid = new Uuid5("TASK_RUN_ENV_UID", runId).toString(); String taskRunAttemptUid = new Uuid5("TASK_RUN_ATTEMPT", runId).toString(); String cmd = config.cmd(); String version = ""; Sha1Short taskSignature = new Sha1Short("TASK_SIGNATURE", runId); String taskDefinitionUid = new Uuid5("TASK_DEFINITION", runId).toString(); String taskAfId = getTaskName(method); List<TaskParamDefinition> taskParamDefinitions = buildTaskParamDefinitions(method); String methodName = method == null ? 
"pipeline" : method.getName(); Pair<List<TaskRunParam>, List<LogTarget>> paramsAndTargets = buildTaskRunParamsAndTargets( method, args, runUid, methodName, taskRunAttemptUid, taskDefinitionUid ); this.driverTask = new TaskRun( runUid, true, false, null, version, driverTaskUid, taskSignature.toString(), jobName, paramsAndTargets.left(), taskSignature.toString(), false, now.toLocalDate(), now, "", "RUNNING", taskDefinitionUid, cmd, false, false, taskRunAttemptUid, taskAfId, airflowContext != null || azkabanTaskContext != null, true, cmd, taskAfId, "jvm", Collections.emptyMap() ); this.driverTask.setStartDate(now); String sourceCode = extractSourceCode(method); TrackingSource trackingSource = null; if (airflowContext != null) { this.parentChildMap.add(Arrays.asList(airflowContext.getAfOperatorUid(), driverTaskUid)); this.upstreamsMap.add(Arrays.asList(airflowContext.getAfOperatorUid(), driverTaskUid)); trackingSource = new TrackingSource(airflowContext); } if (azkabanTaskContext != null) { this.parentChildMap.add(Arrays.asList(azkabanTaskContext.taskRunUid(), driverTaskUid)); this.upstreamsMap.add(Arrays.asList(azkabanTaskContext.taskRunUid(), driverTaskUid)); trackingSource = new TrackingSource(azkabanTaskContext); } return new TaskRunsInfo( taskRunEnvUid, parentChildMap, runUid, Collections.singletonList(driverTask), Collections.emptyList(), runUid, upstreamsMap, false, Collections.singletonList( new TaskDefinition( methodName, sourceCode, new Sha1Long("SOURCE", runId).toString(), "", taskDefinitionUid, new Sha1Long("MODULE_SOURCE", runId).toString(), taskParamDefinitions, "jvm_task", "java", "" ) ), trackingSource ); } // TODO: actual source code protected String extractSourceCode(Method method) { return ""; } @Override public void startTask(Method method, Object[] args) { RunAndDefinition runAndDefinition = buildRunAndDefinition(method, args, !stack.isEmpty()); TaskRun taskRun = runAndDefinition.taskRun(); taskRuns.add(taskRun); TaskDefinition taskDefinition = runAndDefinition.taskDefinition(); taskDefinitions.add(taskDefinition); TaskRun parent = stack.isEmpty() ? 
driverTask : stack.peek(); // detect nested tasks if (!stack.isEmpty()) { upstreamsMap.add(Arrays.asList(parent.getTaskRunUid(), taskRun.getTaskRunUid())); } taskRun.addUpstream(parent); // detect upstream-downstream relations for (Object arg : args) { if (arg == null) { continue; } TaskRun parentTask = taskRunOutputs.get(arg.hashCode()); if (parentTask != null) { upstreamsMap.add(Arrays.asList(taskRun.getTaskRunUid(), parentTask.getTaskRunUid())); } } stack.push(taskRun); parentChildMap.add(Arrays.asList(parent.getTaskRunUid(), taskRun.getTaskRunUid())); dbnd.addTaskRuns(rootRunUid, runId, taskRuns, taskDefinitions, parentChildMap, upstreamsMap); dbnd.logTargets(taskRun, runAndDefinition.targets()); dbnd.updateTaskRunAttempt(taskRun.getTaskRunUid(), taskRun.getTaskRunAttemptUid(), "RUNNING", null, taskRun.getStartDate()); LOG.info("[task_run_uid: {}, task_name: {}] task tracker url: {}/app/jobs/{}/{}/{}", taskRun.getTaskRunUid(), taskRun.getTaskId(), config.databandUrl(), this.driverTask.getTaskAfId(), this.driverTask.getRunUid(), taskRun.getTaskRunUid() ); } protected List<TaskParamDefinition> buildTaskParamDefinitions(Method method) { if (method == null) { return Collections.emptyList(); } return methodsCache.computeIfAbsent(method, method1 -> { List<TaskParamDefinition> result = new ArrayList<>(method.getParameterCount()); for (int i = 0; i < method.getParameterCount(); i++) { Parameter parameter = method.getParameters()[i]; result.add( new TaskParamDefinition( parameter.getName(), "task_input", "user", true, false, parameter.getParameterizedType().getTypeName(), "", "" ) ); } result.add( new TaskParamDefinition( "result", "task_output", "user", true, false, method.getReturnType().getTypeName(), "", "" ) ); return result; }); } protected Pair<List<TaskRunParam>, List<LogTarget>> buildTaskRunParamsAndTargets(Method method, Object[] args, String taskRunUid, String methodName, String taskRunAttemptUid, String taskDefinitionUid) { if (method == null || args == null || args.length == 0) { return new Pair<>(Collections.emptyList(), Collections.emptyList()); } List<LogTarget> targets = new ArrayList<>(1); List<TaskRunParam> params = new ArrayList<>(method.getParameterCount()); for (int i = 0; i < method.getParameterCount(); i++) { Parameter parameter = method.getParameters()[i]; Object parameterValue = args[i]; TaskParameterPreview preview = parameters.get(parameter.getType()); String compactPreview = preview.compact(parameterValue); params.add( new TaskRunParam( compactPreview, "", parameter.getName() ) ); String targetPath = String.format("%s.%s", method.getName(), parameter.getName()); targets.add( new LogTarget( rootRunUid, taskRunUid, methodName, taskRunAttemptUid, targetPath, parameter.getName(), taskDefinitionUid, "read", "OK", preview.full(parameterValue), preview.dimensions(parameterValue), preview.schema(parameterValue), new Sha1Long("", compactPreview).toString() ) ); } TaskParameterPreview resultPreview = parameters.get(method.getReturnType()); params.add( new TaskRunParam( resultPreview.typeName(method.getReturnType()), "", "result" ) ); return new Pair<>(params, targets); } public String getTaskName(Method method) { if (method == null || method.getName().contains("$anon")) { // we're running from spark-submit return config.sparkAppName(); } Optional<Annotation> taskAnnotation = Arrays.stream(method.getAnnotations()) .filter(at -> at.toString().contains("ai.databand.annotations.Task(value=")) .findAny(); if (!taskAnnotation.isPresent()) { return method.getName(); } String 
annotationStr = taskAnnotation.get().toString(); String annotationValue = annotationStr.substring(annotationStr.indexOf('=') + 1, annotationStr.indexOf(')')); return annotationValue.isEmpty() ? method.getName() : annotationValue; } protected RunAndDefinition buildRunAndDefinition(Method method, Object[] args, boolean hasUpstreams) { int executionCount = methodExecutionCounts.computeIfAbsent(method, m -> 0); executionCount++; String taskName = getTaskName(method); String methodName = executionCount == 1 ? taskName : String.format("%s_%s", taskName, executionCount); methodExecutionCounts.put(method, executionCount); List<TaskParamDefinition> paramDefinitions = buildTaskParamDefinitions(method); String taskRunId = UUID.randomUUID().toString(); String taskRunUid = new Uuid5("TASK_RUN_UID", taskRunId).toString(); String taskSignature = new Sha1Short("TASK_SIGNATURE" + methodName, runId).toString(); String taskDefinitionUid = new Uuid5("TASK_DEFINITION" + methodName, runId).toString(); String taskRunAttemptUid = new Uuid5("TASK_RUN_ATTEMPT" + methodName, runId).toString(); String taskAfId = methodName; ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); Pair<List<TaskRunParam>, List<LogTarget>> paramsAndTargets = buildTaskRunParamsAndTargets( method, args, taskRunUid, methodName, taskRunAttemptUid, taskDefinitionUid ); List<TaskRunParam> params = paramsAndTargets.left(); List<LogTarget> targets = paramsAndTargets.right(); TaskRun taskRun = new TaskRun( rootRunUid, false, false, null, "", taskRunUid, taskSignature, taskAfId, params, taskSignature, false, now.toLocalDate(), now, "", "QUEUED", taskDefinitionUid, methodName, false, hasUpstreams, taskRunAttemptUid, taskAfId, airflowContext != null, false, methodName, taskAfId, "jvm", Collections.emptyMap() ); TaskDefinition taskDefinition = new TaskDefinition( methodName, "", new Sha1Long("SOURCE", runId).toString(), "", taskDefinitionUid, new Sha1Long("MODULE_SOURCE", runId).toString(), paramDefinitions, "jvm_task", "java", "" ); methodsRunsCache.put(method, taskRun); return new RunAndDefinition(taskRun, taskDefinition, targets); } @Override public void errorTask(Method method, Throwable error) { TaskRun task = stack.pop(); if (task == null) { return; } String stackTrace = extractStackTrace(error); task.appendLog(stackTrace); dbnd.saveTaskLog(task.getTaskRunUid(), task.getTaskRunAttemptUid(), task.getTaskLog()); dbnd.logMetrics(task, task.getMetrics(), "spark"); ErrorInfo errorInfo = new ErrorInfo( error.getLocalizedMessage(), "", false, stackTrace, "", stackTrace, true, error.getClass().getCanonicalName() ); dbnd.updateTaskRunAttempt(task.getTaskRunUid(), task.getTaskRunAttemptUid(), "FAILED", errorInfo, task.getStartDate()); } protected String extractStackTrace(Throwable error) { try (StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw)) { error.printStackTrace(pw); return sw.toString(); } catch (IOException e) { LOG.error("Unable to extract stack trace from error", e); return ""; } } @Override public void completeTask(Method method, Object result) { TaskRun task = stack.pop(); if (task == null) { return; } if (result != null) { TaskParameterPreview taskParameter = parameters.get(result.getClass()); String preview = taskParameter.full(result); taskRunOutputs.put(result.hashCode(), task); dbnd.logTargets( task, Collections.singletonList( new LogTarget( rootRunUid, task.getTaskRunUid(), task.getTaskAfId(), task.getTaskRunAttemptUid(), new Sha1Long("TARGET_PATH", preview).toString(), "result", task.getTaskDefinitionUid(), 
"write", "OK", preview, taskParameter.dimensions(result), taskParameter.schema(result), new Sha1Long("", preview).toString() ) )); } dbnd.saveTaskLog(task.getTaskRunUid(), task.getTaskRunAttemptUid(), task.getTaskLog()); dbnd.logMetrics(task, task.getMetrics(), "spark"); dbnd.updateTaskRunAttempt(task.getTaskRunUid(), task.getTaskRunAttemptUid(), "SUCCESS", null, task.getStartDate()); } @Override public void stop() { dbnd.saveTaskLog(driverTask.getTaskRunUid(), driverTask.getTaskRunAttemptUid(), driverTask.getTaskLog()); dbnd.logMetrics(driverTask, driverTask.getMetrics(), "spark"); dbnd.updateTaskRunAttempt(driverTask.getTaskRunUid(), driverTask.getTaskRunAttemptUid(), "SUCCESS", null, driverTask.getStartDate()); if (rootRunUid == null) { // for agentless runs created inside Databand Context (when root run is outside JVM) we shouldn't complete run return; } dbnd.setRunState(rootRunUid, "SUCCESS"); } @Override public void stopExternal() { if (driverTask == null) { return; } dbnd.saveTaskLog(driverTask.getTaskRunUid(), driverTask.getTaskRunAttemptUid(), driverTask.getTaskLog()); dbnd.logMetrics(driverTask, driverTask.getMetrics(), "spark"); } /** * Since listener can still process dataset operations AFTER the run completion, * run should be explicitly stopped after listener will complete work * because dataset operations may be missed/not calculated properly. */ @Override public void stopListener() { dbnd.updateTaskRunAttempt(driverTask.getTaskRunUid(), driverTask.getTaskRunAttemptUid(), "SUCCESS", null, driverTask.getStartDate()); dbnd.setRunState(driverTask.getRunUid(), "SUCCESS"); } public void error(Throwable error) { String stackTrace = extractStackTrace(error); ErrorInfo errorInfo = new ErrorInfo( error.getLocalizedMessage(), "", false, stackTrace, "", stackTrace, false, error.getClass().getCanonicalName() ); driverTask.appendLog(stackTrace); dbnd.saveTaskLog(driverTask.getTaskRunUid(), driverTask.getTaskRunAttemptUid(), driverTask.getTaskLog()); dbnd.logMetrics(driverTask, driverTask.getMetrics(), "spark"); dbnd.updateTaskRunAttempt(driverTask.getTaskRunUid(), driverTask.getTaskRunAttemptUid(), "FAILED", errorInfo, driverTask.getStartDate()); dbnd.setRunState(rootRunUid, "FAILED"); } @Override public void logMetric(String key, Object value) { TaskRun currentTask = stack.peek(); if (currentTask == null) { currentTask = driverTask; } this.logMetric(currentTask, key, value, null); } @Override public void logMetrics(Map<String, Object> metrics) { this.logMetrics(metrics, null); } @Override public void logMetrics(Map<String, Object> metrics, String source) { TaskRun currentTask = stack.peek(); if (currentTask == null) { currentTask = driverTask; } this.logMetrics(currentTask, metrics, source); } @Override public void logDataframe(String key, Dataset<?> value, HistogramRequest histogramRequest) { try { TaskRun currentTask = stack.peek(); if (currentTask == null) { currentTask = driverTask; } logMetric(currentTask, key, value, "user", false); dbnd.logMetrics(currentTask, new Histogram(key, value, histogramRequest).metricValues(), "histograms"); } catch (Exception e) { LOG.error("Unable to log dataframe", e); } } @Override public void logHistogram(Map<String, Object> histogram) { try { TaskRun currentTask = stack.peek(); if (currentTask == null) { currentTask = driverTask; } dbnd.logMetrics(currentTask, histogram, "histograms"); } catch (Exception e) { LOG.error("Unable to log histogram", e); } } @Override public void logDatasetOperation(String path, DatasetOperationType type, 
DatasetOperationStatus status, String error, String valuePreview, List<Long> dataDimensions, Object dataSchema, Boolean withPartition, List<ColumnStats> columnStats, String operationSource) { try { TaskRun currentTask = stack.peek(); if (currentTask == null) { currentTask = driverTask; } dbnd.logDatasetOperations(currentTask, Collections.singletonList( new LogDataset( currentTask, path, type, status, error, valuePreview, dataDimensions, dataSchema, withPartition, columnStats, operationSource ) )); } catch (Exception e) { LOG.error("Unable to log dataset operation", e); } } @Override public void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, Dataset<?> data, Throwable error, LogDatasetRequest params, String operationSource) { TaskParameterPreview preview = params.getWithSchema() ? new DatasetOperationPreview() : new NullPreview(); String errorStr = null; if (error != null) { StringWriter sw = new StringWriter(); try (PrintWriter pw = new PrintWriter(sw)) { error.printStackTrace(pw); errorStr = sw.toString(); } } SparkColumnStats columnStats = new SparkColumnStats(data, params); logDatasetOperation( path, type, status, errorStr, preview.full(data), preview.dimensions(data), preview.schema(data), params.getWithPartition(), columnStats.values(), operationSource ); } public void logMetric(TaskRun taskRun, String key, Object value, String source) { logMetric(taskRun, key, value, source, true); } public void logMetric(TaskRun taskRun, String key, Object value, String source, boolean compact) { try { if (taskRun == null) { return; } TaskParameterPreview taskParameter = parameters.get(value.getClass()); dbnd.logMetric( taskRun, key, compact ? taskParameter.compact(value) : taskParameter.full(value), source ); } catch (Exception e) { LOG.error("Unable to log metric", e); } } public void logMetrics(TaskRun taskRun, Map<String, Object> metrics, String source) { try { if (taskRun == null) { return; } Map<String, Object> result = new HashMap<>(metrics.size()); for (Map.Entry<String, Object> entry : metrics.entrySet()) { TaskParameterPreview taskParameter = parameters.get(entry.getValue().getClass()); result.put(entry.getKey(), taskParameter.compact(entry.getValue())); } dbnd.logMetrics(taskRun, result, source); } catch (Exception e) { LOG.error("Unable to log metrics"); } } @Override public void saveLog(LoggingEvent event, String formattedEvent) { if (!config.isSendingLogs()) { return; } try { if (driverTask == null) { return; } TaskRun currentTask = stack.peek(); if (currentTask == null) { driverTask.appendLog(formattedEvent); } else { currentTask.appendLog(formattedEvent); } } catch (Exception e) { LOG.error("Unable to save task log", e); } } @Override public void saveSparkMetrics(SparkListenerStageCompleted event) { try { StageInfo stageInfo = event.stageInfo(); TaskRun currentTask = stack.peek(); if (currentTask == null) { currentTask = driverTask; } String transformationName = stageInfo.name().substring(0, stageInfo.name().indexOf(' ')); String metricPrefix = String.format("stage-%s.%s.", stageInfo.stageId(), transformationName); Iterator<AccumulatorV2<?, ?>> it = stageInfo.taskMetrics().accumulators().iterator(); Map<String, Object> values = new HashMap<>(1); Map<String, Object> prefixedValues = new HashMap<>(1); while (it.hasNext()) { AccumulatorV2<?, ?> next = it.next(); // we're capturing only numeric values if (!(next instanceof LongAccumulator)) { continue; } String metricName = next.name().get(); String value = String.valueOf(next.value()); 
prefixedValues.put(metricPrefix + metricName, value); values.put(metricName, value); } currentTask.appendMetrics(values); currentTask.appendPrefixedMetrics(prefixedValues); } catch (Exception e) { LOG.error("Unable to save spark metrics", e); } } public void setDriverTask(TaskRun driverTask) { this.driverTask = driverTask; } @Override public Optional<TaskRun> getDriverTask() { return Optional.ofNullable(driverTask); } }
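To make the lifecycle above easier to follow, here is a condensed sketch of the order in which the agent drives DefaultDbndRun. The harness class, the pre-built DbndClient and DbndConfig, and the reflective lookup of the hypothetical MyJob#run method are assumptions for illustration; the call order (init, startTask, completeTask or errorTask, stop) mirrors the implementation above.

import ai.databand.DbndClient;
import ai.databand.DbndRun;
import ai.databand.DefaultDbndRun;
import ai.databand.config.DbndConfig;
import java.lang.reflect.Method;

class LifecycleSketch { // hypothetical harness, for illustration only
    static void track(DbndClient dbnd, DbndConfig config) throws Exception {
        Method task = MyJob.class.getMethod("run", org.apache.spark.sql.Dataset.class);
        DbndRun run = new DefaultDbndRun(dbnd, config);
        run.init(task, new Object[]{null});      // creates the root run and sets it to RUNNING
        run.startTask(task, new Object[]{null}); // registers a child task run under the driver task
        run.logMetric("rows", 100L);             // attached to the task currently on top of the stack
        run.completeTask(task, null);            // marks the task attempt SUCCESS
        run.stop();                              // completes the driver task and the root run
    }
}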
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/NoopDbndRun.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand; import ai.databand.log.HistogramRequest; import ai.databand.log.LogDatasetRequest; import ai.databand.schema.ColumnStats; import ai.databand.schema.DatasetOperationStatus; import ai.databand.schema.DatasetOperationType; import ai.databand.schema.TaskRun; import org.apache.log4j.spi.LoggingEvent; import org.apache.spark.scheduler.SparkListenerStageCompleted; import org.apache.spark.sql.Dataset; import java.lang.reflect.Method; import java.util.List; import java.util.Map; import java.util.Optional; /** * No-op run used when no tracking is available to avoid unnecessary exceptions and log pollution. */ public class NoopDbndRun implements DbndRun { @Override public void init(Method method, Object[] args) { // do nothing } @Override public void startTask(Method method, Object[] args) { // do nothing } @Override public void errorTask(Method method, Throwable error) { // do nothing } @Override public void completeTask(Method method, Object result) { // do nothing } @Override public void stop() { // do nothing } @Override public void stopExternal() { // do nothing } @Override public void stopListener() { // do nothing } @Override public void error(Throwable error) { // do nothing } @Override public void logMetric(String key, Object value) { // do nothing } @Override public void logDataframe(String key, Dataset<?> value, HistogramRequest withHistograms) { // do nothing } @Override public void logHistogram(Map<String, Object> histogram) { // do nothing } @Override public void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, String error, String valuePreview, List<Long> dataDimensions, Object dataSchema, Boolean withPartition, List<ColumnStats> columnStats, String operationSource) { // do nothing } @Override public void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, Dataset<?> data, Throwable error, LogDatasetRequest params, String operationSource) { // do nothing } @Override public void logMetrics(Map<String, Object> metrics) { // do nothing } @Override public void logMetrics(Map<String, Object> metrics, String source) { // do nothing } @Override public void saveLog(LoggingEvent event, String formattedEvent) { // do nothing } @Override public void saveSparkMetrics(SparkListenerStageCompleted event) { // do nothing } @Override public String getTaskName(Method method) { // dummy return method.getName(); } @Override public void setDriverTask(TaskRun taskRun) { // do nothing } @Override public Optional<TaskRun> getDriverTask() { return Optional.empty(); } }
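NoopDbndRun is the null-object half of the pattern used throughout this client: callers obtain some DbndRun and invoke it unconditionally, so no call site needs a null check or a tracking-enabled branch. A minimal sketch of the selection logic, following the same shape as the fallback in DbndWrapper.initRun above (the helper class name is hypothetical):

import ai.databand.DbndClient;
import ai.databand.DbndRun;
import ai.databand.DefaultDbndRun;
import ai.databand.NoopDbndRun;
import ai.databand.config.DbndConfig;

class TrackingSelection { // hypothetical helper, for illustration only
    static DbndRun select(DbndClient dbnd, DbndConfig config) {
        // when tracking is off, every subsequent call becomes a safe no-op
        return config.isTrackingEnabled() ? new DefaultDbndRun(dbnd, config) : new NoopDbndRun();
    }
}

With this in place, code like select(dbnd, config).logMetric("rows", 100) is safe either way: it reports a metric when tracking is enabled and does nothing otherwise.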
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/RandomNames.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand; import java.security.SecureRandom; /** * Docker names generator, Python port * https://github.com/shamrin/namesgenerator * Copyright (c) 2017 Alexey Shamrin * MIT License */ public class RandomNames { private static final SecureRandom RANDOM = new SecureRandom(); private static final String[] LEFT = new String[]{ "admiring", "adoring", "affectionate", "agitated", "amazing", "angry", "awesome", "blissful", "boring", "brave", "clever", "cocky", "compassionate", "competent", "condescending", "confident", "cranky", "dazzling", "determined", "distracted", "dreamy", "eager", "ecstatic", "elastic", "elated", "elegant", "eloquent", "epic", "fervent", "festive", "flamboyant", "focused", "friendly", "frosty", "gallant", "gifted", "goofy", "gracious", "happy", "hardcore", "heuristic", "hopeful", "hungry", "infallible", "inspiring", "jolly", "jovial", "keen", "kind", "laughing", "loving", "lucid", "mystifying", "modest", "musing", "naughty", "nervous", "nifty", "nostalgic", "objective", "optimistic", "peaceful", "pedantic", "pensive", "practical", "priceless", "quirky", "quizzical", "relaxed", "reverent", "romantic", "sad", "serene", "sharp", "silly", "sleepy", "stoic", "stupefied", "suspicious", "tender", "thirsty", "trusting", "unruffled", "upbeat", "vibrant", "vigilant", "vigorous", "wizardly", "wonderful", "xenodochial", "youthful", "zealous", "zen" }; private static final String[] RIGHT = new String[]{ // Muhammad ibn Jābir al-Ḥarrānī al-Battānī was a founding father of astronomy. https://en.wikipedia.org/wiki/Mu%E1%B8%A5ammad_ibn_J%C4%81bir_al-%E1%B8%A4arr%C4%81n%C4%AB_al-Batt%C4%81n%C4%AB "albattani", // Frances E. Allen, became the first female IBM Fellow in 1989. In 2006, she became the first female recipient of the ACM's Turing Award. https://en.wikipedia.org/wiki/Frances_E._Allen "allen", // June Almeida - Scottish virologist who took the first pictures of the rubella virus - https://en.wikipedia.org/wiki/June_Almeida "almeida", // Maria Gaetana Agnesi - Italian mathematician, philosopher, theologian and humanitarian. She was the first woman to write a mathematics handbook and the first woman appointed as a Mathematics Professor at a University. https://en.wikipedia.org/wiki/Maria_Gaetana_Agnesi "agnesi", // Archimedes was a physicist, engineer and mathematician who invented too many things to list them here. https://en.wikipedia.org/wiki/Archimedes "archimedes", // Maria Ardinghelli - Italian translator, mathematician and physicist - https://en.wikipedia.org/wiki/Maria_Ardinghelli "ardinghelli", // Aryabhata - Ancient Indian mathematician-astronomer during 476-550 CE https://en.wikipedia.org/wiki/Aryabhata "aryabhata", // Wanda Austin - Wanda Austin is the President and CEO of The Aerospace Corporation, a leading architect for the US security space programs. https://en.wikipedia.org/wiki/Wanda_Austin "austin", // Charles Babbage invented the concept of a programmable computer. https://en.wikipedia.org/wiki/Charles_Babbage. "babbage", // Stefan Banach - Polish mathematician, was one of the founders of modern functional analysis. https://en.wikipedia.org/wiki/Stefan_Banach "banach", // John Bardeen co-invented the transistor - https://en.wikipedia.org/wiki/John_Bardeen "bardeen", // Jean Bartik, born Betty Jean Jennings, was one of the original programmers for the ENIAC computer. 
https://en.wikipedia.org/wiki/Jean_Bartik "bartik", // Laura Bassi, the world's first female professor https://en.wikipedia.org/wiki/Laura_Bassi "bassi", // Hugh Beaver, British engineer, founder of the Guinness Book of World Records https://en.wikipedia.org/wiki/Hugh_Beaver "beaver", // Alexander Graham Bell - an eminent Scottish-born scientist, inventor, engineer and innovator who is credited with inventing the first practical telephone - https://en.wikipedia.org/wiki/Alexander_Graham_Bell "bell", // Karl Friedrich Benz - a German automobile engineer. Inventor of the first practical motorcar. https://en.wikipedia.org/wiki/Karl_Benz "benz", // Homi J Bhabha - was an Indian nuclear physicist, founding director, and professor of physics at the Tata Institute of Fundamental Research. Colloquially known as 'father of Indian nuclear programme'- https://en.wikipedia.org/wiki/Homi_J._Bhabha "bhabha", // Bhaskara II - Ancient Indian mathematician-astronomer whose work on calculus predates Newton and Leibniz by over half a millennium - https://en.wikipedia.org/wiki/Bh%C4%81skara_II//Calculus "bhaskara", // Elizabeth Blackwell - American doctor and first American woman to receive a medical degree - https://en.wikipedia.org/wiki/Elizabeth_Blackwell "blackwell", // Niels Bohr is the father of quantum theory. https://en.wikipedia.org/wiki/Niels_Bohr. "bohr", // Kathleen Booth, she's credited with writing the first assembly language. https://en.wikipedia.org/wiki/Kathleen_Booth "booth", // Anita Borg - Anita Borg was the founding director of the Institute for Women and Technology (IWT). https://en.wikipedia.org/wiki/Anita_Borg "borg", // Satyendra Nath Bose - He provided the foundation for Bose–Einstein statistics and the theory of the Bose–Einstein condensate. - https://en.wikipedia.org/wiki/Satyendra_Nath_Bose "bose", // Evelyn Boyd Granville - She was one of the first African-American woman to receive a Ph.D. in mathematics; she earned it in 1949 from Yale University. https://en.wikipedia.org/wiki/Evelyn_Boyd_Granville "boyd", // Brahmagupta - Ancient Indian mathematician during 598-670 CE who gave rules to compute with zero - https://en.wikipedia.org/wiki/Brahmagupta//Zero "brahmagupta", // Walter Houser Brattain co-invented the transistor - https://en.wikipedia.org/wiki/Walter_Houser_Brattain "brattain", // Emmett Brown invented time travel. https://en.wikipedia.org/wiki/Emmett_Brown (thanks Brian Goff) "brown", // Rachel Carson - American marine biologist and conservationist, her book Silent Spring and other writings are credited with advancing the global environmental movement. https://en.wikipedia.org/wiki/Rachel_Carson "carson", // Subrahmanyan Chandrasekhar - Astrophysicist known for his mathematical theory on different stages and evolution in structures of the stars. He has won nobel prize for physics - https://en.wikipedia.org/wiki/Subrahmanyan_Chandrasekhar "chandrasekhar", // Claude Shannon - The father of information theory and founder of digital circuit design theory. (https://en.wikipedia.org/wiki/Claude_Shannon) "shannon", // Joan Clarke - Bletchley Park code breaker during the Second World War who pioneered techniques that remained top secret for decades. 
Also an accomplished numismatist https://en.wikipedia.org/wiki/Joan_Clarke "clarke", // Jane Colden - American botanist widely considered the first female American botanist - https://en.wikipedia.org/wiki/Jane_Colden "colden", // Gerty Theresa Cori - American biochemist who became the third woman—and first American woman—to win a Nobel Prize in science, and the first woman to be awarded the Nobel Prize in Physiology or Medicine. Cori was born in Prague. https://en.wikipedia.org/wiki/Gerty_Cori "cori", // Seymour Roger Cray was an American electrical engineer and supercomputer architect who designed a series of computers that were the fastest in the world for decades. https://en.wikipedia.org/wiki/Seymour_Cray "cray", // This entry reflects a husband and wife team who worked together: // Joan Curran was a Welsh scientist who developed radar and invented chaff, a radar countermeasure. https://en.wikipedia.org/wiki/Joan_Curran // Samuel Curran was an Irish physicist who worked alongside his wife during WWII and invented the proximity fuse. https://en.wikipedia.org/wiki/Samuel_Curran "curran", // Marie Curie discovered radioactivity. https://en.wikipedia.org/wiki/Marie_Curie. "curie", // Charles Darwin established the principles of natural evolution. https://en.wikipedia.org/wiki/Charles_Darwin. "darwin", // Leonardo Da Vinci invented too many things to list here. https://en.wikipedia.org/wiki/Leonardo_da_Vinci. "davinci", // Edsger Wybe Dijkstra was a Dutch computer scientist and mathematical scientist. https://en.wikipedia.org/wiki/Edsger_W._Dijkstra. "dijkstra", // Donna Dubinsky - played an integral role in the development of personal digital assistants (PDAs) serving as CEO of Palm, Inc. and co-founding Handspring. https://en.wikipedia.org/wiki/Donna_Dubinsky "dubinsky", // Annie Easley - She was a leading member of the team which developed software for the Centaur rocket stage and one of the first African-Americans in her field. https://en.wikipedia.org/wiki/Annie_Easley "easley", // Thomas Alva Edison, prolific inventor https://en.wikipedia.org/wiki/Thomas_Edison "edison", // Albert Einstein invented the general theory of relativity. https://en.wikipedia.org/wiki/Albert_Einstein "einstein", // Gertrude Elion - American biochemist, pharmacologist and the 1988 recipient of the Nobel Prize in Medicine - https://en.wikipedia.org/wiki/Gertrude_Elion "elion", // Douglas Engelbart gave the mother of all demos: https://en.wikipedia.org/wiki/Douglas_Engelbart "engelbart", // Euclid invented geometry. https://en.wikipedia.org/wiki/Euclid "euclid", // Leonhard Euler invented large parts of modern mathematics. https://de.wikipedia.org/wiki/Leonhard_Euler "euler", // Pierre de Fermat pioneered several aspects of modern mathematics. https://en.wikipedia.org/wiki/Pierre_de_Fermat "fermat", // Enrico Fermi invented the first nuclear reactor. https://en.wikipedia.org/wiki/Enrico_Fermi. "fermi", // Richard Feynman was a key contributor to quantum mechanics and particle physics. https://en.wikipedia.org/wiki/Richard_Feynman "feynman", // Benjamin Franklin is famous for his experiments in electricity and the invention of the lightning rod. "franklin", // Galileo was a founding father of modern astronomy, and faced politics and obscurantism to establish scientific truth. https://en.wikipedia.org/wiki/Galileo_Galilei "galileo", // William Henry 'Bill' Gates III is an American business magnate, philanthropist, investor, computer programmer, and inventor. 
https://en.wikipedia.org/wiki/Bill_Gates "gates", // Adele Goldberg, was one of the designers and developers of the Smalltalk language. https://en.wikipedia.org/wiki/Adele_Goldberg_(computer_scientist) "goldberg", // Adele Goldstine, born Adele Katz, wrote the complete technical description for the first electronic digital computer, ENIAC. https://en.wikipedia.org/wiki/Adele_Goldstine "goldstine", // Shafi Goldwasser is a computer scientist known for creating theoretical foundations of modern cryptography. Winner of 2012 ACM Turing Award. https://en.wikipedia.org/wiki/Shafi_Goldwasser "goldwasser", // James Golick, all around gangster. "golick", // Jane Goodall - British primatologist, ethologist, and anthropologist who is considered to be the world's foremost expert on chimpanzees - https://en.wikipedia.org/wiki/Jane_Goodall "goodall", // Lois Haibt - American computer scientist, part of the team at IBM that developed FORTRAN - https://en.wikipedia.org/wiki/Lois_Haibt "haibt", // Margaret Hamilton - Director of the Software Engineering Division of the MIT Instrumentation Laboratory, which developed on-board flight software for the Apollo space program. https://en.wikipedia.org/wiki/Margaret_Hamilton_(scientist) "hamilton", // Stephen Hawking pioneered the field of cosmology by combining general relativity and quantum mechanics. https://en.wikipedia.org/wiki/Stephen_Hawking "hawking", // Werner Heisenberg was a founding father of quantum mechanics. https://en.wikipedia.org/wiki/Werner_Heisenberg "heisenberg", // Grete Hermann was a German philosopher noted for her philosophical work on the foundations of quantum mechanics. https://en.wikipedia.org/wiki/Grete_Hermann "hermann", // Jaroslav Heyrovský was the inventor of the polarographic method, father of the electroanalytical method, and recipient of the Nobel Prize in 1959. His main field of work was polarography. https://en.wikipedia.org/wiki/Jaroslav_Heyrovsk%C3%BD "heyrovsky", // Dorothy Hodgkin was a British biochemist, credited with the development of protein crystallography. She was awarded the Nobel Prize in Chemistry in 1964. https://en.wikipedia.org/wiki/Dorothy_Hodgkin "hodgkin", // Erna Schneider Hoover revolutionized modern communication by inventing a computerized telephone switching method. https://en.wikipedia.org/wiki/Erna_Schneider_Hoover "hoover", // Grace Hopper developed the first compiler for a computer programming language and is credited with popularizing the term 'debugging' for fixing computer glitches. https://en.wikipedia.org/wiki/Grace_Hopper "hopper", // Frances Hugle, she was an American scientist, engineer, and inventor who contributed to the understanding of semiconductors, integrated circuitry, and the unique electrical principles of microscopic materials. https://en.wikipedia.org/wiki/Frances_Hugle "hugle", // Hypatia - Greek Alexandrine Neoplatonist philosopher in Egypt who was one of the earliest mothers of mathematics - https://en.wikipedia.org/wiki/Hypatia "hypatia", // Mary Jackson, American mathematician and aerospace engineer who earned the highest title within NASA's engineering department - https://en.wikipedia.org/wiki/Mary_Jackson_(engineer) "jackson", // Yeong-Sil Jang was a Korean scientist and astronomer during the Joseon Dynasty; he invented the first metal printing press and water gauge. https://en.wikipedia.org/wiki/Jang_Yeong-sil "jang", // Betty Jennings - one of the original programmers of the ENIAC. 
https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Jean_Bartik "jennings", // Mary Lou Jepsen, was the founder and chief technology officer of One Laptop Per Child (OLPC), and the founder of Pixel Qi. https://en.wikipedia.org/wiki/Mary_Lou_Jepsen "jepsen", // Katherine Coleman Goble Johnson - American physicist and mathematician contributed to the NASA. https://en.wikipedia.org/wiki/Katherine_Johnson "johnson", // Irène Joliot-Curie - French scientist who was awarded the Nobel Prize for Chemistry in 1935. Daughter of Marie and Pierre Curie. https://en.wikipedia.org/wiki/Ir%C3%A8ne_Joliot-Curie "joliot", // Karen Spärck Jones came up with the concept of inverse document frequency, which is used in most search engines today. https://en.wikipedia.org/wiki/Karen_Sp%C3%A4rck_Jones "jones", // A. P. J. Abdul Kalam - is an Indian scientist aka Missile Man of India for his work on the development of ballistic missile and launch vehicle technology - https://en.wikipedia.org/wiki/A._P._J._Abdul_Kalam "kalam", // Susan Kare, created the icons and many of the interface elements for the original Apple Macintosh in the 1980s, and was an original employee of NeXT, working as the Creative Director. https://en.wikipedia.org/wiki/Susan_Kare "kare", // Mary Kenneth Keller, Sister Mary Kenneth Keller became the first American woman to earn a PhD in Computer Science in 1965. https://en.wikipedia.org/wiki/Mary_Kenneth_Keller "keller", // Johannes Kepler, German astronomer known for his three laws of planetary motion - https://en.wikipedia.org/wiki/Johannes_Kepler "kepler", // Har Gobind Khorana - Indian-American biochemist who shared the 1968 Nobel Prize for Physiology - https://en.wikipedia.org/wiki/Har_Gobind_Khorana "khorana", // Jack Kilby invented silicone integrated circuits and gave Silicon Valley its name. - https://en.wikipedia.org/wiki/Jack_Kilby "kilby", // Maria Kirch - German astronomer and first woman to discover a comet - https://en.wikipedia.org/wiki/Maria_Margarethe_Kirch "kirch", // Donald Knuth - American computer scientist, author of 'The Art of Computer Programming' and creator of the TeX typesetting system. https://en.wikipedia.org/wiki/Donald_Knuth "knuth", // Sophie Kowalevski - Russian mathematician responsible for important original contributions to analysis, differential equations and mechanics - https://en.wikipedia.org/wiki/Sofia_Kovalevskaya "kowalevski", // Marie-Jeanne de Lalande - French astronomer, mathematician and cataloguer of stars - https://en.wikipedia.org/wiki/Marie-Jeanne_de_Lalande "lalande", // Hedy Lamarr - Actress and inventor. The principles of her work are now incorporated into modern Wi-Fi, CDMA and Bluetooth technology. https://en.wikipedia.org/wiki/Hedy_Lamarr "lamarr", // Leslie B. Lamport - American computer scientist. Lamport is best known for his seminal work in distributed systems and was the winner of the 2013 Turing Award. https://en.wikipedia.org/wiki/Leslie_Lamport "lamport", // Mary Leakey - British paleoanthropologist who discovered the first fossilized Proconsul skull - https://en.wikipedia.org/wiki/Mary_Leakey "leakey", // Henrietta Swan Leavitt - she was an American astronomer who discovered the relation between the luminosity and the period of Cepheid variable stars. https://en.wikipedia.org/wiki/Henrietta_Swan_Leavitt "leavitt", // Daniel Lewin - Mathematician, Akamai co-founder, soldier, 9/11 victim-- Developed optimization techniques for routing traffic on the internet. Died attempting to stop the 9-11 hijackers. 
        // https://en.wikipedia.org/wiki/Daniel_Lewin
        "lewin",
        // Ruth Lichterman - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Ruth_Teitelbaum
        "lichterman",
        // Barbara Liskov - co-developed the Liskov substitution principle. Liskov was also the winner of the Turing Award in 2008. - https://en.wikipedia.org/wiki/Barbara_Liskov
        "liskov",
        // Ada Lovelace invented the first algorithm. https://en.wikipedia.org/wiki/Ada_Lovelace (thanks James Turnbull)
        "lovelace",
        // Auguste and Louis Lumière - the first filmmakers in history - https://en.wikipedia.org/wiki/Auguste_and_Louis_Lumi%C3%A8re
        "lumiere",
        // Mahavira - Ancient Indian mathematician during 9th century AD who discovered basic algebraic identities - https://en.wikipedia.org/wiki/Mah%C4%81v%C4%ABra_(mathematician)
        "mahavira",
        // Maria Mayer - American theoretical physicist and Nobel laureate in Physics for proposing the nuclear shell model of the atomic nucleus - https://en.wikipedia.org/wiki/Maria_Mayer
        "mayer",
        // John McCarthy invented LISP: https://en.wikipedia.org/wiki/John_McCarthy_(computer_scientist)
        "mccarthy",
        // Barbara McClintock - a distinguished American cytogeneticist, 1983 Nobel Laureate in Physiology or Medicine for discovering transposons. https://en.wikipedia.org/wiki/Barbara_McClintock
        "mcclintock",
        // Malcolm McLean invented the modern shipping container: https://en.wikipedia.org/wiki/Malcom_McLean
        "mclean",
        // Kay McNulty - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Kathleen_Antonelli
        "mcnulty",
        // Lise Meitner - Austrian/Swedish physicist who was involved in the discovery of nuclear fission. The element meitnerium is named after her - https://en.wikipedia.org/wiki/Lise_Meitner
        "meitner",
        // Carla Meninsky was the game designer and programmer for the Atari 2600 games Dodge 'Em and Warlords. https://en.wikipedia.org/wiki/Carla_Meninsky
        "meninsky",
        // Johanna Mestorf - German prehistoric archaeologist and first female museum director in Germany - https://en.wikipedia.org/wiki/Johanna_Mestorf
        "mestorf",
        // Marvin Minsky - Pioneer in Artificial Intelligence, co-founder of MIT's AI Lab, won the Turing Award in 1969. https://en.wikipedia.org/wiki/Marvin_Minsky
        "minsky",
        // Maryam Mirzakhani - an Iranian mathematician and the first woman to win the Fields Medal. https://en.wikipedia.org/wiki/Maryam_Mirzakhani
        "mirzakhani",
        // Samuel Morse - contributed to the invention of a single-wire telegraph system based on European telegraphs and was a co-developer of the Morse code - https://en.wikipedia.org/wiki/Samuel_Morse
        "morse",
        // Ian Murdock - founder of the Debian project - https://en.wikipedia.org/wiki/Ian_Murdock
        "murdock",
        // John von Neumann - today's computer architectures are based on the von Neumann architecture. https://en.wikipedia.org/wiki/Von_Neumann_architecture
        "neumann",
        // Isaac Newton invented classic mechanics and modern optics. https://en.wikipedia.org/wiki/Isaac_Newton
        "newton",
        // Florence Nightingale, more prominently known as a nurse, was also the first female member of the Royal Statistical Society and a pioneer in statistical graphics. https://en.wikipedia.org/wiki/Florence_Nightingale#Statistics_and_sanitary_reform
        "nightingale",
        // Alfred Nobel - a Swedish chemist, engineer, innovator, and armaments manufacturer (inventor of dynamite) - https://en.wikipedia.org/wiki/Alfred_Nobel
        "nobel",
        // Emmy Noether, German mathematician. Noether's Theorem is named after her. https://en.wikipedia.org/wiki/Emmy_Noether
        "noether",
        // Poppy Northcutt was the first woman to work as part of NASA's Mission Control. http://www.businessinsider.com/poppy-northcutt-helped-apollo-astronauts-2014-12?op=1
        "northcutt",
        // Robert Noyce invented silicon integrated circuits and gave Silicon Valley its name. - https://en.wikipedia.org/wiki/Robert_Noyce
        "noyce",
        // Panini - Ancient Indian linguist and grammarian from the 4th century BCE who worked on the world's first formal system - https://en.wikipedia.org/wiki/P%C4%81%E1%B9%87ini#Comparison_with_modern_formal_systems
        "panini",
        // Ambroise Pare invented modern surgery. https://en.wikipedia.org/wiki/Ambroise_Par%C3%A9
        "pare",
        // Louis Pasteur discovered vaccination, fermentation and pasteurization. https://en.wikipedia.org/wiki/Louis_Pasteur
        "pasteur",
        // Cecilia Payne-Gaposchkin was an astronomer and astrophysicist who, in 1925, proposed in her Ph.D. thesis an explanation for the composition of stars in terms of the relative abundances of hydrogen and helium. https://en.wikipedia.org/wiki/Cecilia_Payne-Gaposchkin
        "payne",
        // Radia Perlman is a software designer and network engineer, most famous for her invention of the spanning-tree protocol (STP). https://en.wikipedia.org/wiki/Radia_Perlman
        "perlman",
        // Rob Pike was a key contributor to Unix, Plan 9, the X graphic system, UTF-8, and the Go programming language. https://en.wikipedia.org/wiki/Rob_Pike
        "pike",
        // Henri Poincaré made fundamental contributions in several fields of mathematics. https://en.wikipedia.org/wiki/Henri_Poincar%C3%A9
        "poincare",
        // Laura Poitras is a director and producer whose work, made possible by open source crypto tools, advances the causes of truth and freedom of information by reporting disclosures by whistleblowers such as Edward Snowden. https://en.wikipedia.org/wiki/Laura_Poitras
        "poitras",
        // Claudius Ptolemy - a Greco-Egyptian writer of Alexandria, known as a mathematician, astronomer, geographer, astrologer, and poet of a single epigram in the Greek Anthology - https://en.wikipedia.org/wiki/Ptolemy
        "ptolemy",
        // C. V. Raman - Indian physicist who won the Nobel Prize in 1930 for discovering the Raman effect. - https://en.wikipedia.org/wiki/C._V._Raman
        "raman",
        // Srinivasa Ramanujan - Indian mathematician and autodidact who made extraordinary contributions to mathematical analysis, number theory, infinite series, and continued fractions. - https://en.wikipedia.org/wiki/Srinivasa_Ramanujan
        "ramanujan",
        // Sally Kristen Ride was an American physicist and astronaut. She was the first American woman in space, and the youngest American astronaut. https://en.wikipedia.org/wiki/Sally_Ride
        "ride",
        // Rita Levi-Montalcini - won the Nobel Prize in Physiology or Medicine jointly with colleague Stanley Cohen for the discovery of nerve growth factor (https://en.wikipedia.org/wiki/Rita_Levi-Montalcini)
        "montalcini",
        // Dennis Ritchie - co-creator of UNIX and the C programming language. - https://en.wikipedia.org/wiki/Dennis_Ritchie
        "ritchie",
        // Wilhelm Conrad Röntgen - German physicist who was awarded the first Nobel Prize in Physics in 1901 for the discovery of X-rays (Röntgen rays). https://en.wikipedia.org/wiki/Wilhelm_R%C3%B6ntgen
        "roentgen",
        // Rosalind Franklin - British biophysicist and X-ray crystallographer whose research was critical to the understanding of DNA - https://en.wikipedia.org/wiki/Rosalind_Franklin
        "rosalind",
        // Meghnad Saha - Indian astrophysicist best known for his development of the Saha equation, used to describe chemical and physical conditions in stars - https://en.wikipedia.org/wiki/Meghnad_Saha
        "saha",
        // Jean E. Sammet developed FORMAC, the first widely used computer language for symbolic manipulation of mathematical formulas. https://en.wikipedia.org/wiki/Jean_E._Sammet
        "sammet",
        // Carol Shaw - originally an Atari employee, Carol Shaw is said to be the first female video game designer. https://en.wikipedia.org/wiki/Carol_Shaw_(video_game_designer)
        "shaw",
        // Dame Stephanie 'Steve' Shirley - founded a software company in 1962 employing women working from home. https://en.wikipedia.org/wiki/Steve_Shirley
        "shirley",
        // William Shockley co-invented the transistor - https://en.wikipedia.org/wiki/William_Shockley
        "shockley",
        // Françoise Barré-Sinoussi - French virologist and Nobel Prize Laureate in Physiology or Medicine; her work was fundamental in identifying HIV as the cause of AIDS. https://en.wikipedia.org/wiki/Fran%C3%A7oise_Barr%C3%A9-Sinoussi
        "sinoussi",
        // Betty Snyder - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Betty_Holberton
        "snyder",
        // Frances Spence - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Frances_Spence
        "spence",
        // Richard Matthew Stallman - the founder of the Free Software movement, the GNU project, the Free Software Foundation, and the League for Programming Freedom. He also invented the concept of copyleft to protect the ideals of this movement, and enshrined this concept in the widely-used GPL (General Public License) for software. https://en.wikiquote.org/wiki/Richard_Stallman
        "stallman",
        // Michael Stonebraker is a database research pioneer and architect of Ingres, Postgres, VoltDB and SciDB. Winner of the 2014 ACM Turing Award. https://en.wikipedia.org/wiki/Michael_Stonebraker
        "stonebraker",
        // Janese Swanson (with others) developed the first of the Carmen Sandiego games. She went on to found Girl Tech. https://en.wikipedia.org/wiki/Janese_Swanson
        "swanson",
        // Aaron Swartz was influential in creating RSS, Markdown, Creative Commons, Reddit, and much of the internet as we know it today. He was devoted to freedom of information on the web. https://en.wikiquote.org/wiki/Aaron_Swartz
        "swartz",
        // Bertha Swirles was a theoretical physicist who made a number of contributions to early quantum theory. https://en.wikipedia.org/wiki/Bertha_Swirles
        "swirles",
        // Nikola Tesla invented the AC electric system and every gadget ever used by a James Bond villain. https://en.wikipedia.org/wiki/Nikola_Tesla
        "tesla",
        // Ken Thompson - co-creator of UNIX and the C programming language - https://en.wikipedia.org/wiki/Ken_Thompson
        "thompson",
        // Linus Torvalds invented Linux and Git. https://en.wikipedia.org/wiki/Linus_Torvalds
        "torvalds",
        // Alan Turing was a founding father of computer science. https://en.wikipedia.org/wiki/Alan_Turing
        "turing",
        // Varahamihira - Ancient Indian mathematician who discovered trigonometric formulae during 505-587 CE - https://en.wikipedia.org/wiki/Var%C4%81hamihira#Contributions
        "varahamihira",
        // Sir Mokshagundam Visvesvaraya - a notable Indian engineer and recipient of the Indian Republic's highest honour, the Bharat Ratna, in 1955. His birthday, 15 September, is celebrated as Engineer's Day in India in his memory - https://en.wikipedia.org/wiki/Visvesvaraya
        "visvesvaraya",
        // Christiane Nüsslein-Volhard - German biologist, won the Nobel Prize in Physiology or Medicine in 1995 for research on the genetic control of embryonic development. https://en.wikipedia.org/wiki/Christiane_N%C3%BCsslein-Volhard
        "volhard",
        // Marlyn Wescoff - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC - https://en.wikipedia.org/wiki/Marlyn_Meltzer
        "wescoff",
        // Andrew Wiles - notable British mathematician who proved the enigmatic Fermat's Last Theorem - https://en.wikipedia.org/wiki/Andrew_Wiles
        "wiles",
        // Roberta Williams did pioneering work in graphical adventure games for personal computers, particularly the King's Quest series. https://en.wikipedia.org/wiki/Roberta_Williams
        "williams",
        // Sophie Wilson designed the first Acorn Micro-Computer and the instruction set for ARM processors. https://en.wikipedia.org/wiki/Sophie_Wilson
        "wilson",
        // Jeannette Wing - co-developed the Liskov substitution principle. - https://en.wikipedia.org/wiki/Jeannette_Wing
        "wing",
        // Steve Wozniak invented the Apple I and Apple II. https://en.wikipedia.org/wiki/Steve_Wozniak
        "wozniak",
        // The Wright brothers, Orville and Wilbur - credited with inventing and building the world's first successful airplane and making the first controlled, powered and sustained heavier-than-air human flight - https://en.wikipedia.org/wiki/Wright_brothers
        "wright",
        // Rosalyn Sussman Yalow - an American medical physicist and co-winner of the 1977 Nobel Prize in Physiology or Medicine for development of the radioimmunoassay technique. https://en.wikipedia.org/wiki/Rosalyn_Sussman_Yalow
        "yalow",
        // Ada Yonath - an Israeli crystallographer, the first woman from the Middle East to win a Nobel prize in the sciences. https://en.wikipedia.org/wiki/Ada_Yonath
        "yonath"
    };

    public static String next() {
        String name = String.format("%s%s%s", LEFT[RANDOM.nextInt(LEFT.length)], "_", RIGHT[RANDOM.nextInt(RIGHT.length)]);
        if ("boring_wozniak".equals(name)) {
            // Steve Wozniak is not boring
            return next();
        }
        return name;
    }
}
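For orientation, a tiny usage sketch of the generator above; the LEFT adjective table is defined earlier in the class, so the adjective in the comment is illustrative only:

class RandomNamesExample {
    public static void main(String[] args) {
        // Produces an "adjective_surname" pair, e.g. something like "eager_noether";
        // "boring_wozniak" is filtered out by next() itself.
        System.out.println(RandomNames.next());
    }
}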
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/AzkabanApi.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import okhttp3.MultipartBody;
import retrofit2.Call;
import retrofit2.http.Field;
import retrofit2.http.FormUrlEncoded;
import retrofit2.http.Multipart;
import retrofit2.http.POST;
import retrofit2.http.Part;

public interface AzkabanApi {

    @FormUrlEncoded
    @POST("/?action=login")
    Call<LoginRes> login(@Field("username") String username, @Field("password") String password);

    @FormUrlEncoded
    @POST("/manager?action=create")
    Call<CreateProjectRes> createProject(@Field("session.id") String sessionId,
                                         @Field("name") String name,
                                         @Field("description") String description);

    @Multipart
    @POST("/manager")
    Call<UploadProjectRes> uploadProject(@Part MultipartBody.Part action,
                                         @Part MultipartBody.Part sessionId,
                                         @Part MultipartBody.Part project,
                                         @Part MultipartBody.Part file);

    @FormUrlEncoded
    @POST("/executor?ajax=executeFlow")
    Call<ExecuteFlowRes> executeFlow(@Field("session.id") String sessionId,
                                     @Field("project") String project,
                                     @Field("flow") String flow);

    @FormUrlEncoded
    @POST("/executor?ajax=fetchexecflow")
    Call<FetchFlowExecutionRes> fetchFlowExecution(@Field("session.id") String sessionId,
                                                   @Field("execid") Integer executionId);
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/AzkabanClient.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import okhttp3.MediaType;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.RequestBody;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.jackson.JacksonConverterFactory;

import java.io.IOException;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

/**
 * Stateful client
 */
public class AzkabanClient {

    private final AzkabanApi api;

    public AzkabanClient(String azkabanUrl) {
        OkHttpClient.Builder clientBuilder = new OkHttpClient.Builder()
            .readTimeout(60, TimeUnit.SECONDS)
            .connectTimeout(60, TimeUnit.SECONDS)
            .writeTimeout(60, TimeUnit.SECONDS)
            .retryOnConnectionFailure(true);

        api = new Retrofit.Builder()
            .client(clientBuilder.build())
            .baseUrl(azkabanUrl)
            .addConverterFactory(JacksonConverterFactory.create())
            .build()
            .create(AzkabanApi.class);
    }

    public Optional<String> login(String username, String password) {
        try {
            Response<LoginRes> res = api.login(username, password).execute();
            LoginRes body = res.body();
            if (body != null) {
                return Optional.of(body.getSessionId());
            }
            return Optional.empty();
        } catch (IOException e) {
            return Optional.empty();
        }
    }

    public Optional<String> createProject(String sessionId, String name, String description) {
        try {
            Response<CreateProjectRes> res = api.createProject(sessionId, name, description).execute();
            CreateProjectRes body = res.body();
            if (body != null && body.isSuccess()) {
                return Optional.of(name);
            }
            return Optional.empty();
        } catch (IOException e) {
            return Optional.empty();
        }
    }

    public boolean uploadProject(String sessionId, String project, byte[] fileContent) {
        RequestBody file = MultipartBody.create(MediaType.parse("application/zip"), fileContent);
        MultipartBody.Part filePart = MultipartBody.Part.createFormData("file", "project.zip", file);
        MultipartBody.Part projectNamePart = MultipartBody.Part.createFormData("project", project);
        MultipartBody.Part sessionIdPart = MultipartBody.Part.createFormData("session.id", sessionId);
        MultipartBody.Part action = MultipartBody.Part.createFormData("ajax", "upload");
        try {
            Response<UploadProjectRes> res = api.uploadProject(action, sessionIdPart, projectNamePart, filePart).execute();
            UploadProjectRes body = res.body();
            return body != null && body.getError() == null;
        } catch (IOException e) {
            return false;
        }
    }

    public Optional<Integer> executeFlow(String sessionId, String project, String flow) {
        try {
            Response<ExecuteFlowRes> res = api.executeFlow(sessionId, project, flow).execute();
            ExecuteFlowRes body = res.body();
            if (body == null || !body.isSuccess()) {
                return Optional.empty();
            }
            return Optional.of(body.getExecId());
        } catch (IOException e) {
            return Optional.empty();
        }
    }

    public Optional<FetchFlowExecutionRes> fetchFlowExecution(String sessionId, Integer executionId) {
        try {
            Response<FetchFlowExecutionRes> res = api.fetchFlowExecution(sessionId, executionId).execute();
            FetchFlowExecutionRes body = res.body();
            if (body == null || !body.isSuccess()) {
                return Optional.empty();
            }
            return Optional.of(body);
        } catch (IOException e) {
            return Optional.empty();
        }
    }
}
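A minimal end-to-end sketch of the client above, assuming a reachable Azkaban server; the URL, credentials, project and flow names are illustrative:

import java.util.Optional;

class AzkabanClientExample {
    public static void main(String[] args) {
        AzkabanClient client = new AzkabanClient("http://localhost:8081"); // illustrative URL

        Optional<String> session = client.login("azkaban", "azkaban"); // illustrative credentials
        session.ifPresent(sessionId -> {
            client.createProject(sessionId, "demo", "demo project"); // also succeeds if the project already exists
            byte[] zippedFlow = new byte[0]; // a real zipped flow definition goes here
            if (client.uploadProject(sessionId, "demo", zippedFlow)) {
                client.executeFlow(sessionId, "demo", "my_flow")
                    .flatMap(execId -> client.fetchFlowExecution(sessionId, execId))
                    // present only when the execution already reports SUCCEEDED (see isSuccess above)
                    .ifPresent(res -> System.out.println("flow status: " + res.getStatus()));
            }
        });
    }
}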
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/CreateProjectRes.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

@JsonIgnoreProperties(ignoreUnknown = true)
public class CreateProjectRes {

    private String status;
    private String message;

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public boolean isSuccess() {
        return "success".equalsIgnoreCase(status) || "Project already exists.".equalsIgnoreCase(message);
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/ExecuteFlowRes.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@JsonIgnoreProperties(ignoreUnknown = true)
public class ExecuteFlowRes {

    private String message;
    private String project;
    private String flow;

    @JsonProperty("execid")
    private int execId;

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String getProject() {
        return project;
    }

    public void setProject(String project) {
        this.project = project;
    }

    public String getFlow() {
        return flow;
    }

    public void setFlow(String flow) {
        this.flow = flow;
    }

    public int getExecId() {
        return execId;
    }

    public void setExecId(int execId) {
        this.execId = execId;
    }

    public boolean isSuccess() {
        return message != null && message.contains("successfully");
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/FetchFlowExecutionRes.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

@JsonIgnoreProperties(ignoreUnknown = true)
public class FetchFlowExecutionRes {

    private String status;

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public boolean isSuccess() {
        return "SUCCEEDED".equalsIgnoreCase(status);
    }

    public boolean isFailed() {
        return "FAILED".equalsIgnoreCase(status);
    }

    public boolean isCompleted() {
        return isSuccess() || isFailed();
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/LoginRes.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

import com.fasterxml.jackson.annotation.JsonProperty;

public class LoginRes {

    @JsonProperty("session.id")
    private String sessionId;

    private String status;

    public String getSessionId() {
        return sessionId;
    }

    public void setSessionId(String sessionId) {
        this.sessionId = sessionId;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/azkaban/UploadProjectRes.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.azkaban;

public class UploadProjectRes {

    private String error;
    private String projectId;
    private String version;

    public String getError() {
        return error;
    }

    public void setError(String error) {
        this.error = error;
    }

    public String getProjectId() {
        return projectId;
    }

    public void setProjectId(String projectId) {
        this.projectId = projectId;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/DbndAgentConfig.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import java.util.HashMap;
import java.util.Map;

import static ai.databand.DbndPropertyNames.DBND__SPARK__IO_TRACKING_ENABLED;
import static ai.databand.DbndPropertyNames.DBND__VERBOSE;

public class DbndAgentConfig {

    private final Map<String, String> properties;

    public DbndAgentConfig(String args) {
        properties = new HashMap<>(1);
        if (args == null) {
            return;
        }
        args = args.trim();
        if (args.isEmpty()) {
            return;
        }
        for (String argPair : args.split(",")) {
            String[] keyValue = argPair.split("=");
            if (keyValue.length != 2) {
                throw new IllegalArgumentException("Arguments for the agent should be like: key1=value1,key2=value2");
            }
            String key = keyValue[0].trim();
            if (key.isEmpty()) {
                throw new IllegalArgumentException("Argument key should not be empty");
            }
            properties.put(key, keyValue[1].trim());
        }
    }

    protected final boolean isTrue(String key) {
        return Boolean.TRUE.toString().equalsIgnoreCase(properties.get(key));
    }

    public boolean isVerbose() {
        return isTrue(DBND__VERBOSE);
    }

    public boolean sparkIoTrackingEnabled() {
        return isTrue(DBND__SPARK__IO_TRACKING_ENABLED);
    }
}
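A small sketch of the argument format this parser accepts. The dotted key names are an assumption about the constants in DbndPropertyNames, which is not part of this section:

class DbndAgentConfigExample {
    public static void main(String[] args) {
        // Same "key1=value1,key2=value2" string that is passed as the -javaagent argument.
        // Assumes DBND__VERBOSE == "dbnd.verbose" and
        // DBND__SPARK__IO_TRACKING_ENABLED == "dbnd.spark.io_tracking_enabled".
        DbndAgentConfig config = new DbndAgentConfig("dbnd.verbose=true,dbnd.spark.io_tracking_enabled=true");
        System.out.println(config.isVerbose());              // true, under the assumption above
        System.out.println(config.sparkIoTrackingEnabled()); // true, under the assumption above
    }
}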
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/DbndConfig.java
/*
 * © Copyright Databand.ai, an IBM Company 2022-2024
 */

package ai.databand.config;

import ai.databand.RandomNames;
import ai.databand.schema.AirflowTaskContext;
import ai.databand.schema.AzkabanTaskContext;
import ai.databand.schema.DatabandTaskContext;
import org.apache.spark.SparkContext;
import org.apache.spark.sql.SparkSession;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.UUID;
import java.util.stream.Collectors;

import static ai.databand.DbndPropertyNames.*;

/**
 * Databand configuration.
 */
public class DbndConfig implements PropertiesSource {

    private static final Logger LOG = LoggerFactory.getLogger(DbndConfig.class);

    private final AirflowTaskContext afCtx;
    private final AzkabanTaskContext azkbnCtx;
    private final DatabandTaskContext dbndCtx;
    private final String fallbackTraceId;
    private final boolean previewEnabled;
    private boolean trackingEnabled;
    private final String databandUrl;
    private final String cmd;
    private final String runName;
    private final Map<String, String> props;
    private String sparkJobType;
    private String sparkNotebookPath;
    private String sparkNotebookName;

    // Not collecting any logs by default
    private static final int PREVIEW_HEAD_TAIL_DEFAULT = 0;

    /**
     * Default override order, from highest priority to lowest:
     * 1. Spark config
     * 2. Process environment variables
     * 3. Java process system properties
     */
    public DbndConfig() {
        this(
            new DbndSparkConf(
                new Env(
                    new JavaOpts()
                )
            )
        );
    }

    public DbndConfig(PropertiesSource props) {
        this(props, System.getProperties().getProperty("sun.java.command"));
    }

    public DbndConfig(PropertiesSource props, String cmd) {
        this.cmd = cmd;
        this.props = props.values();
        this.sparkJobType = null;
        this.sparkNotebookPath = null;
        this.sparkNotebookName = null;

        afCtx = buildAirflowCtxFromEnv(this.props);
        azkbnCtx = buildAzkabanCtxFromEnv(this.props);
        dbndCtx = buildDatabandCtxFromEnv(this.props);
        // used if no dbndCtx available
        fallbackTraceId = UUID.randomUUID().toString();

        previewEnabled = isTrue(this.props, DBND__TRACKING__DATA_PREVIEW) || isTrue(this.props, DBND__TRACKING__LOG_VALUE_PREVIEW);
        databandUrl = this.props.getOrDefault(DBND__CORE__DATABAND_URL, "http://localhost:8080");

        // tracking should be explicitly opted in when we're not running inside Airflow
        trackingEnabled = afCtx != null
            ? !isFalse(this.props, DBND__TRACKING) || isMissing(this.props, DBND__TRACKING)
            : isTrue(this.props, DBND__TRACKING);

        runName = azkbnCtx == null
            ? this.props.getOrDefault(DBND__RUN__NAME, RandomNames.next())
            : azkbnCtx.jobRunName();
    }

    private AirflowTaskContext buildAirflowCtxFromEnv(Map<String, String> env) {
        if (env.containsKey(AIRFLOW_CTX_DAG_ID)
            && env.containsKey(AIRFLOW_CTX_EXECUTION_DATE)
            && env.containsKey(AIRFLOW_CTX_TASK_ID)
            && env.containsKey(AIRFLOW_CTX_TRY_NUMBER)) {

            return new AirflowTaskContext(
                env.get(AIRFLOW_CTX_UID),
                env.get(AIRFLOW_CTX_UID),
                env.get(AIRFLOW_CTX_DAG_ID),
                env.get(AIRFLOW_CTX_EXECUTION_DATE),
                env.get(AIRFLOW_CTX_TASK_ID),
                env.get(AIRFLOW_CTX_TRY_NUMBER)
            );
        } else {
            return null;
        }
    }

    private DatabandTaskContext buildDatabandCtxFromEnv(Map<String, String> env) {
        if (env.containsKey(DBND_ROOT_RUN_UID)
            && env.containsKey(DBND_PARENT_TASK_RUN_UID)
            && env.containsKey(DBND_PARENT_TASK_RUN_ATTEMPT_UID)) {

            return new DatabandTaskContext(
                env.get(DBND_ROOT_RUN_UID),
                env.get(DBND_PARENT_TASK_RUN_UID),
                env.get(DBND_PARENT_TASK_RUN_ATTEMPT_UID),
                env.getOrDefault(DBND_TRACE_ID, UUID.randomUUID().toString())
            );
        } else {
            return null;
        }
    }

    private AzkabanTaskContext buildAzkabanCtxFromEnv(Map<String, String> env) {
        Optional<AzkabanTaskContext> fromFile = readFromProperties(env);
        return fromFile.orElseGet(() -> buildFromMap(env));
    }

    private AzkabanTaskContext buildFromMap(Map<String, String> env) {
        if (env.containsKey("azkaban.flow.flowid")
            && env.containsKey("azkaban.flow.uuid")
            && env.containsKey("azkaban.flow.execid")
            && env.containsKey("azkaban.job.id")) {

            return new AzkabanTaskContext(
                env.get("azkaban.flow.flowid"),
                env.get("azkaban.flow.flowid"),
                env.get("azkaban.flow.uuid"),
                env.get("azkaban.flow.execid"),
                env.get("azkaban.job.id"),
                this
            );
        } else {
            return null;
        }
    }

    private Optional<AzkabanTaskContext> readFromProperties(Map<String, String> env) {
        if (env.containsKey("JOB_PROP_FILE")) {
            String fileName = env.get("JOB_PROP_FILE");
            try (FileInputStream input = new FileInputStream(fileName)) {
                Properties props = new Properties();
                props.load(input);
                return Optional.ofNullable(buildFromMap((Map) props));
            } catch (IOException e) {
                return Optional.empty();
            }
        }
        return Optional.empty();
    }

    public String databandUrl() {
        return databandUrl;
    }

    public boolean isPreviewEnabled() {
        return previewEnabled;
    }

    public boolean isTrackingEnabled() {
        return trackingEnabled;
    }

    public void setTrackingEnabled(boolean trackingEnabled) {
        this.trackingEnabled = trackingEnabled;
    }

    public Optional<AirflowTaskContext> airflowContext() {
        return Optional.ofNullable(afCtx);
    }

    public Optional<AzkabanTaskContext> azkabanContext() {
        return Optional.ofNullable(azkbnCtx);
    }

    public Optional<DatabandTaskContext> databandTaskContext() {
        return Optional.ofNullable(dbndCtx);
    }

    public String getTraceId() {
        if (this.databandTaskContext().isPresent()) {
            return this.databandTaskContext().get().getTraceId();
        }
        return fallbackTraceId;
    }

    public String cmd() {
        return cmd;
    }

    public String runName() {
        return runName;
    }

    public Optional<String> jobName() {
        return getValue(DBND__RUN__JOB_NAME);
    }

    public Optional<String> projectName() {
        return getValue(DBND__TRACKING__PROJECT);
    }

    public Optional<String> csrfToken() {
        return getValue(DBND__CSRF_TOKEN);
    }

    public Optional<String> sessionCookie() {
        return getValue(DBND__SESSION_COOKIE);
    }

    public Optional<String> personalAccessToken() {
        return getValue(DBND__CORE__DATABAND_ACCESS_TOKEN);
    }

    public boolean isVerbose() {
        return isTrue(DBND__VERBOSE);
    }

    public int previewTotalBytes() {
        return previewHeadBytes() + previewTailBytes();
    }

    public int previewHeadBytes() {
        return getInteger(DBND__LOG__PREVIEW_HEAD_BYTES, PREVIEW_HEAD_TAIL_DEFAULT);
    }

    public int previewTailBytes() {
        return getInteger(DBND__LOG__PREVIEW_TAIL_BYTES, PREVIEW_HEAD_TAIL_DEFAULT);
    }

    public boolean isSendingLogs() {
        return previewHeadBytes() > 0 || previewTailBytes() > 0;
    }

    protected Integer getInteger(String key, Integer defaultValue) {
        String value = props.get(key);
        if (value == null) {
            return defaultValue;
        }
        try {
            return Integer.parseInt(value);
        } catch (NumberFormatException e) {
            LOG.error("Unable to read integer value from {}. Returning default value {}", value, defaultValue);
            return defaultValue;
        }
    }

    public void setSparkProperties(Properties sparkProps) {
        try {
            // some Spark properties might change between Spark job runs,
            // but we are only interested in global Spark properties which remain the same
            if (sparkJobType != null) {
                return; // all global Spark settings are already propagated
            }
            sparkJobType = "NotDetected";
            setDatabricksProperties(sparkProps);
            if (isVerbose()) {
                LOG.info("Propagated Spark properties, job = '{}', path = '{}', name = '{}'", sparkJobType, sparkNotebookPath, sparkNotebookName);
            }
        } catch (Exception e) {
            LOG.error("Unable to extract job name and path from spark properties", e);
        }
    }

    private void setDatabricksProperties(Properties sparkProps) {
        String jobType = sparkProps.getProperty("spark.databricks.job.type");
        if (jobType == null) {
            return; // not a Databricks Spark job
        }
        sparkJobType = jobType;
        if (jobType.equals("notebook") || jobType.equals("python")) {
            // e.g. /Users/my.user@ibm.com/my_notebook_name or dbfs:/FileStore/job-jars/mysuser/myscript.py
            sparkNotebookPath = sparkProps.getProperty("spark.databricks.notebook.path");
            // it should not be null for the "notebook"/"python" job type
            if (sparkNotebookPath == null) {
                LOG.warn("Not able to detect Notebook/Python name. Spark application name \"spark.app.name\" will be used instead.");
                return;
            }
            sparkNotebookName = sparkNotebookPath.substring(sparkNotebookPath.lastIndexOf('/') + 1);
        }
    }

    public String sparkAppName() {
        // todo: detect we're running inside spark
        try {
            if (sparkNotebookName != null && !sparkNotebookName.isEmpty()) {
                // Use the Databricks notebook name if available.
                // spark.app.name is always set to "Databricks Shell" for Python/Notebook tasks on Databricks
                return sparkNotebookName;
            }
            SparkSession session = SparkSession.active();
            SparkContext ctx = session.sparkContext();
            return ctx.getConf().get("spark.app.name");
        } catch (Exception e) {
            return "none";
        }
    }

    public Map<String, String> values() {
        return Collections.unmodifiableMap(props);
    }

    protected final boolean isTrue(String key) {
        return Boolean.TRUE.toString().equalsIgnoreCase(props.get(key));
    }

    protected final boolean isFalse(String key) {
        return Boolean.FALSE.toString().equalsIgnoreCase(props.get(key));
    }

    protected final boolean isTrue(Map<String, String> env, String key) {
        return Boolean.TRUE.toString().equalsIgnoreCase(env.get(key));
    }

    protected final boolean isFalse(Map<String, String> env, String key) {
        return Boolean.FALSE.toString().equalsIgnoreCase(env.get(key));
    }

    protected final boolean isMissing(Map<String, String> env, String key) {
        return !env.containsKey(key);
    }

    @Override
    public Optional<String> getValue(String key) {
        String value = props.get(key);
        if (value == null || value.isEmpty()) {
            return Optional.empty();
        }
        return Optional.of(value);
    }

    /**
     * Mask sensitive config values.
     *
     * @param key   config key
     * @param value config value
     * @return masked value when the key looks like a token, the original value otherwise
     */
    protected String maskValue(String key, String value) {
        if (value == null) {
            return null;
        }
        if (key.toLowerCase().contains("token")) {
            return "***";
        }
        return value;
    }

    @Override
    public String toString() {
        if (props == null || props.isEmpty()) {
            return "{}";
        }
        return "\n" + props.keySet().stream()
            .filter(key -> key.toLowerCase().startsWith("dbnd") || key.toLowerCase().startsWith("airflow"))
            .map(key -> key + "=" + maskValue(key, props.get(key)))
            .collect(Collectors.joining("\n"));
    }
}
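A sketch of the layered configuration described by the default constructor, with an explicit in-memory base map swapped in; the URL and key are illustrative, and the key string assumes DBND__CORE__DATABAND_URL resolves to "dbnd.core.databand_url":

import java.util.HashMap;
import java.util.Map;

class DbndConfigLayeringExample {
    public static void main(String[] args) {
        Map<String, String> base = new HashMap<>();
        base.put("dbnd.core.databand_url", "https://tracker.example.com"); // illustrative URL

        // Same layering as the default constructor: system properties, then environment
        // variables, then Spark conf override whatever the base map provides.
        DbndConfig config = new DbndConfig(new DbndSparkConf(new Env(new JavaOpts(new SimpleProps(base)))));
        System.out.println(config.databandUrl());
    }
}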
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/DbndSparkConf.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import org.apache.spark.SparkConf;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/**
 * Spark config properties source. Values are passed in uppercase+underscore format.
 */
public class DbndSparkConf implements PropertiesSource {

    private static final Logger LOG = LoggerFactory.getLogger(DbndSparkConf.class);

    private final Map<String, String> props;

    public DbndSparkConf(PropertiesSource parent) {
        Map<String, String> sparkConf;
        try {
            sparkConf = this.sparkConfToMap(new SparkConf());
        } catch (Exception e) {
            LOG.warn("Databand is unable to resolve active spark session, 'spark.env.DBND...' variables won't be parsed");
            sparkConf = Collections.emptyMap();
        }
        Map<String, String> sparkProps = new HashMap<>(1);
        props = new HashMap<>(parent.values());
        for (Map.Entry<String, String> next : sparkConf.entrySet()) {
            if (next.getKey().startsWith("spark.env.")) {
                sparkProps.put(next.getKey().replace("spark.env.", ""), next.getValue());
            }
        }
        props.putAll(new NormalizedProps(sparkProps).values());
    }

    /**
     * Converts a Spark application configuration (SparkConf) into a plain key-value map.
     *
     * @param sparkConf configuration of the spark application
     * @return spark configuration as a java map object
     */
    private Map<String, String> sparkConfToMap(SparkConf sparkConf) {
        Map<String, String> result = new HashMap<>();
        Arrays.stream(sparkConf.getAll())
            .forEach(x -> result.put(x._1(), x._2()));
        return result;
    }

    public Map<String, String> values() {
        return Collections.unmodifiableMap(props);
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
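A sketch of the spark.env prefix stripping above, assuming Spark is on the classpath (SparkConf loads "spark.*" JVM system properties by default, which stands in here for --conf values passed at submit time):

class DbndSparkConfExample {
    public static void main(String[] args) {
        // Simulates submitting with --conf spark.env.DBND__TRACKING=True.
        System.setProperty("spark.env.DBND__TRACKING", "True");
        DbndSparkConf conf = new DbndSparkConf(new SimpleProps());
        // The "spark.env." prefix is stripped and the rest is normalized.
        System.out.println(conf.getValue("dbnd.tracking")); // Optional[True]
    }
}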
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/Env.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/**
 * Environment variables properties source. Vars are passed using uppercase+underscore format:
 * DBND__TRACKING__ENABLED=True
 */
public class Env implements PropertiesSource {

    private final Map<String, String> props;

    public Env(PropertiesSource parent) {
        Map<String, String> parentProps = parent.values();
        props = new HashMap<>(parentProps);
        props.putAll(new NormalizedProps(System.getenv()).values());
    }

    public Env() {
        this(new SimpleProps());
    }

    public Map<String, String> values() {
        return Collections.unmodifiableMap(props);
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/JavaOpts.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

/**
 * JAVA_OPTS properties source. Variables are passed in lowercase+dot format:
 * java -jar ... -Ddbnd.tracking.enabled=True
 */
public class JavaOpts implements PropertiesSource {

    private final Map<String, String> props;

    public JavaOpts() {
        this(new SimpleProps());
    }

    public JavaOpts(PropertiesSource parent) {
        this(
            parent,
            System.getProperties()
                .entrySet()
                .stream()
                .collect(Collectors.toMap(e -> e.getKey().toString(), e -> e.getValue().toString()))
        );
    }

    public JavaOpts(Map<String, String> systemProps) {
        this(new SimpleProps(), systemProps);
    }

    public JavaOpts(PropertiesSource parent, Map<String, String> systemProps) {
        props = new HashMap<>(parent.values());
        props.putAll(new NormalizedProps(systemProps).values());
    }

    public Map<String, String> values() {
        return Collections.unmodifiableMap(props);
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/NormalizedProps.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;

/**
 * Normalizes properties: DBND__TRACKING__ENABLED becomes dbnd.tracking.enabled.
 */
public class NormalizedProps implements PropertiesSource {

    private final Map<String, String> props;

    public NormalizedProps(Map<String, String> propsToNormalize) {
        props = new HashMap<>();
        for (Map.Entry<String, String> prop : propsToNormalize.entrySet()) {
            String key = prop.getKey();
            String normalizedValue = prop.getValue().trim();
            if (key.toLowerCase().startsWith("dbnd")) {
                String normalizedKey = key.replace("__", ".").toLowerCase();
                props.put(normalizedKey, normalizedValue);
            } else {
                props.put(key, prop.getValue());
            }
        }
    }

    @Override
    public Map<String, String> values() {
        return Collections.unmodifiableMap(props);
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
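A quick sketch of the normalization rule above: only keys starting with "dbnd" (case-insensitive) are rewritten, double underscores become dots, the key is lowercased, and the value is trimmed; other entries pass through untouched:

import java.util.HashMap;
import java.util.Map;

class NormalizedPropsExample {
    public static void main(String[] args) {
        Map<String, String> raw = new HashMap<>();
        raw.put("DBND__TRACKING__ENABLED", " True ");
        raw.put("OTHER_KEY", "untouched");

        Map<String, String> normalized = new NormalizedProps(raw).values();
        System.out.println(normalized.get("dbnd.tracking.enabled")); // "True" (trimmed)
        System.out.println(normalized.get("OTHER_KEY"));             // "untouched"
    }
}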
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/PropertiesSource.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import java.util.Map;
import java.util.Optional;

public interface PropertiesSource {

    Map<String, String> values();

    Optional<String> getValue(String key);
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/config/SimpleProps.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.config;

import java.util.Collections;
import java.util.Map;
import java.util.Optional;

public class SimpleProps implements PropertiesSource {

    private final Map<String, String> props;

    public SimpleProps(Map<String, String> props) {
        this.props = props;
    }

    public SimpleProps() {
        this.props = Collections.emptyMap();
    }

    @Override
    public Map<String, String> values() {
        return Collections.unmodifiableMap(props);
    }

    @Override
    public Optional<String> getValue(String key) {
        return Optional.ofNullable(props.get(key));
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/id/Sha1.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.id;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;

public class Sha1 {

    private final byte[] value;

    public Sha1(String namespace, String name) {
        try {
            MessageDigest md = MessageDigest.getInstance("SHA-1");
            byte[] namespaceBytes = namespace.getBytes();
            byte[] nameBytes = name.getBytes(StandardCharsets.UTF_8);
            // concatenate namespace + name and hash the result
            byte[] both = Arrays.copyOf(namespaceBytes, namespaceBytes.length + nameBytes.length);
            System.arraycopy(nameBytes, 0, both, namespaceBytes.length, nameBytes.length);
            value = md.digest(both);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException("Unable to get SHA-1 digest", e);
        }
    }

    public byte[] value() {
        return value;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/id/Sha1Long.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.id;

import java.util.Base64;

public class Sha1Long {

    private final String value;

    public Sha1Long(String namespace, String value) {
        this.value = Base64.getEncoder().encodeToString(new Sha1(namespace, value).value());
    }

    @Override
    public String toString() {
        return value;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/id/Sha1Short.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.id;

public class Sha1Short {

    private final String value;

    public Sha1Short(String namespace, String name) {
        this.value = new Sha1Long(namespace, name).toString().substring(0, 8);
    }

    @Override
    public String toString() {
        return value;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/id/Uuid5.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.id;

import java.util.Objects;
import java.util.UUID;

public class Uuid5 {

    public static UUID NAMESPACE_DBND = Uuid5Raw.fromString(Uuid5Raw.NAMESPACE_DNS, "databand.ai");
    public static UUID NAMESPACE_DBND_JOB = Uuid5Raw.fromString(NAMESPACE_DBND, "job");
    public static UUID NAMESPACE_DBND_RUN = Uuid5Raw.fromString(NAMESPACE_DBND, "run");
    public static UUID NAMESPACE_DBND_TASK_DEF = Uuid5Raw.fromString(NAMESPACE_DBND, "task_definition");

    private final String value;

    public Uuid5(UUID namespace, String name) {
        this.value = Uuid5Raw.fromString(namespace, name).toString();
    }

    public Uuid5(String namespace, String name) {
        Sha1 digest = new Sha1(namespace, name);
        UUID uuid = Uuid5Raw.bytesToUuid(digest.value());
        this.value = uuid.toString();
    }

    @Override
    public String toString() {
        return value;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        Uuid5 uuid5 = (Uuid5) o;
        return value.equals(uuid5.value);
    }

    @Override
    public int hashCode() {
        return Objects.hash(value);
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/id/Uuid5Raw.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.id;

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Objects;
import java.util.UUID;

/**
 * UUID5 generator.
 */
public class Uuid5Raw {

    // Predefined DNS namespace
    public static final UUID NAMESPACE_DNS = UUID.fromString("6ba7b810-9dad-11d1-80b4-00c04fd430c8");

    /**
     * UUID5 is UUID(SHA-1(namespace + name)).
     * See <a href="https://www.rfc-editor.org/rfc/rfc4122">RFC4122</a> for details.
     *
     * @param namespace namespace UUID
     * @param name      name within the namespace
     * @return deterministic version 5 UUID
     */
    public static UUID fromString(UUID namespace, String name) {
        try {
            // step 0: check input
            Objects.requireNonNull(namespace, "UUID5 namespace should not be null");
            Objects.requireNonNull(name, "UUID5 name should not be null");
            // step 1: generate hash
            MessageDigest md = MessageDigest.getInstance("SHA-1");
            md.update(uuidToBytes(namespace));
            md.update(name.getBytes(StandardCharsets.UTF_8));
            byte[] digest = md.digest();
            // step 2: set most significant bits of octet 6 to version "5"
            digest[6] &= 0x0F;
            digest[6] |= 0x50;
            // step 3: set most significant bits of octet 8 to variant "IETF"
            digest[8] &= 0x3F;
            digest[8] |= 0x80;
            // step 4: take first 16 bytes from the digest and make UUID
            return bytesToUuid(digest);
        } catch (NoSuchAlgorithmException e) {
            throw new InternalError("SHA-1 not supported");
        }
    }

    /**
     * Convert a byte array to UUID using an unrolled loop.
     *
     * @param bytes at least 16 bytes in big-endian order
     * @return UUID built from the first 16 bytes
     */
    protected static UUID bytesToUuid(byte[] bytes) {
        long msb = ((long) bytes[0] << 56)
            | ((long) bytes[1] & 0xff) << 48
            | ((long) bytes[2] & 0xff) << 40
            | ((long) bytes[3] & 0xff) << 32
            | ((long) bytes[4] & 0xff) << 24
            | ((long) bytes[5] & 0xff) << 16
            | ((long) bytes[6] & 0xff) << 8
            | ((long) bytes[7] & 0xff);
        long lsb = ((long) bytes[8] << 56)
            | ((long) bytes[9] & 0xff) << 48
            | ((long) bytes[10] & 0xff) << 40
            | ((long) bytes[11] & 0xff) << 32
            | ((long) bytes[12] & 0xff) << 24
            | ((long) bytes[13] & 0xff) << 16
            | ((long) bytes[14] & 0xff) << 8
            | ((long) bytes[15] & 0xff);
        return new UUID(msb, lsb);
    }

    /**
     * Convert a UUID to a byte array.
     * <p>
     * UUID is 128 bits.
     * The array is composed of the 64 most significant bits and the 64 least significant bits in big-endian order.
     * This is an unrolled loop.
     *
     * @param uuid UUID to convert
     * @return 16-byte big-endian representation
     */
    protected static byte[] uuidToBytes(UUID uuid) {
        long msb = uuid.getMostSignificantBits();
        long lsb = uuid.getLeastSignificantBits();
        return new byte[]{
            (byte) (msb >> 56), (byte) (msb >> 48), (byte) (msb >> 40), (byte) (msb >> 32),
            (byte) (msb >> 24), (byte) (msb >> 16), (byte) (msb >> 8), (byte) msb,
            (byte) (lsb >> 56), (byte) (lsb >> 48), (byte) (lsb >> 40), (byte) (lsb >> 32),
            (byte) (lsb >> 24), (byte) (lsb >> 16), (byte) (lsb >> 8), (byte) lsb};
    }
}
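A sanity check for the generator above against the well-known test vector from the Python standard library documentation (uuid5 of the DNS namespace and "python.org"):

import java.util.UUID;

class Uuid5RawCheck {
    public static void main(String[] args) {
        // Same input always yields the same UUID; the Python docs list this value
        // for uuid5(NAMESPACE_DNS, 'python.org').
        UUID id = Uuid5Raw.fromString(Uuid5Raw.NAMESPACE_DNS, "python.org");
        System.out.println(id); // 886313e1-3b8a-5372-9b90-0c9aee199e5d
    }
}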
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/log/DbndLogger.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.log;

import ai.databand.DbndWrapper;
import ai.databand.schema.DatasetOperationStatus;
import ai.databand.schema.DatasetOperationType;
import org.apache.spark.sql.Dataset;

/**
 * Report metrics and dataframes to Databand.
 * This class is safe to use in any circumstances and won't break client code.
 */
public class DbndLogger {

    /**
     * Report a single metric.
     *
     * @param key   metric key
     * @param value metric value. The value will be converted using the following rules:
     *              integers and doubles will be converted to integers and doubles,
     *              strings will be converted to strings,
     *              spark datasets will generate a preview,
     *              toString() will be called for the rest of the types
     */
    public static void logMetric(String key, Object value) {
        try {
            DbndWrapper.instance().logMetric(key, value);
        } catch (Throwable e) {
            System.out.println("DbndLogger: Unable to log metric");
            e.printStackTrace();
        }
    }

    /**
     * Report a Spark dataframe. Descriptive statistics and histograms will be generated for each column.
     *
     * @param key            dataframe key
     * @param value          dataframe to report
     * @param withHistograms whether to generate histograms
     */
    public static void logDataframe(String key, Object value, boolean withHistograms) {
        try {
            DbndWrapper.instance().logDataframe(key, (Dataset<?>) value, withHistograms);
        } catch (Throwable e) {
            System.out.println("DbndLogger: Unable to log dataframe");
            e.printStackTrace();
        }
    }

    /**
     * Report a Spark dataframe. Descriptive statistics and histograms will be generated for each column.
     * Reporting can be customized using a HistogramRequest object.
     *
     * @param key              dataframe key
     * @param value            dataframe to report
     * @param histogramRequest histogram customization
     */
    public static void logDataframe(String key, Object value, HistogramRequest histogramRequest) {
        try {
            DbndWrapper.instance().logDataframe(key, (Dataset<?>) value, histogramRequest);
        } catch (Throwable e) {
            System.out.println("DbndLogger: Unable to log dataframe");
            e.printStackTrace();
        }
    }

    /**
     * Report a successful dataset operation. Schema and preview will be calculated automatically.
     *
     * @param path data path (S3, filesystem, GCS etc)
     * @param type operation type — read/write
     * @param data spark dataset
     */
    public static void logDatasetOperation(String path, DatasetOperationType type, Dataset<?> data) {
        logDatasetOperation(path, type, DatasetOperationStatus.OK, data, new LogDatasetRequest());
    }

    public static void logDatasetOperation(String path, DatasetOperationType type, Dataset<?> data, LogDatasetRequest params) {
        logDatasetOperation(path, type, DatasetOperationStatus.OK, data, params);
    }

    /**
     * Report a dataset operation. Schema and preview will be calculated automatically.
     *
     * @param path   data path (S3, filesystem, GCS etc)
     * @param type   operation type — read/write
     * @param status operation status — success/failure
     * @param data   spark dataset
     */
    public static void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, Dataset<?> data) {
        logDatasetOperation(path, type, status, data, new LogDatasetRequest());
    }

    /**
     * Report a dataset operation. Schema and preview will be calculated automatically.
     *
     * @param path   data path (S3, filesystem, GCS etc)
     * @param type   operation type — read/write
     * @param status operation status — success/failure
     * @param data   spark dataset
     * @param params reporting customization
     */
    public static void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, Dataset<?> data, LogDatasetRequest params) {
        logDatasetOperation(path, type, status, data, null, params);
    }

    /**
     * Report a dataset operation and the error, if one occurred. Schema and preview will be calculated automatically.
     *
     * @param path   data path (S3, filesystem, GCS etc)
     * @param type   operation type — read/write
     * @param status operation status — success/failure
     * @param data   spark dataset
     * @param error  error thrown when the operation failed
     */
    public static void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, Dataset<?> data, Throwable error) {
        logDatasetOperation(path, type, status, data, error, new LogDatasetRequest());
    }

    public static void logDatasetOperation(String path, DatasetOperationType type, DatasetOperationStatus status, Dataset<?> data, Throwable error, LogDatasetRequest params) {
        try {
            DbndWrapper.instance().logDatasetOperation(path, type, status, data, error, params);
        } catch (Throwable e) {
            System.out.println("DbndLogger: Unable to log dataset operation");
            e.printStackTrace();
        }
    }
}
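A minimal sketch of logging from a Spark job. The path and session setup are illustrative, and the READ constant name is an assumption based on the read/write javadoc above:

import ai.databand.schema.DatasetOperationType;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

class DbndLoggerExample {
    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder().appName("example").getOrCreate();
        Dataset<Row> data = spark.read().json("s3://bucket/input.json"); // illustrative path

        // Both calls swallow their own failures, so tracking never breaks the job itself.
        DbndLogger.logMetric("rows", data.count());
        DbndLogger.logDatasetOperation("s3://bucket/input.json", DatasetOperationType.READ, data);
    }
}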
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/log/HistogramRequest.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.log;

import java.util.Collection;
import java.util.HashSet;
import java.util.Set;

/**
 * Allows customizing dataframe reporting. Only boolean, numeric and string columns are supported.
 */
public class HistogramRequest {

    private final Set<String> includeColumns = new HashSet<>(1);
    private final Set<String> excludeColumns = new HashSet<>(1);
    private boolean includeAllNumeric;
    private boolean includeAllString;
    private boolean includeAllBoolean;
    private boolean onlyStats;
    private boolean approxDistinct;
    private final boolean enabled;

    /**
     * Default constructor assumes full descriptive statistics and histograms calculation.
     */
    public HistogramRequest() {
        this.enabled = true;
    }

    /**
     * @param all whether to enable reporting for all supported columns
     */
    public HistogramRequest(boolean all) {
        this.enabled = all;
        if (all) {
            this.includeAllString = true;
            this.includeAllBoolean = true;
            this.includeAllNumeric = true;
        }
    }

    /**
     * Column names to include in the report. When not provided, all columns except the excluded
     * ones are reported; when provided, only these columns are included in the report.
     *
     * @param columns columns to include
     * @return this request
     */
    public HistogramRequest includeColumns(Collection<String> columns) {
        this.includeColumns.addAll(columns);
        return this;
    }

    /**
     * Exclude columns from calculations.
     *
     * @param columns columns to exclude
     * @return this request
     */
    public HistogramRequest excludeColumns(Collection<String> columns) {
        this.excludeColumns.addAll(columns);
        return this;
    }

    /**
     * Include all boolean columns.
     *
     * @return this request
     */
    public HistogramRequest includeAllBoolean() {
        this.includeAllBoolean = true;
        return this;
    }

    /**
     * Include all numeric columns.
     *
     * @return this request
     */
    public HistogramRequest includeAllNumeric() {
        this.includeAllNumeric = true;
        return this;
    }

    /**
     * Include all string columns.
     *
     * @return this request
     */
    public HistogramRequest includeAllString() {
        this.includeAllString = true;
        return this;
    }

    /**
     * Generate only descriptive statistics.
     *
     * @return this request
     */
    public HistogramRequest onlyStats() {
        this.onlyStats = true;
        return this;
    }

    /**
     * Use the approximate distinct calculation method. May speed up calculations.
     *
     * @return this request
     */
    public HistogramRequest approxDistinct() {
        this.approxDistinct = true;
        return this;
    }

    protected Set<String> getIncludeColumns() {
        return includeColumns;
    }

    public Set<String> getExcludeColumns() {
        return excludeColumns;
    }

    public boolean isIncludeAllBoolean() {
        return includeAllBoolean;
    }

    public boolean isIncludeAllNumeric() {
        return includeAllNumeric;
    }

    public boolean isIncludeAllString() {
        return includeAllString;
    }

    public boolean isOnlyStats() {
        return onlyStats;
    }

    public boolean isApproxDistinct() {
        return approxDistinct;
    }

    public boolean isEnabled() {
        return enabled;
    }

    public boolean isExcluded(String column) {
        return excludeColumns.contains(column);
    }

    /**
     * Generate a report for all columns.
     *
     * @return request covering all supported columns
     */
    public static HistogramRequest ALL() {
        return new HistogramRequest(true);
    }

    /**
     * Generate a report only for string columns.
     *
     * @return request covering string columns
     */
    public static HistogramRequest ALL_STRING() {
        return new HistogramRequest().includeAllString();
    }

    /**
     * Generate a report only for boolean columns.
     *
     * @return request covering boolean columns
     */
    public static HistogramRequest ALL_BOOLEAN() {
        return new HistogramRequest().includeAllBoolean();
    }

    /**
     * Generate a report only for numeric columns.
     *
     * @return request covering numeric columns
     */
    public static HistogramRequest ALL_NUMERIC() {
        return new HistogramRequest().includeAllNumeric();
    }

    /**
     * Do not generate a report at all.
     *
     * @return disabled request
     */
    public static HistogramRequest NONE() {
        return new HistogramRequest(false);
    }

    /**
     * The default request assumes all columns.
     *
     * @return request covering all supported columns
     */
    public static HistogramRequest DEFAULT() {
        return ALL();
    }
}
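A sketch of composing a request with the builder methods above; the excluded column name is illustrative:

import java.util.Arrays;

class HistogramRequestExample {
    public static void main(String[] args) {
        // Numeric stats and histograms, skipping one free-text column,
        // with the cheaper approximate distinct count.
        HistogramRequest req = new HistogramRequest()
            .includeAllNumeric()
            .excludeColumns(Arrays.asList("raw_payload"))
            .approxDistinct();
        // Typically passed on: DbndLogger.logDataframe("users", usersDf, req);
        System.out.println(req.isIncludeAllNumeric()); // true
    }
}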
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/log/LogDatasetRequest.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.log;

/**
 * Configuration for a logDatasetOperation request.
 */
public class LogDatasetRequest {

    private Boolean withHistograms = null;
    private Boolean withPartition = null;
    private Boolean withStats = true;
    private Boolean withPreview = false;
    private Boolean withSchema = true;

    public LogDatasetRequest withHistograms() {
        return withHistograms(true);
    }

    public LogDatasetRequest withHistograms(Boolean withHistograms) {
        this.withHistograms = withHistograms;
        return this;
    }

    public LogDatasetRequest withPartition() {
        return withPartition(true);
    }

    public LogDatasetRequest withPartition(Boolean withPartition) {
        this.withPartition = withPartition;
        return this;
    }

    public LogDatasetRequest withStats() {
        return withStats(true);
    }

    public LogDatasetRequest withStats(Boolean withStats) {
        this.withStats = withStats;
        return this;
    }

    public LogDatasetRequest withPreview() {
        return withPreview(true);
    }

    public LogDatasetRequest withPreview(Boolean withPreview) {
        this.withPreview = withPreview;
        return this;
    }

    public LogDatasetRequest withSchema() {
        return withSchema(true);
    }

    public LogDatasetRequest withSchema(Boolean withSchema) {
        this.withSchema = withSchema;
        return this;
    }

    public Boolean getWithHistograms() {
        return withHistograms;
    }

    public Boolean getWithPartition() {
        return withPartition;
    }

    public Boolean getWithStats() {
        return withStats;
    }

    public Boolean getWithPreview() {
        return withPreview;
    }

    public Boolean getWithSchema() {
        return withSchema;
    }
}
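The defaults above report stats and schema but no preview; a sketch enabling previews and partition tracking while explicitly disabling histograms:

class LogDatasetRequestExample {
    public static void main(String[] args) {
        LogDatasetRequest params = new LogDatasetRequest()
            .withPreview()
            .withPartition()
            .withHistograms(false);
        // Typically passed on: DbndLogger.logDatasetOperation(path, type, data, params);
        System.out.println(params.getWithPreview()); // true
    }
}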
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/log/TruncatedLog.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.log;

import ai.databand.config.DbndConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class TruncatedLog {

    private static final Logger LOG = LoggerFactory.getLogger(TruncatedLog.class);

    public static final String EMPTY_LOG_MSG = "Log truncated to empty";
    public static final String EMPTY_ERROR_LOG_MSG = "Error occurred while reading log file";
    public static final String PLACEHOLDER = "\r\n...\r\n\r\nThe log body is truncated by databand, fetched %s bytes for the `head` and %s bytes for the `tail` from the whole {file_size} bytes of the file.\r\nControl the log preview length with dbnd.log.preview_head_bytes and dbnd.log.preview_tail_bytes\r\n\r\n...\r\n";

    private String log;

    public TruncatedLog(DbndConfig config, String logBody) {
        int headBytes = config.previewHeadBytes();
        int tailBytes = config.previewTailBytes();
        byte[] logBytes = logBody.getBytes();
        if (headBytes == 0 && tailBytes == 0) {
            // truncate to zero
            this.log = EMPTY_LOG_MSG;
            return;
        }
        if (headBytes + tailBytes >= logBytes.length) {
            // do not truncate at all
            this.log = logBody;
        } else {
            // fetch head + tail
            String headStr = new String(Arrays.copyOfRange(logBytes, 0, headBytes));
            String tailStr = new String(Arrays.copyOfRange(logBytes, logBytes.length - tailBytes, logBytes.length));
            this.log = headStr + String.format(PLACEHOLDER, headBytes, tailBytes) + tailStr;
        }
    }

    public TruncatedLog(DbndConfig config, File logFile) {
        int headBytes = config.previewHeadBytes();
        int tailBytes = config.previewTailBytes();
        try {
            if (logFile.length() <= config.previewTotalBytes()) {
                try (Stream<String> lines = Files.lines(logFile.toPath())) {
                    this.log = lines.collect(Collectors.joining("\n"));
                }
            } else {
                // todo: potential OOM in case of very large limits
                byte[] head = new byte[headBytes];
                byte[] tail = new byte[tailBytes];
                try (RandomAccessFile raf = new RandomAccessFile(logFile, "r")) {
                    // readFully guarantees the requested number of bytes is actually read
                    raf.readFully(head, 0, headBytes);
                    raf.seek(logFile.length() - tailBytes);
                    raf.readFully(tail, 0, tailBytes);
                    this.log = new String(head) + String.format(PLACEHOLDER, headBytes, tailBytes) + new String(tail);
                }
            }
        } catch (IOException e) {
            LOG.error(String.format("Unable to read log file %s", logFile.getAbsolutePath()), e);
            this.log = EMPTY_ERROR_LOG_MSG;
        }
    }

    @Override
    public String toString() {
        return log;
    }
}
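A sketch of the truncation arithmetic above. The dotted keys assume DBND__LOG__PREVIEW_HEAD_BYTES and DBND__LOG__PREVIEW_TAIL_BYTES resolve to the names mentioned in PLACEHOLDER; both default to 0, which truncates to EMPTY_LOG_MSG:

import ai.databand.config.DbndConfig;
import ai.databand.config.SimpleProps;

import java.util.HashMap;
import java.util.Map;

class TruncatedLogExample {
    public static void main(String[] args) {
        Map<String, String> props = new HashMap<>();
        props.put("dbnd.log.preview_head_bytes", "100");
        props.put("dbnd.log.preview_tail_bytes", "100");
        DbndConfig config = new DbndConfig(new SimpleProps(props));

        StringBuilder body = new StringBuilder();
        for (int i = 0; i < 100; i++) {
            body.append("line ").append(i).append('\n');
        }
        // Keeps 100 head bytes and 100 tail bytes with the placeholder in between.
        System.out.println(new TruncatedLog(config, body.toString()));
    }
}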
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/DatasetOperationPreview.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.parameters;

import ai.databand.schema.ColumnStats;
import ai.databand.schema.DatasetOperationSchema;
import ai.databand.schema.Pair;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static ai.databand.DbndPropertyNames.DBND_INTERNAL_ALIAS;

public class DatasetOperationPreview extends DatasetPreview {

    @Override
    public Object schema(Dataset<Row> input) {
        Dataset<?> schemaAlias = input.alias(String.format("%s_%s", DBND_INTERNAL_ALIAS, "SCHEMA"));
        return extractSchema(schemaAlias.schema(), schemaAlias.count()).left();
    }

    public Pair<String, List<Long>> extractSchema(StructType schema, long rows) {
        try {
            List<String> columns = new ArrayList<>(schema.fields().length);
            Map<String, String> dtypes = new HashMap<>(schema.fields().length);
            for (StructField field : schema.fields()) {
                columns.add(field.name());
                dtypes.put(field.name(), field.dataType().typeName());
            }
            List<Long> shape = Arrays.asList(rows, (long) columns.size());
            try {
                return new Pair<>(new ObjectMapper().writeValueAsString(new DatasetOperationSchema(columns, dtypes, shape)), shape);
            } catch (JsonProcessingException e) {
                return new Pair<>("", shape);
            }
        } catch (Exception e) {
            return new Pair<>("", Collections.emptyList());
        }
    }

    public List<ColumnStats> extractColumnStats(StructType schema, long rows) {
        return Arrays.stream(schema.fields())
            .map(field -> new ColumnStats()
                .setColumnName(field.name())
                .setColumnType(field.dataType().typeName())
                .setRecordsCount(rows))
            .collect(Collectors.toList());
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/DatasetPreview.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.parameters;

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;

import java.util.Arrays;
import java.util.List;

import static ai.databand.DbndPropertyNames.DBND_INTERNAL_ALIAS;

public class DatasetPreview implements TaskParameterPreview<Dataset<Row>> {

    @Override
    public String compact(Dataset<Row> input) {
        return "Dataset";
    }

    @Override
    public String full(Dataset<Row> input) {
        Dataset<?> previewAlias = input.alias(String.format("%s_%s", DBND_INTERNAL_ALIAS, "PREVIEW"));
        try {
            return previewAlias.showString(20, 2048, false);
        } catch (Exception e) {
            return "";
        }
    }

    @Override
    public String typeName(Class<Dataset<Row>> input) {
        return "Dataset";
    }

    @Override
    public Object schema(Dataset<Row> input) {
        Dataset<?> previewSchemaAlias = input.alias(String.format("%s_%s", DBND_INTERNAL_ALIAS, "PREVIEW_SCHEMA"));
        try {
            return previewSchemaAlias.schema().prettyJson();
        } catch (Exception e) {
            return "";
        }
    }

    /**
     * This method calculates the exact size of the dataframe.
     * TODO: opt out of dimensions calculation, because count() kicks off a new job and may take a lot of time.
     * There is no easy way to calculate the dataframe size without converting to RDD and using rdd.countApprox().
     *
     * @param input dataframe to measure
     * @return [rows, columns]
     */
    @Override
    public List<Long> dimensions(Dataset<Row> input) {
        Dataset<?> dimsAlias = input.alias(String.format("%s_%s", DBND_INTERNAL_ALIAS, "DIMS"));
        try {
            long rows = dimsAlias.count();
            long columns = dimsAlias.columns().length;
            return Arrays.asList(rows, columns);
        } catch (Exception e) {
            return Arrays.asList(0L, 0L);
        }
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/Histogram.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.parameters;

import ai.databand.log.HistogramRequest;
import ai.databand.schema.histograms.ColumnSummary;
import ai.databand.schema.histograms.NumericSummary;
import ai.databand.schema.histograms.Summary;
import org.apache.spark.sql.Column;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.types.BooleanType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.FractionalType;
import org.apache.spark.sql.types.IntegralType;
import org.apache.spark.sql.types.NumericType;
import org.apache.spark.sql.types.StringType;
import org.apache.spark.sql.types.StructField;
import scala.collection.JavaConverters;
import scala.collection.Seq;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static ai.databand.DbndPropertyNames.DBND_INTERNAL_ALIAS;
import static org.apache.spark.sql.functions.col;
import static org.apache.spark.sql.functions.count;
import static org.apache.spark.sql.functions.desc;
import static org.apache.spark.sql.functions.lit;
import static org.apache.spark.sql.functions.when;

public class Histogram {

    private static final int MAX_NUMERIC_BUCKETS_COUNT = 20;
    private static final int MAX_CATEGORICAL_BUCKETS_COUNT = 50;

    private final String dfKey;
    private final Dataset<?> dataset;
    private final HistogramRequest req;
    private final Map<String, Object> result;
    private final Map<String, Summary> summaries;

    public Histogram(String key, Dataset<?> dataset, HistogramRequest histogramRequest) {
        this.dfKey = key;
        this.dataset = dataset.alias(String.format("%s_%s", DBND_INTERNAL_ALIAS, "HISTOGRAM"));
        this.req = histogramRequest;
        result = new HashMap<>(1);
        summaries = new HashMap<>(1);
    }

    protected <T> Seq<T> seq(List<T> list) {
        return JavaConverters.collectionAsScalaIterableConverter(list).asScala().toSeq();
    }

    public Map<String, Object> metricValues() {
        result.put(String.format("%s.stats", dfKey), summary());
        if (req.isEnabled() && !req.isOnlyStats()) {
            Map<String, Object> histograms = new HashMap<>(1);
            if (req.isIncludeAllNumeric()) {
                histograms.putAll(numericHistograms());
            }
            if (req.isIncludeAllString()) {
                histograms.putAll(categoricalHistograms(StringType.class));
            }
            if (req.isIncludeAllBoolean()) {
                histograms.putAll(categoricalHistograms(BooleanType.class));
            }
            result.put(String.format("%s.histograms", dfKey), histograms);
        }
        return result;
    }

    public Map<String, Map<String, Object>> summary() {
        Dataset<Row> summaryDf = dataset.summary();
        Map<String, Integer> colToIdx = new HashMap<>();
        for (int i = 0; i < summaryDf.columns().length; i++) {
            colToIdx.put(summaryDf.columns()[i], i);
        }
        List<Row> rawSummary = summaryDf.collectAsList();
        Map<String, Row> summary = new HashMap<>(1);
        for (Row row : rawSummary) {
            summary.put(row.get(0).toString(), row);
        }
        // distinct, non-null, and null counts are calculated separately because they are not included in the default Spark summary
        List<String> exprs = new ArrayList<>(1);
        for (StructField c : dataset.schema().fields()) {
            if (!isSimpleType(c.dataType()) || req.isExcluded(c.name())) {
                continue;
            }
            Column col = col(c.name());
            // Spark does not escape numeric column names like `10` in the DISTINCT query,
            // so we have to escape the column name with backticks manually
            exprs.add(String.format("count(DISTINCT `%s`) AS `%s_%s`", c.name(), c.name(), "distinct"));
            exprs.add(count(col).alias(String.format("%s_%s", c.name(), "non-null")).toString());
            exprs.add(count(when(col.isNull(), 1)).alias(String.format("%s_%s", c.name(), "count_null")).toString());
        }
        Dataset<Row> countsDf = dataset.selectExpr(seq(exprs));
        Row rawCounts = countsDf.collectAsList().get(0);
        String[] countsColumns = countsDf.columns();
        Map<String, Object> counts = new HashMap<>(1);
        for (int i = 0; i < countsColumns.length; i++) {
            counts.put(countsColumns[i], rawCounts.get(i));
        }
        Map<String, Map<String, Object>> stats = new HashMap<>(1);
        for (StructField c : dataset.schema().fields()) {
            if (!isSimpleType(c.dataType()) || req.isExcluded(c.name())) {
                continue;
            }
            Summary columnSummary = null;
            long nonNull = Long.parseLong(counts.get(String.format("%s_%s", c.name(), "non-null")).toString());
            long countNull = Long.parseLong(counts.get(String.format("%s_%s", c.name(), "count_null")).toString());
            if (c.dataType() instanceof NumericType) {
                int idx = colToIdx.get(c.name());
                columnSummary = new NumericSummary(
                    new ColumnSummary(
                        nonNull + countNull,
                        Long.parseLong(counts.get(String.format("%s_%s", c.name(), "distinct")).toString()),
                        nonNull,
                        countNull,
                        (c.dataType() instanceof FractionalType) ? "double" : "integer"
                    ),
                    Double.parseDouble(summary.get("max").get(idx).toString()),
                    Double.parseDouble(summary.get("mean").get(idx).toString()),
                    Double.parseDouble(summary.get("min").get(idx).toString()),
                    Double.parseDouble(summary.get("stddev").get(idx).toString()),
                    Double.parseDouble(summary.get("25%").get(idx).toString()),
                    Double.parseDouble(summary.get("50%").get(idx).toString()),
                    Double.parseDouble(summary.get("75%").get(idx).toString())
                );
            } else if (c.dataType() instanceof StringType || c.dataType() instanceof BooleanType) {
                columnSummary = new ColumnSummary(
                    nonNull + countNull,
                    Long.parseLong(counts.get(String.format("%s_%s", c.name(), "distinct")).toString()),
                    nonNull,
                    countNull,
                    (c.dataType() instanceof StringType) ? "string" : "boolean"
                );
            }
            Map<String, Object> columnSummaryMap = columnSummary.toMap();
            stats.put(c.name(), columnSummaryMap);
            for (Map.Entry<String, Object> entry : columnSummaryMap.entrySet()) {
                result.put(String.format("%s.%s.%s", dfKey, c.name(), entry.getKey()), entry.getValue());
            }
            summaries.put(c.name(), columnSummary);
        }
        return stats;
    }

    public Map<String, Summary> getSummaries() {
        return summaries;
    }

    protected boolean isSimpleType(DataType dt) {
        return dt instanceof NumericType || dt instanceof StringType || dt instanceof BooleanType;
    }

    protected Map<String, Object[][]> numericHistograms() {
        List<Column> numericColumns = new ArrayList<>(1);
        List<String> histogramsExpr = new ArrayList<>(1);
        Map<String, Object[]> namedBuckets = new HashMap<>(1);
        for (StructField c : dataset.schema().fields()) {
            if (!(c.dataType() instanceof NumericType) || req.isExcluded(c.name())) {
                continue;
            }
            numericColumns.add(col(c.name()));
            long distinct = (long) result.get(String.format("%s.%s.%s", dfKey, c.name(), "distinct"));
            double minv = (double) result.get(String.format("%s.%s.%s", dfKey, c.name(), "min"));
            double maxv = (double) result.get(String.format("%s.%s.%s", dfKey, c.name(), "max"));
            int bucketsCount = (int) Math.min(distinct, MAX_NUMERIC_BUCKETS_COUNT);
            double inc;
            if (c.dataType() instanceof IntegralType) {
                inc = (int) ((maxv - minv) / bucketsCount);
            } else {
                inc = (maxv - minv) * 1.0 / bucketsCount;
            }
            Object[] buckets = new Object[bucketsCount + 1];
            for (int i = 0; i < bucketsCount; i++) {
                buckets[i] = i * inc + minv;
            }
            buckets[bucketsCount] = maxv;
            namedBuckets.put(c.name(), buckets);
            for (int i = 0; i < buckets.length - 1; i++) {
                histogramsExpr.add(
                    count(
                        when(
                            col(c.name()).geq(buckets[i])
                                .and(i == buckets.length - 2 ? col(c.name()).leq(buckets[i + 1]) : col(c.name()).lt(buckets[i + 1])),
                            1
                        )
                    ).alias(String.format("%s_%s", c.name(), i)).toString()
                );
            }
        }
        Dataset<Row> histogramsDf = dataset.select(seq(numericColumns)).selectExpr(seq(histogramsExpr));
        Row histograms = histogramsDf.collectAsList().get(0);
        Map<String, Object[][]> histogramsResult = new HashMap<>(1);
        for (String column : namedBuckets.keySet()) {
            Object[] buckets = namedBuckets.get(column);
            Object[] bucketCounts = new Object[buckets.length];
            for (int i = 0; i < buckets.length - 1; i++) {
                bucketCounts[i] = histograms.getAs(String.format("%s_%s", column, i));
            }
            histogramsResult.put(column, new Object[][]{bucketCounts, buckets});
        }
        return histogramsResult;
    }

    List<Dataset<Row>> columnsOfType(Class<?> dataType) {
        return Arrays.stream(dataset.schema().fields())
            .filter(f -> dataType.isInstance(f.dataType()))
            .filter(f -> !req.isExcluded(f.name()))
            .map(f -> dataset.select(f.name()))
            .collect(Collectors.toList());
    }

    protected Map<String, List<List<Object>>> categoricalHistograms(Class<?> dataType) {
        List<Dataset<Row>> columnsDf = columnsOfType(dataType);
        if (columnsDf.isEmpty()) {
            return Collections.emptyMap();
        }
        Dataset<Row> valueCounts = null;
        for (Dataset<Row> column : columnsDf) {
            String columnName = column.schema().names()[0];
            Dataset<Row> columnCounts = column.groupBy(columnName)
                .count()
                .orderBy(desc("count"))
                .withColumn("column_name", lit(columnName))
                .limit(MAX_CATEGORICAL_BUCKETS_COUNT - 1);
            if (valueCounts == null) {
                valueCounts = columnCounts;
            } else {
                valueCounts = valueCounts.union(columnCounts);
            }
        }
        Map<String, List<List<Object>>> histogramsResult = new HashMap<>(1);
        for (Row row : valueCounts.collectAsList()) {
            if (row.get(0) == null) {
                continue;
            }
            String bucket = row.get(0).toString();
            long count = row.getLong(1);
            String columnName = row.getString(2);
            List<List<Object>> columnHistogram = histogramsResult.computeIfAbsent(columnName, c -> {
                List<List<Object>> pair = new ArrayList<>(2);
                pair.add(new ArrayList<>(1));
                pair.add(new ArrayList<>(1));
                return pair;
            });
            columnHistogram.get(0).add(count);
            columnHistogram.get(1).add(bucket);
        }
        // add "others"
        for (Map.Entry<String, List<List<Object>>> column : histogramsResult.entrySet()) {
            Summary summary = summaries.get(column.getKey());
            long distinct = summary.getDistinct();
            if (distinct < MAX_CATEGORICAL_BUCKETS_COUNT) {
                continue;
            }
            long total = summary.getCount();
            long histogramSumCount = column.getValue().get(0).stream().mapToLong(f -> (Long) f).sum();
            long othersCount = total - histogramSumCount;
            column.getValue().get(0).add(othersCount);
            column.getValue().get(1).add("_others");
        }
        return histogramsResult;
    }
}
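A minimal usage sketch (not part of the source tree): computing stats and histograms for a Spark Dataset. It assumes a live SparkSession `spark` and that HistogramRequest can be constructed with a default configuration; the actual HistogramRequest API is defined elsewhere in dbnd-client.

Dataset<Row> df = spark.read().parquet("/data/input.parquet");                // hypothetical input path
Histogram histogram = new Histogram("input_df", df, new HistogramRequest()); // default ctor is an assumption
Map<String, Object> metrics = histogram.metricValues();
// resulting keys look like "input_df.stats", "input_df.<column>.distinct", "input_df.histograms"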
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/NullPreview.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.parameters; import java.util.Collections; import java.util.List; public class NullPreview implements TaskParameterPreview<Object> { @Override public String compact(Object input) { return ""; } @Override public String full(Object input) { return ""; } @Override public String typeName(Class<Object> input) { return ""; } @Override public String schema(Object input) { return ""; } @Override public List<Long> dimensions(Object input) { return Collections.emptyList(); } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/ObjectPreview.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.parameters; import java.util.Collections; import java.util.List; public class ObjectPreview implements TaskParameterPreview<Object> { @Override public String compact(Object input) { return String.valueOf(input); } @Override public String full(Object input) { return String.valueOf(input); } @Override public String typeName(Class<Object> input) { return input.getTypeName(); } @Override public String schema(Object input) { return ""; } @Override public List<Long> dimensions(Object input) { return Collections.emptyList(); } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/ParametersPreview.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.parameters; import org.apache.spark.sql.Dataset; import java.util.HashMap; import java.util.Map; public class ParametersPreview { private final Map<String, TaskParameterPreview<?>> parameters; private final ObjectPreview objectPreview; public ParametersPreview(boolean previewEnabled) { parameters = new HashMap<>(1); if (previewEnabled) { parameters.put(String.class.getCanonicalName(), new StringPreview()); parameters.put(Dataset.class.getCanonicalName(), new DatasetPreview()); parameters.put(String[].class.getCanonicalName(), new StringArrayPreview()); } else { parameters.put(String.class.getCanonicalName(), new StringPreview()); parameters.put(String[].class.getCanonicalName(), new StringArrayPreview()); } objectPreview = new ObjectPreview(); } public TaskParameterPreview get(Class<?> clazz) { return parameters.getOrDefault(clazz.getCanonicalName(), objectPreview); } }
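A short sketch of how the registry resolves previews by class; any class without a registered preview falls back to ObjectPreview.

ParametersPreview previews = new ParametersPreview(true);  // preview enabled
TaskParameterPreview preview = previews.get(String.class); // resolves to StringPreview
String compact = preview.compact("hello");                 // "hello"
// unregistered types fall back to ObjectPreview, which uses String.valueOf()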
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/StringArrayPreview.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.parameters; import java.util.Collections; import java.util.List; public class StringArrayPreview implements TaskParameterPreview<String[]> { @Override public String compact(String[] input) { if (input == null) { return "[]"; } StringBuilder builder = new StringBuilder(); builder.append("["); int idx = 0; for (String part : input) { if (builder.length() > 32) { builder.append("..."); break; } builder.append(part); if (idx < input.length - 1) { builder.append(", "); idx++; } } builder.append("]"); return builder.toString(); } @Override public String full(String[] input) { if (input == null) { return "[]"; } return String.join(", ", input); } @Override public String typeName(Class<String[]> input) { return "String[]"; } @Override public String schema(String[] input) { return ""; } @Override public List<Long> dimensions(String[] input) { return Collections.emptyList(); } }
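Illustrative behavior of the two preview modes (values hypothetical):

StringArrayPreview preview = new StringArrayPreview();
preview.compact(new String[]{"a", "b"}); // "[a, b]"
preview.full(new String[]{"a", "b"});    // "a, b"
// compact() stops appending and emits "..." once the accumulated preview exceeds 32 characters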
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/StringPreview.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.parameters; import java.util.Collections; import java.util.List; public class StringPreview implements TaskParameterPreview<String> { @Override public String compact(String input) { return input == null ? "null" : input; } @Override public String full(String input) { return input == null ? "null" : input; } @Override public String typeName(Class<String> input) { return input.getTypeName(); } @Override public String schema(String input) { return ""; } @Override public List<Long> dimensions(String input) { return Collections.emptyList(); } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/parameters/TaskParameterPreview.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.parameters; import java.util.List; public interface TaskParameterPreview<T> { String compact(T input); String full(T input); String typeName(Class<T> input); Object schema(T input); List<Long> dimensions(T input); }
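A hypothetical custom implementation, following the conventions of the shipped previews (empty schema, empty dimensions); `ListPreview` is illustrative only and not part of the library:

import java.util.Collections;
import java.util.List;

public class ListPreview implements TaskParameterPreview<List<?>> {
    @Override
    public String compact(List<?> input) { return input == null ? "null" : "size=" + input.size(); }

    @Override
    public String full(List<?> input) { return String.valueOf(input); }

    @Override
    public String typeName(Class<List<?>> input) { return input.getTypeName(); }

    @Override
    public Object schema(List<?> input) { return ""; }

    @Override
    public List<Long> dimensions(List<?> input) { return Collections.emptyList(); }
}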
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/AddTaskRuns.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class AddTaskRuns { private final TaskRunsInfo taskRunsInfo; public AddTaskRuns(TaskRunsInfo taskRunsInfo) { this.taskRunsInfo = taskRunsInfo; } public TaskRunsInfo getTaskRunsInfo() { return taskRunsInfo; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/AirflowTaskContext.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import ai.databand.id.Uuid5;
import com.fasterxml.jackson.annotation.JsonIgnore;

import java.util.UUID;

public class AirflowTaskContext {

    private final String airflowInstanceUid;
    private final String airflowName;
    private final String dagId;
    private final String executionDate;
    private final String taskId;
    private final String tryNumber;

    public AirflowTaskContext(String airflowInstanceUid,
                              String airflowName,
                              String dagId,
                              String executionDate,
                              String taskId,
                              String tryNumber) {
        this.airflowInstanceUid = airflowInstanceUid;
        this.airflowName = airflowName;
        this.dagId = dagId;
        this.executionDate = executionDate;
        this.taskId = taskId;
        this.tryNumber = tryNumber;
    }

    public String getAirflowInstanceUid() { return airflowInstanceUid; }

    public String getAirflowName() { return airflowName; }

    public String getDagId() { return dagId; }

    public String getExecutionDate() { return executionDate; }

    public String getTaskId() { return taskId; }

    public String getTryNumber() { return tryNumber; }

    @JsonIgnore
    public String getAfOperatorUid() {
        String airflowSyncRunUid = buildAirflowJobRunUid();
        return buildAirflowTaskRunUid(airflowSyncRunUid);
    }

    /**
     * def get_job_run_uid(dag_id, execution_date):
     *     if airflow_instance_uid is None:
     *         return uuid.uuid5(NAMESPACE_DBND_RUN, "{}:{}".format(dag_id, execution_date))
     *     else:
     *         return uuid.uuid5(NAMESPACE_DBND_RUN, "{}:{}:{}".format(airflow_instance_uid, dag_id, execution_date))
     */
    @JsonIgnore
    protected String buildAirflowJobRunUid() {
        if (airflowInstanceUid == null) {
            return new Uuid5(
                Uuid5.NAMESPACE_DBND_RUN,
                String.format("%s:%s", this.getDagId(), this.getExecutionDate())
            ).toString();
        } else {
            return new Uuid5(
                Uuid5.NAMESPACE_DBND_RUN,
                String.format("%s:%s:%s", this.getAirflowInstanceUid(), this.getDagId(), this.getExecutionDate())
            ).toString();
        }
    }

    /**
     * def get_task_run_uid(run_uid, dag_id, task_id):
     *     return uuid.uuid5(run_uid, "{}.{}".format(dag_id, task_id))
     */
    @JsonIgnore
    protected String buildAirflowTaskRunUid(String runUid) {
        return new Uuid5(
            UUID.fromString(runUid),
            String.format("%s.%s", this.getDagId(), this.getTaskId())
        ).toString();
    }

    /**
     * def get_task_def_uid(dag_id, task_id):
     *     return uuid.uuid5(NAMESPACE_DBND_TASK_DEF, "{}.{}".format(dag_id, task_id))
     */
    @JsonIgnore
    protected String buildAirflowTaskDefUid(AirflowTaskContext context) {
        return new Uuid5(
            Uuid5.NAMESPACE_DBND_TASK_DEF,
            String.format("%s.%s", context.getDagId(), context.getTaskId())
        ).toString();
    }

    /**
     * def get_task_run_attempt_uid(run_uid, dag_id, task_id, try_number):
     *     return uuid.uuid5(run_uid, "{}.{}:{}".format(dag_id, task_id, try_number))
     */
    @JsonIgnore
    protected String buildAirflowTaskRunAttemptUid(String runUid, AirflowTaskContext context) {
        return new Uuid5(
            UUID.fromString(runUid),
            String.format("%s.%s:%s", context.getDagId(), context.getTaskId(), context.getTryNumber())
        ).toString();
    }

    /**
     * def get_job_uid(dag_id):
     *     if airflow_server_info_uid:
     *         return uuid.uuid5(NAMESPACE_DBND_JOB, "{}:{}".format(airflow_server_info_uid, dag_id))
     *     else:
     *         return uuid.uuid5(NAMESPACE_DBND_JOB, dag_id)
     */
    @JsonIgnore
    protected String buildAirflowJobUid(AirflowTaskContext context) {
        if (context.getAirflowInstanceUid() == null) {
            return new Uuid5(Uuid5.NAMESPACE_DBND_JOB, context.getDagId()).toString();
        } else {
            return new Uuid5(
                Uuid5.NAMESPACE_DBND_JOB,
                String.format("%s:%s", context.getAirflowInstanceUid(), context.getDagId())
            ).toString();
        }
    }

    @JsonIgnore
    public String jobName() {
        return String.format("%s.%s", dagId, taskId);
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/AzkabanTaskContext.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import ai.databand.config.DbndConfig;
import ai.databand.id.Uuid5;

import java.util.Optional;

import static ai.databand.DbndPropertyNames.DBND__TRACKING__PROJECT;

public class AzkabanTaskContext {

    private final String projectName;
    private final String flowId;
    private final String flowUuid;
    private final String executionId;
    private final String jobId;
    private final DbndConfig config;

    public AzkabanTaskContext(String projectName,
                              String flowId,
                              String flowUuid,
                              String executionId,
                              String jobId,
                              DbndConfig config) {
        this.projectName = projectName;
        this.flowId = flowId;
        this.flowUuid = flowUuid;
        this.executionId = executionId;
        this.jobId = jobId;
        this.config = config;
    }

    public AzkabanTaskContext forJob(String jobId) {
        return new AzkabanTaskContext(
            this.projectName,
            this.flowId,
            this.flowUuid,
            this.executionId,
            jobId,
            config
        );
    }

    public TrackingSource trackingSource() {
        if (config.getValue("azkaban.name").isPresent()) {
            return new TrackingSource(this);
        } else {
            return null;
        }
    }

    public String databandJobName() {
        return String.format("%s.%s", flowId, jobId);
    }

    public String projectName() {
        return projectName;
    }

    /**
     * Databand project name may be different from the Azkaban project name.
     *
     * @return Databand project name
     */
    public String databandProjectName() {
        return config.getValue(DBND__TRACKING__PROJECT).orElse(projectName);
    }

    public String flowId() {
        return flowId;
    }

    public String flowUuid() {
        return flowUuid;
    }

    public String executionId() {
        return executionId;
    }

    public String jobId() {
        return jobId;
    }

    public Optional<String> userId() {
        // the environment variable may be absent; ofNullable avoids a NullPointerException in that case
        return Optional.ofNullable(System.getenv("azkaban.flow.submituser"));
    }

    public String azkabanInstanceId() {
        return String.format("%s:%s",
            config.getValue("azkaban.name").orElse("azkaban"),
            config.getValue("azkaban.label").orElse("azkaban"));
    }

    public String azkabanInstanceUuid() {
        return new Uuid5("AZ_INSTANCE_UUID", azkabanInstanceId()).toString();
    }

    public String azkabanUrl() {
        String hostName = config.getValue("server.hostname").orElse("localhost");
        String port = config.getValue("server.port").orElse("8081");
        String protocol = Boolean.TRUE.toString().equalsIgnoreCase(config.getValue("jetty.use.ssl").orElse(Boolean.FALSE.toString()))
            ? "https"
            : "http";
        return String.format("%s://%s:%s", protocol, hostName, port);
    }

    public RootRun root() {
        return new RootRun(
            "",
            taskRunUid(),
            rootRunUid(),
            taskRunAttemptUid()
        );
    }

    /**
     * project_name - flow_name - execution_id
     *
     * @return run name
     */
    public String runName() {
        return String.format("%s-%s-%s", projectName, flowId, executionId);
    }

    /**
     * project_name - flow_name - job_name - execution_id
     *
     * @return job run name
     */
    public String jobRunName() {
        return String.format("%s-%s-%s-%s", projectName, flowId, jobId, executionId);
    }

    /**
     * Root run (flow) UID.
     *
     * @return root run UID
     */
    public String rootRunUid() {
        return new Uuid5("RUN_UID", flowUuid).toString();
    }

    /**
     * Driver task (flow) UID.
     *
     * @return driver task UID
     */
    public String driverTaskUid() {
        return new Uuid5("DRIVER_TASK", flowUuid).toString();
    }

    public String driverTaskRunEnvUid() {
        return new Uuid5("TASK_RUN_ENV_UID", flowUuid).toString();
    }

    public String driverTaskDefinitionUid() {
        return new Uuid5("TASK_DEFINITION", flowUuid).toString();
    }

    /**
     * Driver task (flow) attempt UID.
     *
     * @return driver task run attempt UID
     */
    public String driverTaskRunAttemptUid() {
        return new Uuid5("TASK_RUN_ATTEMPT", flowUuid).toString();
    }

    /**
     * Task (job) run UID.
     *
     * @return task run UID
     */
    public String taskRunUid() {
        String taskRunId = jobId + flowUuid;
        return new Uuid5("TASK_RUN_UID", taskRunId).toString();
    }

    /**
     * Task (job) run attempt UID.
     *
     * @return task run attempt UID
     */
    public String taskRunAttemptUid() {
        String taskRunId = jobId + flowUuid;
        return new Uuid5("TASK_RUN_ATTEMPT", taskRunId).toString();
    }

    public String taskDefinitionUid() {
        String taskRunId = jobId + flowUuid;
        return new Uuid5("TASK_DEFINITION", taskRunId).toString();
    }
}
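Every UID here is derived deterministically from the flow UUID, so reporting the same flow twice resolves to the same Databand entities. A sketch with a hypothetical flowUuid:

// equivalent to rootRunUid() for flowUuid "abc-123"
String rootRunUid = new Uuid5("RUN_UID", "abc-123").toString();
// equivalent to forJob("job1").taskRunUid(): the name is salted with jobId + flowUuid
String taskRunUid = new Uuid5("TASK_RUN_UID", "job1" + "abc-123").toString();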
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/ColumnStats.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonProperty;

public class ColumnStats {

    String columnName;
    String columnType;
    Long recordsCount;
    Long distinctCount;

    // metric for non-numeric column types
    Long uniqueCount;

    // most frequent value
    Object mostFreqValue;
    Long mostFreqValueCount;

    // numeric column type metrics
    Double meanValue;
    Double minValue;
    Double maxValue;
    Double stdValue;

    // percentiles
    @JsonProperty("quartile_1")
    Double quartile1;
    @JsonProperty("quartile_2")
    Double quartile2;
    @JsonProperty("quartile_3")
    Double quartile3;

    public String getColumnName() { return columnName; }
    public ColumnStats setColumnName(String columnName) { this.columnName = columnName; return this; }
    public String getColumnType() { return columnType; }
    public ColumnStats setColumnType(String columnType) { this.columnType = columnType; return this; }
    public Long getRecordsCount() { return recordsCount; }
    public ColumnStats setRecordsCount(Long recordsCount) { this.recordsCount = recordsCount; return this; }
    public Long getDistinctCount() { return distinctCount; }
    public ColumnStats setDistinctCount(Long distinctCount) { this.distinctCount = distinctCount; return this; }
    public Long getUniqueCount() { return uniqueCount; }
    public ColumnStats setUniqueCount(Long uniqueCount) { this.uniqueCount = uniqueCount; return this; }
    public Object getMostFreqValue() { return mostFreqValue; }
    public ColumnStats setMostFreqValue(Object mostFreqValue) { this.mostFreqValue = mostFreqValue; return this; }
    public Long getMostFreqValueCount() { return mostFreqValueCount; }
    public ColumnStats setMostFreqValueCount(Long mostFreqValueCount) { this.mostFreqValueCount = mostFreqValueCount; return this; }
    public Double getMeanValue() { return meanValue; }
    public ColumnStats setMeanValue(Double meanValue) { this.meanValue = meanValue; return this; }
    public Double getMinValue() { return minValue; }
    public ColumnStats setMinValue(Double minValue) { this.minValue = minValue; return this; }
    public Double getMaxValue() { return maxValue; }
    public ColumnStats setMaxValue(Double maxValue) { this.maxValue = maxValue; return this; }
    public Double getStdValue() { return stdValue; }
    public ColumnStats setStdValue(Double stdValue) { this.stdValue = stdValue; return this; }
    public Double getQuartile1() { return quartile1; }
    public ColumnStats setQuartile1(Double quartile1) { this.quartile1 = quartile1; return this; }
    public Double getQuartile2() { return quartile2; }
    public ColumnStats setQuartile2(Double quartile2) { this.quartile2 = quartile2; return this; }
    public Double getQuartile3() { return quartile3; }
    public ColumnStats setQuartile3(Double quartile3) { this.quartile3 = quartile3; return this; }
}
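A fluent-usage sketch with hypothetical values:

ColumnStats amount = new ColumnStats()
    .setColumnName("amount")
    .setColumnType("double")
    .setRecordsCount(1000L)
    .setDistinctCount(873L)
    .setMeanValue(12.4)
    .setMinValue(0.0)
    .setMaxValue(99.9)
    .setStdValue(3.2)
    .setQuartile1(5.0)
    .setQuartile2(11.0)
    .setQuartile3(18.5);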
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/DatabandTaskContext.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class DatabandTaskContext { private final String rootRunUid; private final String taskRunUid; private final String taskRunAttemptUid; private final String traceId; public DatabandTaskContext(String rootRunUid, String taskRunUid, String taskRunAttemptUid, String traceId) { this.rootRunUid = rootRunUid; this.taskRunUid = taskRunUid; this.taskRunAttemptUid = taskRunAttemptUid; this.traceId = traceId; } public String getRootRunUid() { return rootRunUid; } public String getTaskRunUid() { return taskRunUid; } public String getTaskRunAttemptUid() { return taskRunAttemptUid; } public String getTraceId() { return traceId; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/DatasetOperationRes.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import java.util.List;

@JsonIgnoreProperties(ignoreUnknown = true)
public class DatasetOperationRes {

    private String latestOperationStatus;
    private long records;
    private long operations;
    private String datasetPath;
    private String taskRunUid;
    private String operationType;
    private String operationSource;
    private String taskRunName;
    private List<Issue> issues;
    private List<ColumnStats> columnsStats;

    public String getLatestOperationStatus() { return latestOperationStatus; }
    public void setLatestOperationStatus(String latestOperationStatus) { this.latestOperationStatus = latestOperationStatus; }
    public long getRecords() { return records; }
    public void setRecords(long records) { this.records = records; }
    public long getOperations() { return operations; }
    public void setOperations(long operations) { this.operations = operations; }
    public String getDatasetPath() { return datasetPath; }
    public void setDatasetPath(String datasetPath) { this.datasetPath = datasetPath; }
    public String getTaskRunUid() { return taskRunUid; }
    public void setTaskRunUid(String taskRunUid) { this.taskRunUid = taskRunUid; }
    public String getOperationType() { return operationType; }
    public void setOperationType(String operationType) { this.operationType = operationType; }
    public String getOperationSource() { return operationSource; }
    public void setOperationSource(String operationSource) { this.operationSource = operationSource; }
    public String getTaskRunName() { return taskRunName; }
    public void setTaskRunName(String taskRunName) { this.taskRunName = taskRunName; }
    public List<Issue> getIssues() { return issues; }
    public void setIssues(List<Issue> issues) { this.issues = issues; }
    public List<ColumnStats> getColumnsStats() { return columnsStats; }
    public void setColumnsStats(List<ColumnStats> columnsStats) { this.columnsStats = columnsStats; }

    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Issue {
        private String type;
        private Data data;

        public String getType() { return type; }
        public void setType(String type) { this.type = type; }
        public Data getData() { return data; }
        public void setData(Data data) { this.data = data; }
    }

    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Data {
        private String operationError;

        public String getOperationError() { return operationError; }
        public void setOperationError(String operationError) { this.operationError = operationError; }
    }

    @Override
    public String toString() {
        return String.format("{path: \"%s\", type: %s, source: %s}", datasetPath, operationType, operationSource);
    }
}
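Since the class tolerates unknown fields, it can be bound straight from a server payload; a sketch assuming Jackson's ObjectMapper and a `json` string whose property names match the fields:

import com.fasterxml.jackson.databind.ObjectMapper;

ObjectMapper mapper = new ObjectMapper();
// unknown server-side fields are silently ignored thanks to @JsonIgnoreProperties(ignoreUnknown = true)
DatasetOperationRes op = mapper.readValue(json, DatasetOperationRes.class); // throws JsonProcessingException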
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/DatasetOperationSchema.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; import java.util.Map; public class DatasetOperationSchema { private final List<String> columns; private final Map<String, String> dtypes; private final List<Long> shape; private static final String SPARK_DATAFRAME_TYPE = "Spark.DataFrame"; public DatasetOperationSchema(List<String> columns, Map<String, String> dtypes, List<Long> shape) { this.columns = columns; this.dtypes = dtypes; this.shape = shape; } public List<String> getColumns() { return columns; } public Map<String, String> getDtypes() { return dtypes; } public List<Long> getShape() { return shape; } public String getType() { return SPARK_DATAFRAME_TYPE; } }
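A hypothetical assembly from a Spark Dataset `df` (Arrays, HashMap, and StructField imported as in Histogram above):

List<String> columns = Arrays.asList(df.columns());
Map<String, String> dtypes = new HashMap<>();
for (StructField f : df.schema().fields()) {
    dtypes.put(f.name(), f.dataType().simpleString());
}
// shape is rows x columns; note that df.count() triggers a Spark job
List<Long> shape = Arrays.asList(df.count(), (long) df.columns().length);
DatasetOperationSchema schema = new DatasetOperationSchema(columns, dtypes, shape);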
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/DatasetOperationStatus.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public enum DatasetOperationStatus { OK("OK"), NOK("NOK"); private final String name; DatasetOperationStatus(String name) { this.name = name; } @Override public String toString() { return name; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/DatasetOperationType.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public enum DatasetOperationType { READ("read"), WRITE("write"), DELETE("delete"); private final String name; DatasetOperationType(String name) { this.name = name; } @Override public String toString() { return name; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/ErrorInfo.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class ErrorInfo { private String msg; private String helpMsg; private Boolean databandError; private String traceback; private String nested; private String userCodeTraceback; private Boolean showExcInfo; private String excType; public ErrorInfo() { } public ErrorInfo(String msg, String helpMsg, Boolean databandError, String traceback, String nested, String userCodeTraceback, Boolean showExcInfo, String excType) { this.msg = msg; this.helpMsg = helpMsg; this.databandError = databandError; this.traceback = traceback; this.nested = nested; this.userCodeTraceback = userCodeTraceback; this.showExcInfo = showExcInfo; this.excType = excType; } public String getMsg() { return msg; } public String getHelpMsg() { return helpMsg; } public Boolean getDatabandError() { return databandError; } public String getTraceback() { return traceback; } public String getNested() { return nested; } public String getUserCodeTraceback() { return userCodeTraceback; } public Boolean getShowExcInfo() { return showExcInfo; } public Object getExcType() { return excType; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/GetRunsResponse.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import java.util.List; @JsonIgnoreProperties(ignoreUnknown = true) public class GetRunsResponse { private List<Run> data; public List<Run> getData() { return data; } public void setData(List<Run> data) { this.data = data; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/InitRun.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class InitRun { private final InitRunArgs initArgs; public InitRun(InitRunArgs initRunArgs) { this.initArgs = initRunArgs; } public InitRunArgs getInitArgs() { return initArgs; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/InitRunArgs.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class InitRunArgs { private final String runUid; private final String rootRunUid; private final String driverTaskUid; private final TaskRunEnv taskRunEnv; private final TaskRunsInfo taskRunsInfo; private final NewRunInfo newRunInfo; private final AirflowTaskContext afContext; private final String source; private final TrackingSource trackingSource; public InitRunArgs(String runUid, String rootRunUid, String driverTaskUid, NewRunInfo newRunInfo, TaskRunEnv taskRunEnv, TaskRunsInfo taskRunsInfo, AirflowTaskContext afContext, String source, TrackingSource trackingSource) { this.runUid = runUid; this.rootRunUid = rootRunUid; this.driverTaskUid = driverTaskUid; this.newRunInfo = newRunInfo; this.taskRunEnv = taskRunEnv; this.taskRunsInfo = taskRunsInfo; this.afContext = afContext; this.source = source; this.trackingSource = trackingSource; } public InitRunArgs(String runUid, String rootRunUid, String driverTaskUid, NewRunInfo newRunInfo, TaskRunEnv taskRunEnv, TaskRunsInfo taskRunsInfo) { this(runUid, rootRunUid, driverTaskUid, newRunInfo, taskRunEnv, taskRunsInfo, null, "generic_tracking", null ); } public String getRunUid() { return runUid; } public String getRootRunUid() { return rootRunUid; } public String getDriverTaskUid() { return driverTaskUid; } public NewRunInfo getNewRunInfo() { return newRunInfo; } public TaskRunEnv getTaskRunEnv() { return taskRunEnv; } public TaskRunsInfo getTaskRunsInfo() { return taskRunsInfo; } public AirflowTaskContext getAfContext() { return afContext; } public TrackingSource getTrackingSource() { return trackingSource; } public String getSource() { return source; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/Job.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class Job { private String name; private String user; private String latestRunUid; private String latestRootTaskRunUid; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getUser() { return user; } public void setUser(String user) { this.user = user; } public String getLatestRunUid() { return latestRunUid; } public void setLatestRunUid(String latestRunUid) { this.latestRunUid = latestRunUid; } public String getLatestRootTaskRunUid() { return latestRootTaskRunUid; } public void setLatestRootTaskRunUid(String latestRootTaskRunUid) { this.latestRootTaskRunUid = latestRootTaskRunUid; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/LogDataset.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class LogDataset { public static final String OP_SOURCE_SPARK_QUERY_LISTENER = "spark_query_listener"; public static final String OP_SOURCE_JAVA_MANUAL_LOGGING = "java_manual_logging"; private final String runUid; private final String taskRunUid; private final String taskRunName; private final String taskRunAttemptUid; private final String operationPath; private final String operationType; private final String operationStatus; private final String operationError; private final String valuePreview; private final List<Long> dataDimensions; private final Object dataSchema; private final Boolean withPartition; private final List<ColumnStats> columnsStats; private final String operationSource; public LogDataset(TaskRun taskRun, String operationPath, DatasetOperationType operationType, DatasetOperationStatus operationStatus, String operationError, String valuePreview, List<Long> dataDimensions, Object dataSchema, Boolean withPartition, List<ColumnStats> columnStats, String operationSource) { this( taskRun.getRunUid(), taskRun.getTaskRunUid(), taskRun.getName(), taskRun.getTaskRunAttemptUid(), operationPath, operationType, operationStatus, operationError, valuePreview, dataDimensions, dataSchema, withPartition, columnStats, operationSource ); } public LogDataset(String runUid, String taskRunUid, String taskRunName, String taskRunAttemptUid, String operationPath, DatasetOperationType operationType, DatasetOperationStatus operationStatus, String operationError, String valuePreview, List<Long> dataDimensions, Object dataSchema, Boolean withPartition, List<ColumnStats> columnStats, String operationSource) { this.runUid = runUid; this.taskRunUid = taskRunUid; this.taskRunName = taskRunName; this.taskRunAttemptUid = taskRunAttemptUid; this.operationPath = operationPath; this.operationType = operationType.toString(); this.operationStatus = operationStatus.toString(); this.operationError = operationError; this.valuePreview = valuePreview; this.dataDimensions = dataDimensions; this.dataSchema = dataSchema; this.withPartition = withPartition; this.columnsStats = columnStats; this.operationSource = operationSource; } public String getRunUid() { return runUid; } public String getTaskRunUid() { return taskRunUid; } public String getTaskRunName() { return taskRunName; } public String getTaskRunAttemptUid() { return taskRunAttemptUid; } public String getOperationPath() { return operationPath; } public String getOperationType() { return operationType; } public String getOperationStatus() { return operationStatus; } public String getOperationError() { return operationError; } public String getValuePreview() { return valuePreview; } public List<Long> getDataDimensions() { return dataDimensions; } public Object getDataSchema() { return dataSchema; } public Boolean getWithPartition() { return withPartition; } public List<ColumnStats> getColumnsStats() { return columnsStats; } public String getOperationSource() { return operationSource; } @Override public String toString() { if (operationPath == null || operationType == null) { return "{empty operation}"; } return String.format("{path: [%s], type: [%s], status: [%s]}", operationPath, operationType, operationStatus); } }
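A sketch of reporting a successful write through the String-based constructor; the identifiers, path, preview, and schema are placeholders:

LogDataset write = new LogDataset(
    runUid, taskRunUid, "load_orders", taskRunAttemptUid, // identifiers of the current run (placeholders)
    "s3://bucket/orders/2022-01-01",                      // operationPath
    DatasetOperationType.WRITE,
    DatasetOperationStatus.OK,
    null,                                                 // operationError: none on success
    valuePreview,
    Arrays.asList(1000L, 5L),                             // dataDimensions: rows, columns
    dataSchema,                                           // e.g. a DatasetOperationSchema
    Boolean.FALSE,                                        // withPartition
    Collections.emptyList(),                              // columnsStats
    LogDataset.OP_SOURCE_JAVA_MANUAL_LOGGING
);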
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/LogDatasets.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class LogDatasets { private final List<LogDataset> datasetsInfo; public LogDatasets(List<LogDataset> datasetsInfo) { this.datasetsInfo = datasetsInfo; } public List<LogDataset> getDatasetsInfo() { return datasetsInfo; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/LogMetric.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class LogMetric { private final String taskRunAttemptUid; private final Metric metric; private final String source; public LogMetric(String taskRunAttemptUid, Metric metric, String source) { this.taskRunAttemptUid = taskRunAttemptUid; this.metric = metric; this.source = source; } public LogMetric(String taskRunAttemptUid, Metric metric) { this.taskRunAttemptUid = taskRunAttemptUid; this.metric = metric; this.source = null; } public Metric getMetric() { return metric; } public String getTaskRunAttemptUid() { return taskRunAttemptUid; } public String getSource() { return source; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/LogMetrics.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class LogMetrics { private final List<LogMetric> metricsInfo; public LogMetrics(List<LogMetric> metricsInfo) { this.metricsInfo = metricsInfo; } public List<LogMetric> getMetricsInfo() { return metricsInfo; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/LogTarget.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class LogTarget { private final String runUid; private final String taskRunUid; private final String taskRunName; private final String taskRunAttemptUid; private final String targetPath; private final String paramName; private final String taskDefUid; private final String operationType; private final String operationStatus; private final String valuePreview; private final List<Long> dataDimensions; private final Object dataSchema; private final String dataHash; public LogTarget(String runUid, String taskRunUid, String taskRunName, String taskRunAttemptUid, String targetPath, String paramName, String taskDefUid, String operationType, String operationStatus, String valuePreview, List<Long> dataDimensions, Object dataSchema, String dataHash) { this.runUid = runUid; this.taskRunUid = taskRunUid; this.taskRunName = taskRunName; this.taskRunAttemptUid = taskRunAttemptUid; this.targetPath = targetPath; this.paramName = paramName; this.taskDefUid = taskDefUid; this.operationType = operationType; this.operationStatus = operationStatus; this.valuePreview = valuePreview; this.dataDimensions = dataDimensions; this.dataSchema = dataSchema; this.dataHash = dataHash; } public String getRunUid() { return runUid; } public String getTaskRunUid() { return taskRunUid; } public String getTaskRunName() { return taskRunName; } public String getTaskRunAttemptUid() { return taskRunAttemptUid; } public String getTargetPath() { return targetPath; } public String getParamName() { return paramName; } public String getTaskDefUid() { return taskDefUid; } public String getOperationType() { return operationType; } public String getOperationStatus() { return operationStatus; } public String getValuePreview() { return valuePreview; } public List<Long> getDataDimensions() { return dataDimensions; } public Object getDataSchema() { return dataSchema; } public String getDataHash() { return dataHash; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/LogTargets.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class LogTargets { private final List<LogTarget> targetsInfo; public LogTargets(List<LogTarget> targetsInfo) { this.targetsInfo = targetsInfo; } public List<LogTarget> getTargetsInfo() { return targetsInfo; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/Metric.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import ai.databand.schema.jackson.ZonedDateTimeDeserializer;
import ai.databand.schema.jackson.ZonedDateTimeSerializer;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import java.time.ZonedDateTime;

public class Metric {

    private final String key;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private final ZonedDateTime timestamp;

    private Object value;
    private Integer valueInt;
    private Double valueFloat;
    private Object valueJson;
    private String valueStr;

    public Metric(String key, Object metricValue, ZonedDateTime timestamp) {
        this.key = key;
        this.value = metricValue;
        if (metricValue instanceof String) {
            // string metrics are additionally parsed into int/float values when possible
            this.valueStr = (String) metricValue;
            try {
                valueInt = Integer.valueOf(valueStr);
            } catch (NumberFormatException e) {
                valueInt = null;
            }
            if (valueStr.contains(".")) {
                try {
                    valueFloat = Double.valueOf(valueStr);
                } catch (NumberFormatException e) {
                    valueFloat = null;
                }
            }
        } else if (metricValue instanceof Long) {
            Long value = (Long) metricValue;
            if (value <= Integer.MAX_VALUE) {
                this.valueInt = value.intValue();
            } else {
                // the value doesn't fit into an int, so it is reported as a string
                this.valueStr = value.toString();
            }
        } else if (metricValue instanceof Integer) {
            this.valueInt = (Integer) metricValue;
        } else if (metricValue instanceof Double) {
            this.valueFloat = (Double) metricValue;
        } else {
            // anything else is reported as a JSON value
            this.valueJson = metricValue;
        }
        this.timestamp = timestamp;
    }

    public String getKey() { return key; }
    public Object getValue() { return value; }
    public ZonedDateTime getTimestamp() { return timestamp; }
    public Integer getValueInt() { return valueInt; }
    public Double getValueFloat() { return valueFloat; }
    public Object getValueJson() { return valueJson; }
    public String getValueStr() { return valueStr; }
}
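How the constructor buckets values, with hypothetical inputs (`now` stands for ZonedDateTime.now()):

new Metric("rows", 42, now).getValueInt();             // 42
new Metric("ratio", 0.75, now).getValueFloat();        // 0.75
new Metric("count", "1024", now).getValueInt();        // 1024 -- numeric strings are parsed
new Metric("big", 10_000_000_000L, now).getValueStr(); // "10000000000" -- too large for an int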
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/MetricForAlerts.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class MetricForAlerts { private String taskName; private String metricName; public String getTaskName() { return taskName; } public void setTaskName(String taskName) { this.taskName = taskName; } public String getMetricName() { return metricName; } public void setMetricName(String metricName) { this.metricName = metricName; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/MetricsForAlertsResponse.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import java.util.List; @JsonIgnoreProperties(ignoreUnknown = true) public class MetricsForAlertsResponse { private List<MetricForAlerts> data; public List<MetricForAlerts> getData() { return data; } public void setData(List<MetricForAlerts> data) { this.data = data; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/NewRunInfo.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import ai.databand.schema.jackson.ZonedDateTimeDeserializer;
import ai.databand.schema.jackson.ZonedDateTimeSerializer;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import java.time.ZonedDateTime;

public class NewRunInfo {

    private String runUid;
    private String jobName;
    private String user;
    private String name;
    private String description;
    private String state;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime startTime;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime endTime;

    /**
     * To deprecate.
     */
    private String dagId;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime executionDate;

    /**
     * Task attributes.
     */
    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime targetDate;

    private String version;
    private String driverName;
    private boolean isArchived;
    private String envName;
    private String cloudType;
    private String trigger;
    private RootRun rootRun;
    private String scheduledRun;
    private boolean sendsHeartbeat;
    private String taskExecutor;
    private String projectName;

    public NewRunInfo(String scheduledRun,
                      String envName,
                      ZonedDateTime endTime,
                      String taskExecutor,
                      ZonedDateTime targetDate,
                      ZonedDateTime executionDate,
                      boolean sendsHeartbeat,
                      String driverName,
                      boolean isArchived,
                      String runUid,
                      String cloudType,
                      String trigger,
                      String version,
                      String jobName,
                      String user,
                      String description,
                      String name,
                      String state,
                      ZonedDateTime startTime,
                      RootRun rootRun) {
        this(scheduledRun, envName, endTime, taskExecutor, targetDate, executionDate, sendsHeartbeat, driverName,
            isArchived, runUid, cloudType, trigger, version, jobName, user, description, name, state, startTime,
            rootRun, null);
    }

    public NewRunInfo(String scheduledRun,
                      String envName,
                      ZonedDateTime endTime,
                      String taskExecutor,
                      ZonedDateTime targetDate,
                      ZonedDateTime executionDate,
                      boolean sendsHeartbeat,
                      String driverName,
                      boolean isArchived,
                      String runUid,
                      String cloudType,
                      String trigger,
                      String version,
                      String jobName,
                      String user,
                      String description,
                      String name,
                      String state,
                      ZonedDateTime startTime,
                      RootRun rootRun,
                      String projectName) {
        this.scheduledRun = scheduledRun;
        this.envName = envName;
        this.endTime = endTime;
        this.taskExecutor = taskExecutor;
        this.targetDate = targetDate;
        this.executionDate = executionDate;
        this.sendsHeartbeat = sendsHeartbeat;
        this.driverName = driverName;
        this.isArchived = isArchived;
        this.runUid = runUid;
        this.cloudType = cloudType;
        this.trigger = trigger;
        this.version = version;
        this.jobName = jobName;
        this.user = user;
        this.description = description;
        this.name = name;
        this.state = state;
        this.startTime = startTime;
        this.rootRun = rootRun;
        this.projectName = projectName;
    }

    public String getScheduledRun() { return scheduledRun; }
    public String getEnvName() { return envName; }
    public ZonedDateTime getEndTime() { return endTime; }
    public String getTaskExecutor() { return taskExecutor; }
    public ZonedDateTime getTargetDate() { return targetDate; }
    public ZonedDateTime getExecutionDate() { return executionDate; }
    public boolean isSendsHeartbeat() { return sendsHeartbeat; }
    public String getDriverName() { return driverName; }
    public boolean isArchived() { return isArchived; }
    public String getRunUid() { return runUid; }
    public String getCloudType() { return cloudType; }
    public String getTrigger() { return trigger; }
    public String getVersion() { return version; }
    public String getJobName() { return jobName; }
    public String getUser() { return user; }
    public String getDescription() { return description; }
    public String getDagId() { return ""; }
    public String getName() { return name; }
    public String getState() { return state; }
    public ZonedDateTime getStartTime() { return startTime; }
    public RootRun getRootRun() { return rootRun; }
    public String getProjectName() { return projectName; }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/NodeInfo.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class NodeInfo { private Integer id; private String uid; private String state; private String taskId; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getUid() { return uid; } public void setUid(String uid) { this.uid = uid; } public String getState() { return state; } public void setState(String state) { this.state = state; } public String getTaskId() { return taskId; } public void setTaskId(String taskId) { this.taskId = taskId; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/NodeRelationInfo.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class NodeRelationInfo { private Integer id; private Integer downstreamTrId; private Integer upstreamTrId; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public Integer getDownstreamTrId() { return downstreamTrId; } public void setDownstreamTrId(Integer downstreamTrId) { this.downstreamTrId = downstreamTrId; } public Integer getUpstreamTrId() { return upstreamTrId; } public void setUpstreamTrId(Integer upstreamTrId) { this.upstreamTrId = upstreamTrId; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/PaginatedData.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class PaginatedData<T> { private List<T> data; private PaginationMeta meta; public List<T> getData() { return data; } public void setData(List<T> data) { this.data = data; } public PaginationMeta getMeta() { return meta; } public void setMeta(PaginationMeta meta) { this.meta = meta; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/PaginationMeta.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class PaginationMeta { private int total; public int getTotal() { return total; } public void setTotal(int total) { this.total = total; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/Pair.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class Pair<A, B> { private final A left; private final B right; public Pair(A left, B right) { this.left = left; this.right = right; } public A left() { return left; } public B right() { return right; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/RootRun.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; public class RootRun { private final String rootRunUrl; private final String rootTaskRunUid; private final String rootRunUid; private final String rootTaskRunAttemptUid; public RootRun(String rootRunUrl, String rootTaskRunUid, String rootRunUid, String rootTaskRunAttemptUid) { this.rootRunUrl = rootRunUrl; this.rootTaskRunUid = rootTaskRunUid; this.rootRunUid = rootRunUid; this.rootTaskRunAttemptUid = rootTaskRunAttemptUid; } public String getRootRunUrl() { return rootRunUrl; } public String getRootTaskRunUid() { return rootTaskRunUid; } public String getRootRunUid() { return rootRunUid; } public String getRootTaskRunAttemptUid() { return rootTaskRunAttemptUid; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/Run.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; @JsonIgnoreProperties(ignoreUnknown = true) public class Run { private String rootRunUid; private String projectName; public String getRootRunUid() { return rootRunUid; } public void setRootRunUid(String rootRunUid) { this.rootRunUid = rootRunUid; } public String getProjectName() { return projectName; } public void setProjectName(String projectName) { this.projectName = projectName; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/RunAndDefinition.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.List; public class RunAndDefinition { private final TaskRun taskRun; private final TaskDefinition taskDefinition; private final List<LogTarget> targets; public RunAndDefinition(TaskRun taskRun, TaskDefinition taskDefinition, List<LogTarget> targets) { this.taskRun = taskRun; this.taskDefinition = taskDefinition; this.targets = targets; } public TaskRun taskRun() { return taskRun; } public TaskDefinition taskDefinition() { return taskDefinition; } public List<LogTarget> targets() { return targets; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/SaveExternalLinks.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import java.util.Map; public class SaveExternalLinks { private final String taskRunAttemptUid; private final Map<String, String> externalLinksDict; public SaveExternalLinks(String taskRunAttemptUid, Map<String, String> externalLinksDict) { this.taskRunAttemptUid = taskRunAttemptUid; this.externalLinksDict = externalLinksDict; } public String getTaskRunAttemptUid() { return taskRunAttemptUid; } public Map<String, String> getExternalLinksDict() { return externalLinksDict; } }
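A short sketch; the attempt UID and the link are placeholders:

Map<String, String> links = new HashMap<>();
links.put("spark_ui", "http://spark-master:4040"); // hypothetical external link
SaveExternalLinks payload = new SaveExternalLinks(taskRunAttemptUid, links);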
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/SaveTaskRunLog.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import ai.databand.config.DbndConfig; import ai.databand.log.TruncatedLog; public class SaveTaskRunLog { private final String taskRunAttemptUid; private final TruncatedLog logBody; public SaveTaskRunLog(DbndConfig config, String taskRunAttemptUid, String logBody) { this(taskRunAttemptUid, new TruncatedLog(config, logBody)); } public SaveTaskRunLog(String taskRunAttemptUid, TruncatedLog logBody) { this.taskRunAttemptUid = taskRunAttemptUid; this.logBody = logBody; } public String getTaskRunAttemptUid() { return taskRunAttemptUid; } public String getLogBody() { return logBody.toString(); } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/SetRunState.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import ai.databand.schema.jackson.ZonedDateTimeDeserializer; import ai.databand.schema.jackson.ZonedDateTimeSerializer; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import java.time.ZonedDateTime; public class SetRunState { private final String runUid; private final String state; @JsonSerialize(using = ZonedDateTimeSerializer.class) @JsonDeserialize(using = ZonedDateTimeDeserializer.class) private final ZonedDateTime timeStamp; public SetRunState(String runUid, String state, ZonedDateTime timeStamp) { this.runUid = runUid; this.state = state; this.timeStamp = timeStamp; } public String getRunUid() { return runUid; } public String getState() { return state; } public ZonedDateTime getTimeStamp() { return timeStamp; } }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/Target.java
/* * © Copyright Databand.ai, an IBM Company 2022 */ package ai.databand.schema; import ai.databand.schema.jackson.ZonedDateTimeDeserializer; import ai.databand.schema.jackson.ZonedDateTimeSerializer; import com.fasterxml.jackson.databind.annotation.JsonDeserialize; import com.fasterxml.jackson.databind.annotation.JsonSerialize; import java.time.ZonedDateTime; public class Target { private String taskRunUid; private String parameterName; private String path; @JsonSerialize(using = ZonedDateTimeSerializer.class) @JsonDeserialize(using = ZonedDateTimeDeserializer.class) private ZonedDateTime createdDate; }
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TargetOperation.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import java.util.List;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TargetOperation {

    private String path;
    private String targetUid;
    private String dataHash;
    private String taskRunParamUid;
    private String paramName;
    private String taskRunUid;
    private String runUid;
    private String taskRunName;
    private String valuePreview;
    private String operationType;
    private String uid;
    private String dataSchema;
    private List<Long> dataDimensions;

    public String getPath() {
        return path;
    }

    public void setPath(String path) {
        this.path = path;
    }

    public String getTargetUid() {
        return targetUid;
    }

    public void setTargetUid(String targetUid) {
        this.targetUid = targetUid;
    }

    public String getDataHash() {
        return dataHash;
    }

    public void setDataHash(String dataHash) {
        this.dataHash = dataHash;
    }

    public String getTaskRunParamUid() {
        return taskRunParamUid;
    }

    public void setTaskRunParamUid(String taskRunParamUid) {
        this.taskRunParamUid = taskRunParamUid;
    }

    public String getParamName() {
        return paramName;
    }

    public void setParamName(String paramName) {
        this.paramName = paramName;
    }

    public String getTaskRunUid() {
        return taskRunUid;
    }

    public void setTaskRunUid(String taskRunUid) {
        this.taskRunUid = taskRunUid;
    }

    public String getRunUid() {
        return runUid;
    }

    public void setRunUid(String runUid) {
        this.runUid = runUid;
    }

    public String getTaskRunName() {
        return taskRunName;
    }

    public void setTaskRunName(String taskRunName) {
        this.taskRunName = taskRunName;
    }

    public String getValuePreview() {
        return valuePreview;
    }

    public void setValuePreview(String valuePreview) {
        this.valuePreview = valuePreview;
    }

    public String getOperationType() {
        return operationType;
    }

    public void setOperationType(String operationType) {
        this.operationType = operationType;
    }

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public String getDataSchema() {
        return dataSchema;
    }

    public void setDataSchema(String dataSchema) {
        this.dataSchema = dataSchema;
    }

    public List<Long> getDataDimensions() {
        return dataDimensions;
    }

    public void setDataDimensions(List<Long> dataDimensions) {
        this.dataDimensions = dataDimensions;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskDefinition.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import java.util.List;

public class TaskDefinition {

    private final String name;
    private final String moduleSource;
    private final String sourceHash;
    private final String classVersion;
    private final String taskDefinitionUid;
    private final String moduleSourceHash;
    private final List<TaskParamDefinition> taskParamDefinitions;
    private final String family;
    private final String type;
    private final String source;

    public TaskDefinition(String name,
                          String moduleSource,
                          String sourceHash,
                          String classVersion,
                          String taskDefinitionUid,
                          String moduleSourceHash,
                          List<TaskParamDefinition> taskParamDefinitions,
                          String family,
                          String type,
                          String source) {
        this.name = name;
        this.moduleSource = moduleSource;
        this.sourceHash = sourceHash;
        this.classVersion = classVersion;
        this.taskDefinitionUid = taskDefinitionUid;
        this.moduleSourceHash = moduleSourceHash;
        this.taskParamDefinitions = taskParamDefinitions;
        this.family = family;
        this.type = type;
        this.source = source;
    }

    public String getName() {
        return name;
    }

    public String getModuleSource() {
        return moduleSource;
    }

    public String getSourceHash() {
        return sourceHash;
    }

    public String getClassVersion() {
        return classVersion;
    }

    public String getTaskDefinitionUid() {
        return taskDefinitionUid;
    }

    public String getModuleSourceHash() {
        return moduleSourceHash;
    }

    public List<TaskParamDefinition> getTaskParamDefinitions() {
        return taskParamDefinitions;
    }

    public String getFamily() {
        return family;
    }

    public String getType() {
        return type;
    }

    public String getSource() {
        return source;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskFullGraph.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

import java.util.List;
import java.util.Map;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TaskFullGraph {

    private String rootTaskRunUid;
    private int root;
    private Map<String, NodeInfo> nodesInfo;
    private List<NodeRelationInfo> children;
    private List<NodeRelationInfo> upstreams;

    public String getRootTaskRunUid() {
        return rootTaskRunUid;
    }

    public void setRootTaskRunUid(String rootTaskRunUid) {
        this.rootTaskRunUid = rootTaskRunUid;
    }

    public int getRoot() {
        return root;
    }

    public void setRoot(int root) {
        this.root = root;
    }

    public Map<String, NodeInfo> getNodesInfo() {
        return nodesInfo;
    }

    public void setNodesInfo(Map<String, NodeInfo> nodesInfo) {
        this.nodesInfo = nodesInfo;
    }

    public List<NodeRelationInfo> getChildren() {
        return children;
    }

    public void setChildren(List<NodeRelationInfo> children) {
        this.children = children;
    }

    public List<NodeRelationInfo> getUpstreams() {
        return upstreams;
    }

    public void setUpstreams(List<NodeRelationInfo> upstreams) {
        this.upstreams = upstreams;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskParamDefinition.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonProperty;

public class TaskParamDefinition {

    private String name;
    private String kind;
    private String group;
    private boolean significant;
    private boolean loadOnBuild;
    private String valueType;
    private String description;

    @JsonProperty("default")
    private String defaultValue;

    public TaskParamDefinition(String name,
                               String kind,
                               String group,
                               boolean significant,
                               boolean loadOnBuild,
                               String valueType,
                               String description,
                               String defaultValue) {
        this.name = name;
        this.kind = kind;
        this.group = group;
        this.significant = significant;
        this.loadOnBuild = loadOnBuild;
        this.valueType = valueType;
        this.description = description;
        this.defaultValue = defaultValue;
    }

    public String getName() {
        return name;
    }

    public String getKind() {
        return kind;
    }

    public String getGroup() {
        return group;
    }

    public boolean isSignificant() {
        return significant;
    }

    public boolean isLoadOnBuild() {
        return loadOnBuild;
    }

    public String getValueType() {
        return valueType;
    }

    public String getDescription() {
        return description;
    }

    public String getDefaultValue() {
        return defaultValue;
    }
}
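A minimal usage sketch (not part of the dbnd-client sources; the class name and all parameter values below are invented for illustration): serializing a TaskParamDefinition with Jackson's standard ObjectMapper. The @JsonProperty("default") annotation renames the defaultValue field in the JSON payload, since "default" is a reserved word in Java and cannot be used as a field name.

import ai.databand.schema.TaskParamDefinition;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TaskParamDefinitionExample {

    public static void main(String[] args) throws Exception {
        // all values here are illustrative, not taken from a real run
        TaskParamDefinition param = new TaskParamDefinition(
            "input_path", "task_input", "inputs",
            true, false, "str", "Path to the input file", "/tmp/in.csv");
        // defaultValue is emitted under the JSON key "default"
        System.out.println(new ObjectMapper().writeValueAsString(param));
    }
}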
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskRun.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import ai.databand.schema.jackson.LocalDateDeserializer;
import ai.databand.schema.jackson.LocalDateSerializer;
import ai.databand.schema.jackson.ZonedDateTimeDeserializer;
import ai.databand.schema.jackson.ZonedDateTimeSerializer;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import java.time.LocalDate;
import java.time.ZonedDateTime;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TaskRun {

    private String uid;
    private String runUid;
    private boolean isRoot;
    private boolean isSystem;
    private String logRemote;
    private String version;
    private String taskRunUid;
    private String taskSignature;
    private String name;
    private List<TaskRunParam> taskRunParams;
    private String outputSignature;
    private boolean isSkipped;

    @JsonSerialize(using = LocalDateSerializer.class)
    @JsonDeserialize(using = LocalDateDeserializer.class)
    private LocalDate targetDate;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime executionDate;

    private String logLocal;
    private String state;
    private String taskDefinitionUid;
    private String commandLine;
    private boolean isReused;
    private boolean hasUpstreams;
    private String taskRunAttemptUid;
    private String taskAfId;
    private boolean isDynamic;
    private boolean hasDownstreams;
    private String functionalCall;
    private String taskId;
    private String env;

    @JsonIgnore
    private StringBuilder logBuffer;

    @JsonIgnore
    private Map<String, Object> metrics;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime startDate;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private ZonedDateTime endDate;

    @JsonIgnore
    private List<TaskRun> upstreamTasks = new ArrayList<>(1);

    private Integer latestTaskRunAttemptId;

    private Map<String, String> externalLinks;

    public TaskRun() {
    }

    public TaskRun(String runUid,
                   boolean isRoot,
                   boolean isSystem,
                   String logRemote,
                   String version,
                   String taskRunUid,
                   String taskSignature,
                   String name,
                   List<TaskRunParam> taskRunParams,
                   String outputSignature,
                   boolean isSkipped,
                   LocalDate targetDate,
                   ZonedDateTime executionDate,
                   String logLocal,
                   String state,
                   String taskDefinitionUid,
                   String commandLine,
                   boolean isReused,
                   boolean hasUpstreams,
                   String taskRunAttemptUid,
                   String taskAfId,
                   boolean isDynamic,
                   boolean hasDownstreams,
                   String functionalCall,
                   String taskId,
                   String env,
                   Map<String, String> externalLinks) {
        this.runUid = runUid;
        this.isRoot = isRoot;
        this.isSystem = isSystem;
        this.logRemote = logRemote;
        this.version = version;
        this.taskRunUid = taskRunUid;
        this.taskSignature = taskSignature;
        this.name = name;
        this.taskRunParams = taskRunParams;
        this.outputSignature = outputSignature;
        this.isSkipped = isSkipped;
        this.targetDate = targetDate;
        this.executionDate = executionDate;
        this.logLocal = logLocal;
        this.state = state;
        this.taskDefinitionUid = taskDefinitionUid;
        this.commandLine = commandLine;
        this.isReused = isReused;
        this.hasUpstreams = hasUpstreams;
        this.taskRunAttemptUid = taskRunAttemptUid;
        this.taskAfId = taskAfId;
        this.isDynamic = isDynamic;
        this.hasDownstreams = hasDownstreams;
        this.functionalCall = functionalCall;
        this.taskId = taskId;
        this.env = env;
        this.externalLinks = externalLinks;
    }

    public String getUid() {
        return uid;
    }

    public void setUid(String uid) {
        this.uid = uid;
    }

    public String getRunUid() {
        return runUid;
    }

    public void setRunUid(String runUid) {
        this.runUid = runUid;
    }

    public boolean getIsRoot() {
        return isRoot;
    }

    public boolean getIsSystem() {
        return isSystem;
    }

    public String getLogRemote() {
        return logRemote;
    }

    public void setLogRemote(String logRemote) {
        this.logRemote = logRemote;
    }

    public String getVersion() {
        return version;
    }

    public void setVersion(String version) {
        this.version = version;
    }

    public String getTaskRunUid() {
        return taskRunUid;
    }

    public void setTaskRunUid(String taskRunUid) {
        this.taskRunUid = taskRunUid;
    }

    public String getTaskSignature() {
        return taskSignature;
    }

    public void setTaskSignature(String taskSignature) {
        this.taskSignature = taskSignature;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public List<TaskRunParam> getTaskRunParams() {
        return taskRunParams;
    }

    public void setTaskRunParams(List<TaskRunParam> taskRunParams) {
        this.taskRunParams = taskRunParams;
    }

    public String getOutputSignature() {
        return outputSignature;
    }

    public void setOutputSignature(String outputSignature) {
        this.outputSignature = outputSignature;
    }

    public boolean getIsSkipped() {
        return isSkipped;
    }

    public LocalDate getTargetDate() {
        return targetDate;
    }

    public void setTargetDate(LocalDate targetDate) {
        this.targetDate = targetDate;
    }

    public ZonedDateTime getExecutionDate() {
        return executionDate;
    }

    public void setExecutionDate(ZonedDateTime executionDate) {
        this.executionDate = executionDate;
    }

    public String getLogLocal() {
        return logLocal;
    }

    public void setLogLocal(String logLocal) {
        this.logLocal = logLocal;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public String getTaskDefinitionUid() {
        return taskDefinitionUid;
    }

    public void setTaskDefinitionUid(String taskDefinitionUid) {
        this.taskDefinitionUid = taskDefinitionUid;
    }

    public String getCommandLine() {
        return commandLine;
    }

    public void setCommandLine(String commandLine) {
        this.commandLine = commandLine;
    }

    public boolean getIsReused() {
        return isReused;
    }

    public boolean getHasUpstreams() {
        return hasUpstreams;
    }

    public void setHasUpstreams(boolean hasUpstreams) {
        this.hasUpstreams = hasUpstreams;
    }

    public String getTaskRunAttemptUid() {
        return taskRunAttemptUid;
    }

    public void setTaskRunAttemptUid(String taskRunAttemptUid) {
        this.taskRunAttemptUid = taskRunAttemptUid;
    }

    public String getTaskAfId() {
        return taskAfId;
    }

    public void setTaskAfId(String taskAfId) {
        this.taskAfId = taskAfId;
    }

    public boolean getIsDynamic() {
        return isDynamic;
    }

    public boolean getHasDownstreams() {
        return hasDownstreams;
    }

    public void setHasDownstreams(boolean hasDownstreams) {
        this.hasDownstreams = hasDownstreams;
    }

    public String getFunctionalCall() {
        return functionalCall;
    }

    public void setFunctionalCall(String functionalCall) {
        this.functionalCall = functionalCall;
    }

    public String getTaskId() {
        return taskId;
    }

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public String getEnv() {
        return env;
    }

    public void setEnv(String env) {
        this.env = env;
    }

    public void setRoot(boolean root) {
        isRoot = root;
    }

    public void setSystem(boolean system) {
        isSystem = system;
    }

    public void setSkipped(boolean skipped) {
        isSkipped = skipped;
    }

    public void setReused(boolean reused) {
        isReused = reused;
    }

    public void setDynamic(boolean dynamic) {
        isDynamic = dynamic;
    }

    public ZonedDateTime getStartDate() {
        return startDate;
    }

    public void setStartDate(ZonedDateTime startDate) {
        this.startDate = startDate;
    }

    public ZonedDateTime getEndDate() {
        return endDate;
    }

    public void setEndDate(ZonedDateTime endDate) {
        this.endDate = endDate;
    }

    public Integer getLatestTaskRunAttemptId() {
        return latestTaskRunAttemptId;
    }

    public void setLatestTaskRunAttemptId(Integer latestTaskRunAttemptId) {
        this.latestTaskRunAttemptId = latestTaskRunAttemptId;
    }

    public Map<String, String> getExternalLinks() {
        return externalLinks;
    }

    public void appendLog(String msg) {
        if (logBuffer == null) {
            logBuffer = new StringBuilder();
        }
        // mirror the message into every registered upstream task
        for (TaskRun upstream : upstreamTasks) {
            upstream.appendLog(msg);
        }
        logBuffer.append(msg);
    }

    @JsonIgnore
    public String getTaskLog() {
        if (logBuffer == null) {
            return null;
        }
        return logBuffer.toString();
    }

    public void appendPrefixedMetrics(Map<String, Object> values) {
        if (metrics == null) {
            metrics = new HashMap<>(1);
        }
        metrics.putAll(values);
    }

    public void appendMetrics(Map<String, Object> values) {
        // propagate metrics to every registered upstream task first
        for (TaskRun upstream : upstreamTasks) {
            upstream.appendMetrics(values);
        }
        if (metrics == null) {
            metrics = new HashMap<>(1);
            metrics.putAll(values);
            return;
        }
        // only keys reported earlier are accumulated; numeric values are summed
        for (Map.Entry<String, Object> m : values.entrySet()) {
            String key = m.getKey();
            if (metrics.containsKey(key)) {
                Object existingValue = metrics.get(key);
                if (existingValue instanceof Number && m.getValue() instanceof Number) {
                    // sum via Number.longValue() rather than a direct (Long) cast,
                    // which would throw ClassCastException for Integer values
                    metrics.put(key, ((Number) existingValue).longValue() + ((Number) m.getValue()).longValue());
                }
            }
        }
    }

    @JsonIgnore
    public Map<String, Object> getMetrics() {
        if (metrics == null) {
            return Collections.emptyMap();
        }
        return metrics;
    }

    public void addUpstream(TaskRun upstream) {
        upstreamTasks.add(upstream);
    }
}
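A minimal sketch (hypothetical, not part of the dbnd-client sources; the class name is invented) of how TaskRun propagates logs and metrics: appendLog and appendMetrics recurse into every upstream task registered via addUpstream, so a parent task accumulates whatever its children report, and repeated numeric metrics under the same key are summed.

import ai.databand.schema.TaskRun;

import java.util.Collections;

public class TaskRunPropagationExample {

    public static void main(String[] args) {
        TaskRun parent = new TaskRun();
        TaskRun child = new TaskRun();
        child.addUpstream(parent);

        child.appendLog("step finished\n");
        child.appendMetrics(Collections.<String, Object>singletonMap("rows", 100L));
        child.appendMetrics(Collections.<String, Object>singletonMap("rows", 50L));

        System.out.println(child.getMetrics().get("rows")); // 150: same-key numeric metrics are summed
        System.out.println(parent.getTaskLog());            // the log line was mirrored to the upstream
    }
}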
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskRunAttempt.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TaskRunAttempt {

    private String id;
    private ErrorInfo error;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public ErrorInfo getError() {
        return error;
    }

    public void setError(ErrorInfo error) {
        this.error = error;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskRunAttemptLog.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

@JsonIgnoreProperties(ignoreUnknown = true)
public class TaskRunAttemptLog {

    private String taskRunAttemptUid;
    private String taskId;
    private String taskUid;
    private String logBody;

    public String getTaskRunAttemptUid() {
        return taskRunAttemptUid;
    }

    public void setTaskRunAttemptUid(String taskRunAttemptUid) {
        this.taskRunAttemptUid = taskRunAttemptUid;
    }

    public String getTaskId() {
        return taskId;
    }

    public void setTaskId(String taskId) {
        this.taskId = taskId;
    }

    public String getTaskUid() {
        return taskUid;
    }

    public void setTaskUid(String taskUid) {
        this.taskUid = taskUid;
    }

    public String getLogBody() {
        return logBody;
    }

    public void setLogBody(String logBody) {
        this.logBody = logBody;
    }
}
0
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand
java-sources/ai/databand/dbnd-client/1.0.28.1/ai/databand/schema/TaskRunAttemptUpdate.java
/*
 * © Copyright Databand.ai, an IBM Company 2022
 */

package ai.databand.schema;

import ai.databand.schema.jackson.ZonedDateTimeDeserializer;
import ai.databand.schema.jackson.ZonedDateTimeSerializer;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

import java.time.ZonedDateTime;
import java.util.Collections;
import java.util.Map;

public class TaskRunAttemptUpdate {

    private final String taskRunUid;
    private final String taskRunAttemptUid;
    private final String state;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private final ZonedDateTime timestamp;

    private final ErrorInfo error;

    @JsonSerialize(using = ZonedDateTimeSerializer.class)
    @JsonDeserialize(using = ZonedDateTimeDeserializer.class)
    private final ZonedDateTime startDate;

    private final Map<String, String> externalLinksDict;

    public TaskRunAttemptUpdate(String taskRunUid,
                                String taskRunAttemptUid,
                                String state,
                                ZonedDateTime timestamp,
                                ZonedDateTime startDate,
                                ErrorInfo error) {
        this(taskRunUid, taskRunAttemptUid, state, timestamp, startDate, error, Collections.emptyMap());
    }

    public TaskRunAttemptUpdate(String taskRunUid,
                                String taskRunAttemptUid,
                                String state,
                                ZonedDateTime timestamp,
                                ZonedDateTime startDate,
                                ErrorInfo error,
                                Map<String, String> externalLinksDict) {
        this.taskRunUid = taskRunUid;
        this.taskRunAttemptUid = taskRunAttemptUid;
        this.state = state;
        this.timestamp = timestamp;
        this.startDate = startDate;
        this.error = error;
        this.externalLinksDict = externalLinksDict;
    }

    public String getTaskRunUid() {
        return taskRunUid;
    }

    public String getTaskRunAttemptUid() {
        return taskRunAttemptUid;
    }

    public String getState() {
        return state;
    }

    public ZonedDateTime getTimestamp() {
        return timestamp;
    }

    public ErrorInfo getError() {
        return error;
    }

    public ZonedDateTime getStartDate() {
        return startDate;
    }

    public Map<String, String> getExternalLinksDict() {
        return externalLinksDict;
    }
}
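A brief sketch (hypothetical, not from the dbnd-client sources; the class name, UIDs, and state string are placeholders): the six-argument TaskRunAttemptUpdate constructor delegates to the full one with an empty external-links map, so callers with no links to report can omit the last argument.

import ai.databand.schema.TaskRunAttemptUpdate;

import java.time.ZonedDateTime;

public class AttemptUpdateExample {

    public static void main(String[] args) {
        ZonedDateTime now = ZonedDateTime.now();
        // a null ErrorInfo indicates the attempt finished without an error
        TaskRunAttemptUpdate update = new TaskRunAttemptUpdate(
            "task-run-uid", "attempt-uid", "success", now, now, null);
        System.out.println(update.getExternalLinksDict().isEmpty()); // true
    }
}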