/**
* Copyright 2019 The JoyQueue Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.chubao.joyqueue.broker.kafka.command;
import io.chubao.joyqueue.broker.kafka.KafkaCommandType;
/**
* Created by zhangkepeng on 17-2-10.
*/
public class LeaveGroupResponse extends KafkaRequestOrResponse {
private short errorCode;
public LeaveGroupResponse() {
}
public LeaveGroupResponse(short errorCode) {
this.errorCode = errorCode;
}
public short getErrorCode() {
return errorCode;
}
public void setErrorCode(short errorCode) {
this.errorCode = errorCode;
}
@Override
public int type() {
return KafkaCommandType.LEAVE_GROUP.getCode();
}
@Override
public String toString() {
StringBuilder responseStringBuilder = new StringBuilder();
responseStringBuilder.append("Name: " + this.getClass().getSimpleName());
return responseStringBuilder.toString();
}
}
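/*
 * Usage sketch (not part of JoyQueue): how a broker-side handler might answer a
 * Kafka LeaveGroup request with the response type above. The handler shape is
 * hypothetical; only LeaveGroupResponse itself comes from the sources.
 */
class LeaveGroupResponseSketch {
    LeaveGroupResponse successResponse() {
        short noError = 0; // Kafka protocol error code 0 means "no error"
        return new LeaveGroupResponse(noError);
    }
}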
|
package com.iteaj.util;
/**
* create time: 2018/7/22
* Typically used in scenarios where an API is exposed to external callers.
* @see ApiResponse
* @see com.iteaj.util.module.mvc.ApiController
* @see IErrorCode
* @author iteaj
* @version 1.0
* @since JDK1.7
*/
public class ApiException extends RuntimeException {
private IErrorCode code;
public ApiException(IErrorCode code) {
this.code = code;
}
public ApiException(Throwable cause, IErrorCode code) {
super(cause);
this.code = code;
}
public IErrorCode getCode() {
return code;
}
}
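/*
 * Usage sketch (not part of the library): a service method raising ApiException
 * so that a controller (see ApiController) can translate it into an ApiResponse.
 * The requireUser method and its arguments are hypothetical; concrete IErrorCode
 * implementations live elsewhere in iteaj-util.
 */
class ApiExceptionUsageSketch {
    void requireUser(Object user, IErrorCode userNotFound) {
        if (user == null) {
            // Callers (or the framework) catch ApiException and read getCode()
            // to build the outward-facing error response.
            throw new ApiException(userNotFound);
        }
    }
}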
|
/**
* Copyright 2017 SPeCS.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package weaver.kadabra.control;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import weaver.kadabra.concurrent.KadabraChannel;
import weaver.kadabra.concurrent.KadabraThread;
import weaver.kadabra.concurrent.Product;
/**
*
* @author tiago
*
* @param <T>
*            the type of the versions being measured and tested
*/
public class ConcurrentControlPoint<T> {
private AtomicReference<ControlPoint<T>> cp;
private KadabraChannel<T, Long> measurementsChannel;
private KadabraChannel<T, T> versionsChannel;
private KadabraThread thread;
private long timeout;
private TimeUnit unit;
public static <T> ConcurrentControlPoint<T> newInstance(String cpName, int measurementsCapacity,
int versionsCapacity) {
return newInstance(new ControlPoint<>(cpName), measurementsCapacity, versionsCapacity);
}
public static <T> ConcurrentControlPoint<T> newInstance(ControlPoint<T> cp, int measurementsCapacity) {
return newInstance(cp, measurementsCapacity, 1);
}
public static <T> ConcurrentControlPoint<T> newInstance(ControlPoint<T> cp, int measurementsCapacity,
int versionsCapacity) {
return new ConcurrentControlPoint<>(cp, measurementsCapacity, versionsCapacity);
}
private ConcurrentControlPoint(ControlPoint<T> cp, int measurementsCapacity, int versionsCapacity) {
measurementsChannel = KadabraChannel.newInstance(measurementsCapacity);
versionsChannel = KadabraChannel.newInstance(versionsCapacity);
this.cp = new AtomicReference<>(cp);
thread = KadabraThread.newInstance(this::run);
timeout = 1000;
unit = TimeUnit.MILLISECONDS;
}
/**
* Starts the thread that expects measurements
*/
public void startThread() {
thread.start();
}
/**
* Terminate the thread execution
*/
public void stopThread() {
thread.terminate();
}
private void run() {
System.out.println("STARTED");
while (thread.isRunning()) {
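// Poll with a timeout so the loop can re-check isRunning() and exit after terminate().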
Product<T, Long> measurement = measurementsChannel.poll(timeout, unit);
if (measurement == null) {
continue;
}
System.out.println(measurement);
cp.get().measurement(measurement.getKey(), measurement.getValue());
}
System.out.println("Done");
}
public boolean offer(T id, long measurement) {
return measurementsChannel.offer(id, measurement);
}
public T update() {
return cp.get().update();
}
/**
* Gets a version to test or, if no version to test exists, the best version so far<br>
* <b>NOTE:</b> best may be null if no tests were done so far
*
* @return a version to test, or the best version so far (possibly null)
*/
public T pollOrBest() {
T poll = poll();
return poll != null ? poll : cp.get().getBest();
}
/**
* Gets a version to test, or null if no version to test exists
*
* @return the next version to test, or null if no version to test exists
*/
public T poll() {
Product<T, T> poll = versionsChannel.poll();
if (poll == null) {
return null;
}
return poll.getValue();
}
public ControlPoint<T> getControlPoint() {
return cp.get();
}
}
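/*
 * Usage sketch (not part of the original sources): driving a control point from
 * an application thread. The name, capacities, and timing value are illustrative.
 */
class ConcurrentControlPointExample {
    void demo() {
        ConcurrentControlPoint<String> cp = ConcurrentControlPoint.newInstance("hotspot", 16, 4);
        cp.startThread(); // background thread drains offered measurements
        String version = cp.pollOrBest(); // a queued version, or the best so far (may be null early on)
        if (version != null) {
            long elapsedNanos = 42_000L; // hypothetical timing of running `version`
            cp.offer(version, elapsedNanos); // non-blocking; false if the measurements channel is full
        }
        cp.stopThread();
    }
}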
|
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp;
import com.squareup.okhttp.internal.Internal;
import com.squareup.okhttp.internal.SslContextBuilder;
import com.squareup.okhttp.internal.Util;
import com.squareup.okhttp.internal.io.InMemoryFileSystem;
import com.squareup.okhttp.mockwebserver.MockResponse;
import com.squareup.okhttp.mockwebserver.MockWebServer;
import com.squareup.okhttp.mockwebserver.RecordedRequest;
import java.io.File;
import java.io.IOException;
import java.net.CookieHandler;
import java.net.CookieManager;
import java.net.HttpCookie;
import java.net.HttpURLConnection;
import java.net.ResponseCache;
import java.security.Principal;
import java.security.cert.Certificate;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.NoSuchElementException;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import okio.Buffer;
import okio.BufferedSink;
import okio.BufferedSource;
import okio.GzipSink;
import okio.Okio;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import static com.squareup.okhttp.mockwebserver.SocketPolicy.DISCONNECT_AT_END;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/** Test caching with {@link OkUrlFactory}. */
public final class CacheTest {
private static final HostnameVerifier NULL_HOSTNAME_VERIFIER = new HostnameVerifier() {
@Override public boolean verify(String s, SSLSession sslSession) {
return true;
}
};
@Rule public MockWebServer server = new MockWebServer();
@Rule public MockWebServer server2 = new MockWebServer();
@Rule public InMemoryFileSystem fileSystem = new InMemoryFileSystem();
private final SSLContext sslContext = SslContextBuilder.localhost();
private final OkHttpClient client = new OkHttpClient();
private Cache cache;
private final CookieManager cookieManager = new CookieManager();
@Before public void setUp() throws Exception {
server.setProtocolNegotiationEnabled(false);
cache = new Cache(new File("/cache/"), Integer.MAX_VALUE, fileSystem);
client.setCache(cache);
CookieHandler.setDefault(cookieManager);
}
@After public void tearDown() throws Exception {
ResponseCache.setDefault(null);
CookieHandler.setDefault(null);
cache.delete();
}
/**
* Test that response caching is consistent with the RI and the spec.
* http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.4
*/
@Test public void responseCachingByResponseCode() throws Exception {
// Test each documented HTTP/1.1 code, plus the first unused value in each range.
// http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
// We can't test 100 because it's not really a response.
// assertCached(false, 100);
assertCached(false, 101);
assertCached(false, 102);
assertCached(true, 200);
assertCached(false, 201);
assertCached(false, 202);
assertCached(true, 203);
assertCached(true, 204);
assertCached(false, 205);
assertCached(false, 206); // Electing not to cache partial responses
assertCached(false, 207);
assertCached(true, 300);
assertCached(true, 301);
assertCached(true, 302);
assertCached(false, 303);
assertCached(false, 304);
assertCached(false, 305);
assertCached(false, 306);
assertCached(true, 307);
assertCached(true, 308);
assertCached(false, 400);
assertCached(false, 401);
assertCached(false, 402);
assertCached(false, 403);
assertCached(true, 404);
assertCached(true, 405);
assertCached(false, 406);
assertCached(false, 408);
assertCached(false, 409);
// the HTTP spec permits caching 410s, but the RI doesn't.
assertCached(true, 410);
assertCached(false, 411);
assertCached(false, 412);
assertCached(false, 413);
assertCached(true, 414);
assertCached(false, 415);
assertCached(false, 416);
assertCached(false, 417);
assertCached(false, 418);
assertCached(false, 500);
assertCached(true, 501);
assertCached(false, 502);
assertCached(false, 503);
assertCached(false, 504);
assertCached(false, 505);
assertCached(false, 506);
}
private void assertCached(boolean shouldPut, int responseCode) throws Exception {
server = new MockWebServer();
MockResponse mockResponse = new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setResponseCode(responseCode)
.setBody("ABCDE")
.addHeader("WWW-Authenticate: challenge");
if (responseCode == HttpURLConnection.HTTP_PROXY_AUTH) {
mockResponse.addHeader("Proxy-Authenticate: Basic realm=\"protected area\"");
} else if (responseCode == HttpURLConnection.HTTP_UNAUTHORIZED) {
mockResponse.addHeader("WWW-Authenticate: Basic realm=\"protected area\"");
} else if (responseCode == HttpURLConnection.HTTP_NO_CONTENT
|| responseCode == HttpURLConnection.HTTP_RESET) {
mockResponse.setBody(""); // We forbid bodies for 204 and 205.
}
server.enqueue(mockResponse);
server.start();
Request request = new Request.Builder()
.url(server.url("/"))
.build();
Response response = client.newCall(request).execute();
assertEquals(responseCode, response.code());
// Exhaust the content stream.
response.body().string();
Response cached = cache.get(request);
if (shouldPut) {
assertNotNull(Integer.toString(responseCode), cached);
cached.body().close();
} else {
assertNull(Integer.toString(responseCode), cached);
}
server.shutdown(); // tearDown() isn't sufficient; this test starts multiple servers
}
@Test public void responseCachingAndInputStreamSkipWithFixedLength() throws IOException {
testResponseCaching(TransferKind.FIXED_LENGTH);
}
@Test public void responseCachingAndInputStreamSkipWithChunkedEncoding() throws IOException {
testResponseCaching(TransferKind.CHUNKED);
}
@Test public void responseCachingAndInputStreamSkipWithNoLengthHeaders() throws IOException {
testResponseCaching(TransferKind.END_OF_STREAM);
}
/**
* Skipping bytes in the input stream caused ResponseCache corruption.
* http://code.google.com/p/android/issues/detail?id=8175
*/
private void testResponseCaching(TransferKind transferKind) throws IOException {
MockResponse mockResponse = new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setStatus("HTTP/1.1 200 Fantastic");
transferKind.setBody(mockResponse, "I love puppies but hate spiders", 1);
server.enqueue(mockResponse);
// Make sure that calling skip() doesn't omit bytes from the cache.
Request request = new Request.Builder().url(server.url("/")).build();
Response response1 = client.newCall(request).execute();
BufferedSource in1 = response1.body().source();
assertEquals("I love ", in1.readUtf8("I love ".length()));
in1.skip("puppies but hate ".length());
assertEquals("spiders", in1.readUtf8("spiders".length()));
assertTrue(in1.exhausted());
in1.close();
assertEquals(1, cache.getWriteSuccessCount());
assertEquals(0, cache.getWriteAbortCount());
Response response2 = client.newCall(request).execute();
BufferedSource in2 = response2.body().source();
assertEquals("I love puppies but hate spiders",
in2.readUtf8("I love puppies but hate spiders".length()));
assertEquals(200, response2.code());
assertEquals("Fantastic", response2.message());
assertTrue(in2.exhausted());
in2.close();
assertEquals(1, cache.getWriteSuccessCount());
assertEquals(0, cache.getWriteAbortCount());
assertEquals(2, cache.getRequestCount());
assertEquals(1, cache.getHitCount());
}
@Test public void secureResponseCaching() throws IOException {
server.useHttps(sslContext.getSocketFactory(), false);
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setBody("ABC"));
client.setSslSocketFactory(sslContext.getSocketFactory());
client.setHostnameVerifier(NULL_HOSTNAME_VERIFIER);
Request request = new Request.Builder().url(server.url("/")).build();
Response response1 = client.newCall(request).execute();
BufferedSource in = response1.body().source();
assertEquals("ABC", in.readUtf8());
// OpenJDK 6 fails on this line, complaining that the connection isn't open yet
String suite = response1.handshake().cipherSuite();
List<Certificate> localCerts = response1.handshake().localCertificates();
List<Certificate> serverCerts = response1.handshake().peerCertificates();
Principal peerPrincipal = response1.handshake().peerPrincipal();
Principal localPrincipal = response1.handshake().localPrincipal();
Response response2 = client.newCall(request).execute(); // Cached!
assertEquals("ABC", response2.body().string());
assertEquals(2, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(1, cache.getHitCount());
assertEquals(suite, response2.handshake().cipherSuite());
assertEquals(localCerts, response2.handshake().localCertificates());
assertEquals(serverCerts, response2.handshake().peerCertificates());
assertEquals(peerPrincipal, response2.handshake().peerPrincipal());
assertEquals(localPrincipal, response2.handshake().localPrincipal());
}
@Test public void responseCachingAndRedirects() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setResponseCode(HttpURLConnection.HTTP_MOVED_PERM)
.addHeader("Location: /foo"));
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setBody("ABC"));
server.enqueue(new MockResponse()
.setBody("DEF"));
Request request = new Request.Builder().url(server.url("/")).build();
Response response1 = client.newCall(request).execute();
assertEquals("ABC", response1.body().string());
Response response2 = client.newCall(request).execute(); // Cached!
assertEquals("ABC", response2.body().string());
assertEquals(4, cache.getRequestCount()); // 2 requests + 2 redirects
assertEquals(2, cache.getNetworkCount());
assertEquals(2, cache.getHitCount());
}
@Test public void redirectToCachedResult() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("ABC"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_MOVED_PERM)
.addHeader("Location: /foo"));
server.enqueue(new MockResponse()
.setBody("DEF"));
Request request1 = new Request.Builder().url(server.url("/foo")).build();
Response response1 = client.newCall(request1).execute();
assertEquals("ABC", response1.body().string());
RecordedRequest recordedRequest1 = server.takeRequest();
assertEquals("GET /foo HTTP/1.1", recordedRequest1.getRequestLine());
assertEquals(0, recordedRequest1.getSequenceNumber());
Request request2 = new Request.Builder().url(server.url("/bar")).build();
Response response2 = client.newCall(request2).execute();
assertEquals("ABC", response2.body().string());
RecordedRequest recordedRequest2 = server.takeRequest();
assertEquals("GET /bar HTTP/1.1", recordedRequest2.getRequestLine());
assertEquals(1, recordedRequest2.getSequenceNumber());
// an unrelated request should reuse the pooled connection
Request request3 = new Request.Builder().url(server.url("/baz")).build();
Response response3 = client.newCall(request3).execute();
assertEquals("DEF", response3.body().string());
RecordedRequest recordedRequest3 = server.takeRequest();
assertEquals("GET /baz HTTP/1.1", recordedRequest3.getRequestLine());
assertEquals(2, recordedRequest3.getSequenceNumber());
}
@Test public void secureResponseCachingAndRedirects() throws IOException {
server.useHttps(sslContext.getSocketFactory(), false);
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setResponseCode(HttpURLConnection.HTTP_MOVED_PERM)
.addHeader("Location: /foo"));
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setBody("ABC"));
server.enqueue(new MockResponse()
.setBody("DEF"));
client.setSslSocketFactory(sslContext.getSocketFactory());
client.setHostnameVerifier(NULL_HOSTNAME_VERIFIER);
Response response1 = get(server.url("/"));
assertEquals("ABC", response1.body().string());
assertNotNull(response1.handshake().cipherSuite());
// Cached!
Response response2 = get(server.url("/"));
assertEquals("ABC", response2.body().string());
assertNotNull(response2.handshake().cipherSuite());
assertEquals(4, cache.getRequestCount()); // 2 direct + 2 redirect = 4
assertEquals(2, cache.getHitCount());
assertEquals(response1.handshake().cipherSuite(), response2.handshake().cipherSuite());
}
/**
* We've had bugs where caching and cross-protocol redirects yield class
* cast exceptions internal to the cache because we incorrectly assumed that
* HttpsURLConnection was always HTTPS and HttpURLConnection was always HTTP;
* in practice redirects mean that each can do either.
*
* https://github.com/square/okhttp/issues/214
*/
@Test public void secureResponseCachingAndProtocolRedirects() throws IOException {
server2.useHttps(sslContext.getSocketFactory(), false);
server2.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setBody("ABC"));
server2.enqueue(new MockResponse()
.setBody("DEF"));
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.setResponseCode(HttpURLConnection.HTTP_MOVED_PERM)
.addHeader("Location: " + server2.url("/")));
client.setSslSocketFactory(sslContext.getSocketFactory());
client.setHostnameVerifier(NULL_HOSTNAME_VERIFIER);
Response response1 = get(server.url("/"));
assertEquals("ABC", response1.body().string());
// Cached!
Response response2 = get(server.url("/"));
assertEquals("ABC", response2.body().string());
assertEquals(4, cache.getRequestCount()); // 2 direct + 2 redirect = 4
assertEquals(2, cache.getHitCount());
}
@Test public void foundCachedWithExpiresHeader() throws Exception {
temporaryRedirectCachedWithCachingHeader(302, "Expires", formatDate(1, TimeUnit.HOURS));
}
@Test public void foundCachedWithCacheControlHeader() throws Exception {
temporaryRedirectCachedWithCachingHeader(302, "Cache-Control", "max-age=60");
}
@Test public void temporaryRedirectCachedWithExpiresHeader() throws Exception {
temporaryRedirectCachedWithCachingHeader(307, "Expires", formatDate(1, TimeUnit.HOURS));
}
@Test public void temporaryRedirectCachedWithCacheControlHeader() throws Exception {
temporaryRedirectCachedWithCachingHeader(307, "Cache-Control", "max-age=60");
}
@Test public void foundNotCachedWithoutCacheHeader() throws Exception {
temporaryRedirectNotCachedWithoutCachingHeader(302);
}
@Test public void temporaryRedirectNotCachedWithoutCacheHeader() throws Exception {
temporaryRedirectNotCachedWithoutCachingHeader(307);
}
private void temporaryRedirectCachedWithCachingHeader(
int responseCode, String headerName, String headerValue) throws Exception {
server.enqueue(new MockResponse()
.setResponseCode(responseCode)
.addHeader(headerName, headerValue)
.addHeader("Location", "/a"));
server.enqueue(new MockResponse()
.addHeader(headerName, headerValue)
.setBody("a"));
server.enqueue(new MockResponse()
.setBody("b"));
server.enqueue(new MockResponse()
.setBody("c"));
HttpUrl url = server.url("/");
assertEquals("a", get(url).body().string());
assertEquals("a", get(url).body().string());
}
private void temporaryRedirectNotCachedWithoutCachingHeader(int responseCode) throws Exception {
server.enqueue(new MockResponse()
.setResponseCode(responseCode)
.addHeader("Location", "/a"));
server.enqueue(new MockResponse()
.setBody("a"));
server.enqueue(new MockResponse()
.setBody("b"));
HttpUrl url = server.url("/");
assertEquals("a", get(url).body().string());
assertEquals("b", get(url).body().string());
}
@Test public void serverDisconnectsPrematurelyWithContentLengthHeader() throws IOException {
testServerPrematureDisconnect(TransferKind.FIXED_LENGTH);
}
@Test public void serverDisconnectsPrematurelyWithChunkedEncoding() throws IOException {
testServerPrematureDisconnect(TransferKind.CHUNKED);
}
@Test public void serverDisconnectsPrematurelyWithNoLengthHeaders() throws IOException {
// Intentionally empty. This case doesn't make sense because there's no
// such thing as a premature disconnect when the disconnect itself
// indicates the end of the data stream.
}
private void testServerPrematureDisconnect(TransferKind transferKind) throws IOException {
MockResponse mockResponse = new MockResponse();
transferKind.setBody(mockResponse, "ABCDE\nFGHIJKLMNOPQRSTUVWXYZ", 16);
server.enqueue(truncateViolently(mockResponse, 16));
server.enqueue(new MockResponse()
.setBody("Request #2"));
BufferedSource bodySource = get(server.url("/")).body().source();
assertEquals("ABCDE", bodySource.readUtf8Line());
try {
bodySource.readUtf8Line();
fail("This implementation silently ignored a truncated HTTP body.");
} catch (IOException expected) {
} finally {
bodySource.close();
}
assertEquals(1, cache.getWriteAbortCount());
assertEquals(0, cache.getWriteSuccessCount());
Response response = get(server.url("/"));
assertEquals("Request #2", response.body().string());
assertEquals(1, cache.getWriteAbortCount());
assertEquals(1, cache.getWriteSuccessCount());
}
@Test public void clientPrematureDisconnectWithContentLengthHeader() throws IOException {
testClientPrematureDisconnect(TransferKind.FIXED_LENGTH);
}
@Test public void clientPrematureDisconnectWithChunkedEncoding() throws IOException {
testClientPrematureDisconnect(TransferKind.CHUNKED);
}
@Test public void clientPrematureDisconnectWithNoLengthHeaders() throws IOException {
testClientPrematureDisconnect(TransferKind.END_OF_STREAM);
}
private void testClientPrematureDisconnect(TransferKind transferKind) throws IOException {
// Setting a low transfer speed ensures that stream discarding will time out.
MockResponse mockResponse = new MockResponse()
.throttleBody(6, 1, TimeUnit.SECONDS);
transferKind.setBody(mockResponse, "ABCDE\nFGHIJKLMNOPQRSTUVWXYZ", 1024);
server.enqueue(mockResponse);
server.enqueue(new MockResponse()
.setBody("Request #2"));
Response response1 = get(server.url("/"));
BufferedSource in = response1.body().source();
assertEquals("ABCDE", in.readUtf8(5));
in.close();
try {
in.readByte();
fail("Expected an IllegalStateException because the source is closed.");
} catch (IllegalStateException expected) {
}
assertEquals(1, cache.getWriteAbortCount());
assertEquals(0, cache.getWriteSuccessCount());
Response response2 = get(server.url("/"));
assertEquals("Request #2", response2.body().string());
assertEquals(1, cache.getWriteAbortCount());
assertEquals(1, cache.getWriteSuccessCount());
}
@Test public void defaultExpirationDateFullyCachedForLessThan24Hours() throws Exception {
// last modified: 105 seconds ago
// served: 5 seconds ago
// default lifetime: (105 - 5) / 10 = 10 seconds
// expires: 10 seconds from served date = 5 seconds from now
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-105, TimeUnit.SECONDS))
.addHeader("Date: " + formatDate(-5, TimeUnit.SECONDS))
.setBody("A"));
HttpUrl url = server.url("/");
Response response1 = get(url);
assertEquals("A", response1.body().string());
Response response2 = get(url);
assertEquals("A", response2.body().string());
assertNull(response2.header("Warning"));
}
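// Sketch (illustrative, not part of the original suite): when a response carries
// Last-Modified but no explicit Expires or max-age, the cache falls back to a
// heuristic freshness lifetime of one tenth of the document's apparent age, as
// suggested by the HTTP caching RFCs. For the test above that is
// (105s - 5s) / 10 = 10s of freshness, of which 5s remain, so the second request
// is a hit. Heuristic lifetimes beyond 24 hours earn the 113 "Heuristic
// expiration" warning asserted in a later test.
private static long heuristicFreshnessMillis(Date served, Date lastModified) {
  return (served.getTime() - lastModified.getTime()) / 10;
}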
@Test public void defaultExpirationDateConditionallyCached() throws Exception {
// last modified: 115 seconds ago
// served: 15 seconds ago
// default lifetime: (115 - 15) / 10 = 10 seconds
// expires: 10 seconds from served date = 5 seconds ago
String lastModifiedDate = formatDate(-115, TimeUnit.SECONDS);
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Date: " + formatDate(-15, TimeUnit.SECONDS)));
assertEquals(lastModifiedDate, conditionalRequest.getHeader("If-Modified-Since"));
}
@Test public void defaultExpirationDateFullyCachedForMoreThan24Hours() throws Exception {
// last modified: 105 days ago
// served: 5 days ago
// default lifetime: (105 - 5) / 10 = 10 days
// expires: 10 days from served date = 5 days from now
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-105, TimeUnit.DAYS))
.addHeader("Date: " + formatDate(-5, TimeUnit.DAYS))
.setBody("A"));
assertEquals("A", get(server.url("/")).body().string());
Response response = get(server.url("/"));
assertEquals("A", response.body().string());
assertEquals("113 HttpURLConnection \"Heuristic expiration\"", response.header("Warning"));
}
@Test public void noDefaultExpirationForUrlsWithQueryString() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-105, TimeUnit.SECONDS))
.addHeader("Date: " + formatDate(-5, TimeUnit.SECONDS))
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/").newBuilder().addQueryParameter("foo", "bar").build();
assertEquals("A", get(url).body().string());
assertEquals("B", get(url).body().string());
}
@Test public void expirationDateInThePastWithLastModifiedHeader() throws Exception {
String lastModifiedDate = formatDate(-2, TimeUnit.HOURS);
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS)));
assertEquals(lastModifiedDate, conditionalRequest.getHeader("If-Modified-Since"));
}
@Test public void expirationDateInThePastWithNoLastModifiedHeader() throws Exception {
assertNotCached(new MockResponse()
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS)));
}
@Test public void expirationDateInTheFuture() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS)));
}
@Test public void maxAgePreferredWithMaxAgeAndExpires() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Date: " + formatDate(0, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=60"));
}
@Test public void maxAgeInThePastWithDateAndLastModifiedHeaders() throws Exception {
String lastModifiedDate = formatDate(-2, TimeUnit.HOURS);
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("Date: " + formatDate(-120, TimeUnit.SECONDS))
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Cache-Control: max-age=60"));
assertEquals(lastModifiedDate, conditionalRequest.getHeader("If-Modified-Since"));
}
@Test public void maxAgeInThePastWithDateHeaderButNoLastModifiedHeader() throws Exception {
// Chrome interprets max-age relative to the local clock. Our cache and
// Firefox both use the earlier of the local clock and the server's clock.
assertNotCached(new MockResponse()
.addHeader("Date: " + formatDate(-120, TimeUnit.SECONDS))
.addHeader("Cache-Control: max-age=60"));
}
@Test public void maxAgeInTheFutureWithDateHeader() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Date: " + formatDate(0, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=60"));
}
@Test public void maxAgeInTheFutureWithNoDateHeader() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Cache-Control: max-age=60"));
}
@Test public void maxAgeWithLastModifiedButNoServedDate() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-120, TimeUnit.SECONDS))
.addHeader("Cache-Control: max-age=60"));
}
@Test public void maxAgeInTheFutureWithDateAndLastModifiedHeaders() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-120, TimeUnit.SECONDS))
.addHeader("Date: " + formatDate(0, TimeUnit.SECONDS))
.addHeader("Cache-Control: max-age=60"));
}
@Test public void maxAgePreferredOverLowerSharedMaxAge() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("Date: " + formatDate(-2, TimeUnit.MINUTES))
.addHeader("Cache-Control: s-maxage=60")
.addHeader("Cache-Control: max-age=180"));
}
@Test public void maxAgePreferredOverHigherMaxAge() throws Exception {
assertNotCached(new MockResponse()
.addHeader("Date: " + formatDate(-2, TimeUnit.MINUTES))
.addHeader("Cache-Control: s-maxage=180")
.addHeader("Cache-Control: max-age=60"));
}
@Test public void requestMethodOptionsIsNotCached() throws Exception {
testRequestMethod("OPTIONS", false);
}
@Test public void requestMethodGetIsCached() throws Exception {
testRequestMethod("GET", true);
}
@Test public void requestMethodHeadIsNotCached() throws Exception {
// We could support this but choose not to for implementation simplicity
testRequestMethod("HEAD", false);
}
@Test public void requestMethodPostIsNotCached() throws Exception {
// We could support this but choose not to for implementation simplicity
testRequestMethod("POST", false);
}
@Test public void requestMethodPutIsNotCached() throws Exception {
testRequestMethod("PUT", false);
}
@Test public void requestMethodDeleteIsNotCached() throws Exception {
testRequestMethod("DELETE", false);
}
@Test public void requestMethodTraceIsNotCached() throws Exception {
testRequestMethod("TRACE", false);
}
private void testRequestMethod(String requestMethod, boolean expectCached) throws Exception {
// 1. seed the cache (potentially)
// 2. expect a cache hit or miss
server.enqueue(new MockResponse()
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.addHeader("X-Response-ID: 1"));
server.enqueue(new MockResponse()
.addHeader("X-Response-ID: 2"));
HttpUrl url = server.url("/");
Request request = new Request.Builder()
.url(url)
.method(requestMethod, requestBodyOrNull(requestMethod))
.build();
Response response1 = client.newCall(request).execute();
response1.body().close();
assertEquals("1", response1.header("X-Response-ID"));
Response response2 = get(url);
response2.body().close();
if (expectCached) {
assertEquals("1", response2.header("X-Response-ID"));
} else {
assertEquals("2", response2.header("X-Response-ID"));
}
}
private RequestBody requestBodyOrNull(String requestMethod) {
return (requestMethod.equals("POST") || requestMethod.equals("PUT"))
? RequestBody.create(MediaType.parse("text/plain"), "foo")
: null;
}
@Test public void postInvalidatesCache() throws Exception {
testMethodInvalidates("POST");
}
@Test public void putInvalidatesCache() throws Exception {
testMethodInvalidates("PUT");
}
@Test public void deleteMethodInvalidatesCache() throws Exception {
testMethodInvalidates("DELETE");
}
private void testMethodInvalidates(String requestMethod) throws Exception {
// 1. seed the cache
// 2. invalidate it
// 3. expect a cache miss
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS)));
server.enqueue(new MockResponse()
.setBody("B"));
server.enqueue(new MockResponse()
.setBody("C"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
Request request = new Request.Builder()
.url(url)
.method(requestMethod, requestBodyOrNull(requestMethod))
.build();
Response invalidate = client.newCall(request).execute();
assertEquals("B", invalidate.body().string());
assertEquals("C", get(url).body().string());
}
@Test public void postInvalidatesCacheWithUncacheableResponse() throws Exception {
// 1. seed the cache
// 2. invalidate it with uncacheable response
// 3. expect a cache miss
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS)));
server.enqueue(new MockResponse()
.setBody("B")
.setResponseCode(500));
server.enqueue(new MockResponse()
.setBody("C"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
Request request = new Request.Builder()
.url(url)
.method("POST", requestBodyOrNull("POST"))
.build();
Response invalidate = client.newCall(request).execute();
assertEquals("B", invalidate.body().string());
assertEquals("C", get(url).body().string());
}
@Test public void etag() throws Exception {
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("ETag: v1"));
assertEquals("v1", conditionalRequest.getHeader("If-None-Match"));
}
/** If both If-Modified-Since and If-None-Match conditions apply, send only If-None-Match. */
@Test public void etagAndExpirationDateInThePast() throws Exception {
String lastModifiedDate = formatDate(-2, TimeUnit.HOURS);
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("ETag: v1")
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS)));
assertEquals("v1", conditionalRequest.getHeader("If-None-Match"));
assertNull(conditionalRequest.getHeader("If-Modified-Since"));
}
@Test public void etagAndExpirationDateInTheFuture() throws Exception {
assertFullyCached(new MockResponse()
.addHeader("ETag: v1")
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS)));
}
@Test public void cacheControlNoCache() throws Exception {
assertNotCached(new MockResponse()
.addHeader("Cache-Control: no-cache"));
}
@Test public void cacheControlNoCacheAndExpirationDateInTheFuture() throws Exception {
String lastModifiedDate = formatDate(-2, TimeUnit.HOURS);
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.addHeader("Cache-Control: no-cache"));
assertEquals(lastModifiedDate, conditionalRequest.getHeader("If-Modified-Since"));
}
@Test public void pragmaNoCache() throws Exception {
assertNotCached(new MockResponse()
.addHeader("Pragma: no-cache"));
}
@Test public void pragmaNoCacheAndExpirationDateInTheFuture() throws Exception {
String lastModifiedDate = formatDate(-2, TimeUnit.HOURS);
RecordedRequest conditionalRequest = assertConditionallyCached(new MockResponse()
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.addHeader("Pragma: no-cache"));
assertEquals(lastModifiedDate, conditionalRequest.getHeader("If-Modified-Since"));
}
@Test public void cacheControlNoStore() throws Exception {
assertNotCached(new MockResponse()
.addHeader("Cache-Control: no-store"));
}
@Test public void cacheControlNoStoreAndExpirationDateInTheFuture() throws Exception {
assertNotCached(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.addHeader("Cache-Control: no-store"));
}
@Test public void partialRangeResponsesDoNotCorruptCache() throws Exception {
// 1. request a range
// 2. request a full document, expecting a cache miss
server.enqueue(new MockResponse()
.setBody("AA")
.setResponseCode(HttpURLConnection.HTTP_PARTIAL)
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS))
.addHeader("Content-Range: bytes 1000-1001/2000"));
server.enqueue(new MockResponse()
.setBody("BB"));
HttpUrl url = server.url("/");
Request request = new Request.Builder()
.url(url)
.header("Range", "bytes=1000-1001")
.build();
Response range = client.newCall(request).execute();
assertEquals("AA", range.body().string());
assertEquals("BB", get(url).body().string());
}
@Test public void serverReturnsDocumentOlderThanCache() throws Exception {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS)));
server.enqueue(new MockResponse()
.setBody("B")
.addHeader("Last-Modified: " + formatDate(-4, TimeUnit.HOURS)));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
assertEquals("A", get(url).body().string());
}
@Test public void clientSideNoStore() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("B"));
Request request1 = new Request.Builder()
.url(server.url("/"))
.cacheControl(new CacheControl.Builder().noStore().build())
.build();
Response response1 = client.newCall(request1).execute();
assertEquals("A", response1.body().string());
Request request2 = new Request.Builder()
.url(server.url("/"))
.build();
Response response2 = client.newCall(request2).execute();
assertEquals("B", response2.body().string());
}
@Test public void nonIdentityEncodingAndConditionalCache() throws Exception {
assertNonIdentityEncodingCached(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS)));
}
@Test public void nonIdentityEncodingAndFullCache() throws Exception {
assertNonIdentityEncodingCached(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS)));
}
private void assertNonIdentityEncodingCached(MockResponse response) throws Exception {
server.enqueue(response
.setBody(gzip("ABCABCABC"))
.addHeader("Content-Encoding: gzip"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
// At least three request/response pairs are required because after the first request is cached
// a different execution path might be taken. Thus modifications to the cache applied during
// the second request might not be visible until another request is performed.
assertEquals("ABCABCABC", get(server.url("/")).body().string());
assertEquals("ABCABCABC", get(server.url("/")).body().string());
assertEquals("ABCABCABC", get(server.url("/")).body().string());
}
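// The gzip(...) helper used above isn't shown in this excerpt (it is presumably
// defined further down the file). A sketch consistent with the okio imports at
// the top of the file, given a hypothetical name to avoid any clash:
private Buffer gzipSketch(String data) throws IOException {
  Buffer result = new Buffer();
  BufferedSink sink = Okio.buffer(new GzipSink(result));
  sink.writeUtf8(data);
  sink.close();
  return result;
}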
@Test public void notModifiedSpecifiesEncoding() throws Exception {
server.enqueue(new MockResponse()
.setBody(gzip("ABCABCABC"))
.addHeader("Content-Encoding: gzip")
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(-1, TimeUnit.HOURS)));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED)
.addHeader("Content-Encoding: gzip"));
server.enqueue(new MockResponse()
.setBody("DEFDEFDEF"));
assertEquals("ABCABCABC", get(server.url("/")).body().string());
assertEquals("ABCABCABC", get(server.url("/")).body().string());
assertEquals("DEFDEFDEF", get(server.url("/")).body().string());
}
/** https://github.com/square/okhttp/issues/947 */
@Test public void gzipAndVaryOnAcceptEncoding() throws Exception {
server.enqueue(new MockResponse()
.setBody(gzip("ABCABCABC"))
.addHeader("Content-Encoding: gzip")
.addHeader("Vary: Accept-Encoding")
.addHeader("Cache-Control: max-age=60"));
server.enqueue(new MockResponse()
.setBody("FAIL"));
assertEquals("ABCABCABC", get(server.url("/")).body().string());
assertEquals("ABCABCABC", get(server.url("/")).body().string());
}
@Test public void conditionalCacheHitIsNotDoublePooled() throws Exception {
server.enqueue(new MockResponse()
.addHeader("ETag: v1")
.setBody("A"));
server.enqueue(new MockResponse()
.clearHeaders()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
ConnectionPool pool = ConnectionPool.getDefault();
pool.evictAll();
client.setConnectionPool(pool);
assertEquals("A", get(server.url("/")).body().string());
assertEquals("A", get(server.url("/")).body().string());
assertEquals(1, client.getConnectionPool().getIdleConnectionCount());
}
@Test public void expiresDateBeforeModifiedDate() throws Exception {
assertConditionallyCached(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Expires: " + formatDate(-2, TimeUnit.HOURS)));
}
@Test public void requestMaxAge() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Last-Modified: " + formatDate(-2, TimeUnit.HOURS))
.addHeader("Date: " + formatDate(-1, TimeUnit.MINUTES))
.addHeader("Expires: " + formatDate(1, TimeUnit.HOURS)));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "max-age=30")
.build();
Response response = client.newCall(request).execute();
assertEquals("B", response.body().string());
}
@Test public void requestMinFresh() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=60")
.addHeader("Date: " + formatDate(0, TimeUnit.MINUTES)));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "min-fresh=120")
.build();
Response response = client.newCall(request).execute();
assertEquals("B", response.body().string());
}
@Test public void requestMaxStale() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=120")
.addHeader("Date: " + formatDate(-4, TimeUnit.MINUTES)));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "max-stale=180")
.build();
Response response = client.newCall(request).execute();
assertEquals("A", response.body().string());
assertEquals("110 HttpURLConnection \"Response is stale\"", response.header("Warning"));
}
@Test public void requestMaxStaleDirectiveWithNoValue() throws IOException {
// Add a stale response to the cache.
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=120")
.addHeader("Date: " + formatDate(-4, TimeUnit.MINUTES)));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
// With max-stale, we'll return that stale response.
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "max-stale")
.build();
Response response = client.newCall(request).execute();
assertEquals("A", response.body().string());
assertEquals("110 HttpURLConnection \"Response is stale\"", response.header("Warning"));
}
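// Sketch (illustrative, not part of the original suite): the staleness
// arithmetic behind the two max-stale tests above. The cached entry's Date is
// 4 minutes old with max-age=120, so its age is ~240s and its staleness ~120s;
// a request max-stale of 180s (or a bare max-stale, which accepts any
// staleness) tolerates that, so the cache answers and adds the 110
// "Response is stale" warning. The next test shows must-revalidate overriding this.
private static boolean servableWhenStale(long ageSeconds, long maxAgeSeconds, long maxStaleSeconds) {
  long staleness = ageSeconds - maxAgeSeconds;
  return staleness >= 0 && staleness <= maxStaleSeconds;
}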
@Test public void requestMaxStaleNotHonoredWithMustRevalidate() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=120, must-revalidate")
.addHeader("Date: " + formatDate(-4, TimeUnit.MINUTES)));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "max-stale=180")
.build();
Response response = client.newCall(request).execute();
assertEquals("B", response.body().string());
}
@Test public void requestOnlyIfCachedWithNoResponseCached() throws IOException {
// (no responses enqueued)
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "only-if-cached")
.build();
Response response = client.newCall(request).execute();
assertTrue(response.body().source().exhausted());
assertEquals(504, response.code());
assertEquals(1, cache.getRequestCount());
assertEquals(0, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
}
@Test public void requestOnlyIfCachedWithFullResponseCached() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=30")
.addHeader("Date: " + formatDate(0, TimeUnit.MINUTES)));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "only-if-cached")
.build();
Response response = client.newCall(request).execute();
assertEquals("A", response.body().string());
assertEquals(2, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(1, cache.getHitCount());
}
@Test public void requestOnlyIfCachedWithConditionalResponseCached() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=30")
.addHeader("Date: " + formatDate(-1, TimeUnit.MINUTES)));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "only-if-cached")
.build();
Response response = client.newCall(request).execute();
assertTrue(response.body().source().exhausted());
assertEquals(504, response.code());
assertEquals(2, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
}
@Test public void requestOnlyIfCachedWithUnhelpfulResponseCached() throws IOException {
server.enqueue(new MockResponse()
.setBody("A"));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/"))
.header("Cache-Control", "only-if-cached")
.build();
Response response = client.newCall(request).execute();
assertTrue(response.body().source().exhausted());
assertEquals(504, response.code());
assertEquals(2, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
}
@Test public void requestCacheControlNoCache() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-120, TimeUnit.SECONDS))
.addHeader("Date: " + formatDate(0, TimeUnit.SECONDS))
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
Request request = new Request.Builder()
.url(url)
.header("Cache-Control", "no-cache")
.build();
Response response = client.newCall(request).execute();
assertEquals("B", response.body().string());
}
@Test public void requestPragmaNoCache() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-120, TimeUnit.SECONDS))
.addHeader("Date: " + formatDate(0, TimeUnit.SECONDS))
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
Request request = new Request.Builder()
.url(url)
.header("Pragma", "no-cache")
.build();
Response response = client.newCall(request).execute();
assertEquals("B", response.body().string());
}
@Test public void clientSuppliedIfModifiedSinceWithCachedResult() throws Exception {
MockResponse response = new MockResponse()
.addHeader("ETag: v3")
.addHeader("Cache-Control: max-age=0");
String ifModifiedSinceDate = formatDate(-24, TimeUnit.HOURS);
RecordedRequest request =
assertClientSuppliedCondition(response, "If-Modified-Since", ifModifiedSinceDate);
assertEquals(ifModifiedSinceDate, request.getHeader("If-Modified-Since"));
assertNull(request.getHeader("If-None-Match"));
}
@Test public void clientSuppliedIfNoneMatchSinceWithCachedResult() throws Exception {
String lastModifiedDate = formatDate(-3, TimeUnit.MINUTES);
MockResponse response = new MockResponse()
.addHeader("Last-Modified: " + lastModifiedDate)
.addHeader("Date: " + formatDate(-2, TimeUnit.MINUTES))
.addHeader("Cache-Control: max-age=0");
RecordedRequest request = assertClientSuppliedCondition(response, "If-None-Match", "v1");
assertEquals("v1", request.getHeader("If-None-Match"));
assertNull(request.getHeader("If-Modified-Since"));
}
private RecordedRequest assertClientSuppliedCondition(MockResponse seed, String conditionName,
String conditionValue) throws Exception {
server.enqueue(seed.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
Request request = new Request.Builder()
.url(url)
.header(conditionName, conditionValue)
.build();
Response response = client.newCall(request).execute();
assertEquals(HttpURLConnection.HTTP_NOT_MODIFIED, response.code());
assertEquals("", response.body().string());
server.takeRequest(); // seed
return server.takeRequest();
}
/**
* For Last-Modified and Date headers, we should echo the date back in the
* exact format we were served.
*/
@Test public void retainServedDateFormat() throws Exception {
// Serve a response with a non-standard date format that OkHttp supports.
Date lastModifiedDate = new Date(System.currentTimeMillis() + TimeUnit.HOURS.toMillis(-1));
Date servedDate = new Date(System.currentTimeMillis() + TimeUnit.HOURS.toMillis(-2));
DateFormat dateFormat = new SimpleDateFormat("EEE dd-MMM-yyyy HH:mm:ss z", Locale.US);
dateFormat.setTimeZone(TimeZone.getTimeZone("EDT"));
String lastModifiedString = dateFormat.format(lastModifiedDate);
String servedString = dateFormat.format(servedDate);
// This response should be conditionally cached.
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + lastModifiedString)
.addHeader("Expires: " + servedString)
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
assertEquals("A", get(server.url("/")).body().string());
assertEquals("A", get(server.url("/")).body().string());
// The first request has no conditions.
RecordedRequest request1 = server.takeRequest();
assertNull(request1.getHeader("If-Modified-Since"));
// The 2nd request uses the server's date format.
RecordedRequest request2 = server.takeRequest();
assertEquals(lastModifiedString, request2.getHeader("If-Modified-Since"));
}
@Test public void clientSuppliedConditionWithoutCachedResult() throws Exception {
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
Request request = new Request.Builder()
.url(server.url("/"))
.header("If-Modified-Since", formatDate(-24, TimeUnit.HOURS))
.build();
Response response = client.newCall(request).execute();
assertEquals(HttpURLConnection.HTTP_NOT_MODIFIED, response.code());
assertEquals("", response.body().string());
}
@Test public void authorizationRequestFullyCached() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request request = new Request.Builder()
.url(url)
.header("Authorization", "password")
.build();
Response response = client.newCall(request).execute();
assertEquals("A", response.body().string());
assertEquals("A", get(url).body().string());
}
@Test public void contentLocationDoesNotPopulateCache() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Content-Location: /bar")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/foo")).body().string());
assertEquals("B", get(server.url("/bar")).body().string());
}
@Test public void connectionIsReturnedToPoolAfterConditionalSuccess() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/a")).body().string());
assertEquals("A", get(server.url("/a")).body().string());
assertEquals("B", get(server.url("/b")).body().string());
assertEquals(0, server.takeRequest().getSequenceNumber());
assertEquals(1, server.takeRequest().getSequenceNumber());
assertEquals(2, server.takeRequest().getSequenceNumber());
}
@Test public void statisticsConditionalCacheMiss() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
server.enqueue(new MockResponse()
.setBody("C"));
assertEquals("A", get(server.url("/")).body().string());
assertEquals(1, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
assertEquals("B", get(server.url("/")).body().string());
assertEquals("C", get(server.url("/")).body().string());
assertEquals(3, cache.getRequestCount());
assertEquals(3, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
}
@Test public void statisticsConditionalCacheHit() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
assertEquals("A", get(server.url("/")).body().string());
assertEquals(1, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
assertEquals("A", get(server.url("/")).body().string());
assertEquals("A", get(server.url("/")).body().string());
assertEquals(3, cache.getRequestCount());
assertEquals(3, cache.getNetworkCount());
assertEquals(2, cache.getHitCount());
}
@Test public void statisticsFullCacheHit() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
assertEquals("A", get(server.url("/")).body().string());
assertEquals(1, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(0, cache.getHitCount());
assertEquals("A", get(server.url("/")).body().string());
assertEquals("A", get(server.url("/")).body().string());
assertEquals(3, cache.getRequestCount());
assertEquals(1, cache.getNetworkCount());
assertEquals(2, cache.getHitCount());
}
@Test public void varyMatchesChangedRequestHeaderField() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request frRequest = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.build();
Response frResponse = client.newCall(frRequest).execute();
assertEquals("A", frResponse.body().string());
Request enRequest = new Request.Builder()
.url(url)
.header("Accept-Language", "en-US")
.build();
Response enResponse = client.newCall(enRequest).execute();
assertEquals("B", enResponse.body().string());
}
@Test public void varyMatchesUnchangedRequestHeaderField() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request request = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.build();
Response response1 = client.newCall(request).execute();
assertEquals("A", response1.body().string());
Request request1 = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.build();
Response response2 = client.newCall(request1).execute();
assertEquals("A", response2.body().string());
}
@Test public void varyMatchesAbsentRequestHeaderField() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Foo")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
assertEquals("A", get(server.url("/")).body().string());
}
@Test public void varyMatchesAddedRequestHeaderField() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Foo")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/")).header("Foo", "bar")
.build();
Response response = client.newCall(request).execute();
assertEquals("B", response.body().string());
}
@Test public void varyMatchesRemovedRequestHeaderField() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Foo")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
Request request = new Request.Builder()
.url(server.url("/")).header("Foo", "bar")
.build();
Response fooResponse = client.newCall(request).execute();
assertEquals("A", fooResponse.body().string());
assertEquals("B", get(server.url("/")).body().string());
}
@Test public void varyFieldsAreCaseInsensitive() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: ACCEPT-LANGUAGE")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request request = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.build();
Response response1 = client.newCall(request).execute();
assertEquals("A", response1.body().string());
Request request1 = new Request.Builder()
.url(url)
.header("accept-language", "fr-CA")
.build();
Response response2 = client.newCall(request1).execute();
assertEquals("A", response2.body().string());
}
@Test public void varyMultipleFieldsWithMatch() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language, Accept-Charset")
.addHeader("Vary: Accept-Encoding")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request request = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.header("Accept-Charset", "UTF-8")
.header("Accept-Encoding", "identity")
.build();
Response response1 = client.newCall(request).execute();
assertEquals("A", response1.body().string());
Request request1 = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.header("Accept-Charset", "UTF-8")
.header("Accept-Encoding", "identity")
.build();
Response response2 = client.newCall(request1).execute();
assertEquals("A", response2.body().string());
}
@Test public void varyMultipleFieldsWithNoMatch() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language, Accept-Charset")
.addHeader("Vary: Accept-Encoding")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request frRequest = new Request.Builder()
.url(url)
.header("Accept-Language", "fr-CA")
.header("Accept-Charset", "UTF-8")
.header("Accept-Encoding", "identity")
.build();
Response frResponse = client.newCall(frRequest).execute();
assertEquals("A", frResponse.body().string());
Request enRequest = new Request.Builder()
.url(url)
.header("Accept-Language", "en-CA")
.header("Accept-Charset", "UTF-8")
.header("Accept-Encoding", "identity")
.build();
Response enResponse = client.newCall(enRequest).execute();
assertEquals("B", enResponse.body().string());
}
@Test public void varyMultipleFieldValuesWithMatch() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request request1 = new Request.Builder()
.url(url)
.addHeader("Accept-Language", "fr-CA, fr-FR")
.addHeader("Accept-Language", "en-US")
.build();
Response response1 = client.newCall(request1).execute();
assertEquals("A", response1.body().string());
Request request2 = new Request.Builder()
.url(url)
.addHeader("Accept-Language", "fr-CA, fr-FR")
.addHeader("Accept-Language", "en-US")
.build();
Response response2 = client.newCall(request2).execute();
assertEquals("A", response2.body().string());
}
@Test public void varyMultipleFieldValuesWithNoMatch() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
Request request1 = new Request.Builder()
.url(url)
.addHeader("Accept-Language", "fr-CA, fr-FR")
.addHeader("Accept-Language", "en-US")
.build();
Response response1 = client.newCall(request1).execute();
assertEquals("A", response1.body().string());
Request request2 = new Request.Builder()
.url(url)
.addHeader("Accept-Language", "fr-CA")
.addHeader("Accept-Language", "en-US")
.build();
Response response2 = client.newCall(request2).execute();
assertEquals("B", response2.body().string());
}
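// "Vary: *" never matches a stored response, so every request goes to the network.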
@Test public void varyAsterisk() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: *")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
assertEquals("A", get(server.url("/")).body().string());
assertEquals("B", get(server.url("/")).body().string());
}
@Test public void varyAndHttps() throws Exception {
server.useHttps(sslContext.getSocketFactory(), false);
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.addHeader("Vary: Accept-Language")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
client.setSslSocketFactory(sslContext.getSocketFactory());
client.setHostnameVerifier(NULL_HOSTNAME_VERIFIER);
HttpUrl url = server.url("/");
Request request1 = new Request.Builder()
.url(url)
.header("Accept-Language", "en-US")
.build();
Response response1 = client.newCall(request1).execute();
assertEquals("A", response1.body().string());
Request request2 = new Request.Builder()
.url(url)
.header("Accept-Language", "en-US")
.build();
Response response2 = client.newCall(request2).execute();
assertEquals("A", response2.body().string());
}
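// Headers on the 304 validation response (including Set-Cookie) are applied
// even though the body itself is served from the cache.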
@Test public void cachePlusCookies() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Set-Cookie: a=FIRST; domain=" + server.getCookieDomain() + ";")
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.addHeader("Set-Cookie: a=SECOND; domain=" + server.getCookieDomain() + ";")
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
assertCookies(url, "a=FIRST");
assertEquals("A", get(url).body().string());
assertCookies(url, "a=SECOND");
}
@Test public void getHeadersReturnsNetworkEndToEndHeaders() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Allow: GET, HEAD")
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.addHeader("Allow: GET, HEAD, PUT")
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
Response response1 = get(server.url("/"));
assertEquals("A", response1.body().string());
assertEquals("GET, HEAD", response1.header("Allow"));
Response response2 = get(server.url("/"));
assertEquals("A", response2.body().string());
assertEquals("GET, HEAD, PUT", response2.header("Allow"));
}
@Test public void getHeadersReturnsCachedHopByHopHeaders() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Transfer-Encoding: identity")
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.addHeader("Transfer-Encoding: none")
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
Response response1 = get(server.url("/"));
assertEquals("A", response1.body().string());
assertEquals("identity", response1.header("Transfer-Encoding"));
Response response2 = get(server.url("/"));
assertEquals("A", response2.body().string());
assertEquals("identity", response2.header("Transfer-Encoding"));
}
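// Per HTTP caching rules, 1xx Warning codes are dropped when a stored response
// is revalidated, while 2xx Warning codes are retained.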
@Test public void getHeadersDeletesCached100LevelWarnings() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Warning: 199 test danger")
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
Response response1 = get(server.url("/"));
assertEquals("A", response1.body().string());
assertEquals("199 test danger", response1.header("Warning"));
Response response2 = get(server.url("/"));
assertEquals("A", response2.body().string());
assertEquals(null, response2.header("Warning"));
}
@Test public void getHeadersRetainsCached200LevelWarnings() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Warning: 299 test danger")
.addHeader("Last-Modified: " + formatDate(-1, TimeUnit.HOURS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
Response response1 = get(server.url("/"));
assertEquals("A", response1.body().string());
assertEquals("299 test danger", response1.header("Warning"));
Response response2 = get(server.url("/"));
assertEquals("A", response2.body().string());
assertEquals("299 test danger", response2.header("Warning"));
}
public void assertCookies(HttpUrl url, String... expectedCookies) throws Exception {
List<String> actualCookies = new ArrayList<>();
for (HttpCookie cookie : cookieManager.getCookieStore().get(url.uri())) {
actualCookies.add(cookie.toString());
}
assertEquals(Arrays.asList(expectedCookies), actualCookies);
}
@Test public void doNotCachePartialResponse() throws Exception {
assertNotCached(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_PARTIAL)
.addHeader("Date: " + formatDate(0, TimeUnit.HOURS))
.addHeader("Content-Range: bytes 100-100/200")
.addHeader("Cache-Control: max-age=60"));
}
@Test public void conditionalHitUpdatesCache() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Last-Modified: " + formatDate(0, TimeUnit.SECONDS))
.addHeader("Cache-Control: max-age=0")
.setBody("A"));
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=30")
.addHeader("Allow: GET, HEAD")
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
server.enqueue(new MockResponse()
.setBody("B"));
// cache miss; seed the cache
Response response1 = get(server.url("/a"));
assertEquals("A", response1.body().string());
assertEquals(null, response1.header("Allow"));
// conditional cache hit; update the cache
Response response2 = get(server.url("/a"));
assertEquals(HttpURLConnection.HTTP_OK, response2.code());
assertEquals("A", response2.body().string());
assertEquals("GET, HEAD", response2.header("Allow"));
// full cache hit
Response response3 = get(server.url("/a"));
assertEquals("A", response3.body().string());
assertEquals("GET, HEAD", response3.header("Allow"));
assertEquals(2, server.getRequestCount());
}
@Test public void responseSourceHeaderCached() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=30")
.addHeader("Date: " + formatDate(0, TimeUnit.MINUTES)));
assertEquals("A", get(server.url("/")).body().string());
Request request = new Request.Builder()
.url(server.url("/")).header("Cache-Control", "only-if-cached")
.build();
Response response = client.newCall(request).execute();
assertEquals("A", response.body().string());
}
@Test public void responseSourceHeaderConditionalCacheFetched() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=30")
.addHeader("Date: " + formatDate(-31, TimeUnit.MINUTES)));
server.enqueue(new MockResponse()
.setBody("B")
.addHeader("Cache-Control: max-age=30")
.addHeader("Date: " + formatDate(0, TimeUnit.MINUTES)));
assertEquals("A", get(server.url("/")).body().string());
Response response = get(server.url("/"));
assertEquals("B", response.body().string());
}
@Test public void responseSourceHeaderConditionalCacheNotFetched() throws IOException {
server.enqueue(new MockResponse()
.setBody("A")
.addHeader("Cache-Control: max-age=0")
.addHeader("Date: " + formatDate(0, TimeUnit.MINUTES)));
server.enqueue(new MockResponse()
.setResponseCode(304));
assertEquals("A", get(server.url("/")).body().string());
Response response = get(server.url("/"));
assertEquals("A", response.body().string());
}
@Test public void responseSourceHeaderFetched() throws IOException {
server.enqueue(new MockResponse()
.setBody("A"));
Response response = get(server.url("/"));
assertEquals("A", response.body().string());
}
@Test public void emptyResponseHeaderNameFromCacheIsLenient() throws Exception {
Headers.Builder headers = new Headers.Builder()
.add("Cache-Control: max-age=120");
Internal.instance.addLenient(headers, ": A");
server.enqueue(new MockResponse()
.setHeaders(headers.build())
.setBody("body"));
Response response = get(server.url("/"));
assertEquals("A", response.header(""));
assertEquals("body", response.body().string());
}
/**
* Old implementations of OkHttp's response cache wrote header fields like
* ":status: 200 OK". This broke our cached response parser because it split
* on the first colon. This regression test exists to help us read these old
* bad cache entries.
*
* https://github.com/square/okhttp/issues/227
*/
@Test public void testGoldenCacheResponse() throws Exception {
cache.close();
server.enqueue(new MockResponse()
.clearHeaders()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
HttpUrl url = server.url("/");
String urlKey = Util.md5Hex(url.toString());
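// Hand-write a legacy cache entry: URL, request method, request header count,
// status line, response header count, response headers, then TLS session data.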
String entryMetadata = ""
+ "" + url + "\n"
+ "GET\n"
+ "0\n"
+ "HTTP/1.1 200 OK\n"
+ "7\n"
+ ":status: 200 OK\n"
+ ":version: HTTP/1.1\n"
+ "etag: foo\n"
+ "content-length: 3\n"
+ "OkHttp-Received-Millis: " + System.currentTimeMillis() + "\n"
+ "X-Android-Response-Source: NETWORK 200\n"
+ "OkHttp-Sent-Millis: " + System.currentTimeMillis() + "\n"
+ "\n"
+ "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA\n"
+ "1\n"
+ "MIIBpDCCAQ2gAwIBAgIBATANBgkqhkiG9w0BAQsFADAYMRYwFAYDVQQDEw1qd2lsc29uLmxvY2FsMB4XDTEzMDgy"
+ "OTA1MDE1OVoXDTEzMDgzMDA1MDE1OVowGDEWMBQGA1UEAxMNandpbHNvbi5sb2NhbDCBnzANBgkqhkiG9w0BAQEF"
+ "AAOBjQAwgYkCgYEAlFW+rGo/YikCcRghOyKkJanmVmJSce/p2/jH1QvNIFKizZdh8AKNwojt3ywRWaDULA/RlCUc"
+ "ltF3HGNsCyjQI/+Lf40x7JpxXF8oim1E6EtDoYtGWAseelawus3IQ13nmo6nWzfyCA55KhAWf4VipelEy8DjcuFK"
+ "v6L0xwXnI0ECAwEAATANBgkqhkiG9w0BAQsFAAOBgQAuluNyPo1HksU3+Mr/PyRQIQS4BI7pRXN8mcejXmqyscdP"
+ "7S6J21FBFeRR8/XNjVOp4HT9uSc2hrRtTEHEZCmpyoxixbnM706ikTmC7SN/GgM+SmcoJ1ipJcNcl8N0X6zym4dm"
+ "yFfXKHu2PkTo7QFdpOJFvP3lIigcSZXozfmEDg==\n"
+ "-1\n";
String entryBody = "abc";
String journalBody = ""
+ "libcore.io.DiskLruCache\n"
+ "1\n"
+ "201105\n"
+ "2\n"
+ "\n"
+ "CLEAN " + urlKey + " " + entryMetadata.length() + " " + entryBody.length() + "\n";
writeFile(cache.getDirectory(), urlKey + ".0", entryMetadata);
writeFile(cache.getDirectory(), urlKey + ".1", entryBody);
writeFile(cache.getDirectory(), "journal", journalBody);
cache = new Cache(cache.getDirectory(), Integer.MAX_VALUE, fileSystem);
client.setCache(cache);
Response response = get(url);
assertEquals(entryBody, response.body().string());
assertEquals("3", response.header("Content-Length"));
assertEquals("foo", response.header("etag"));
}
@Test public void evictAll() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
client.getCache().evictAll();
assertEquals(0, client.getCache().getSize());
assertEquals("B", get(url).body().string());
}
@Test public void networkInterceptorInvokedForConditionalGet() throws Exception {
server.enqueue(new MockResponse()
.addHeader("ETag: v1")
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
// Seed the cache.
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
final AtomicReference<String> ifNoneMatch = new AtomicReference<>();
client.networkInterceptors().add(new Interceptor() {
@Override public Response intercept(Chain chain) throws IOException {
ifNoneMatch.compareAndSet(null, chain.request().header("If-None-Match"));
return chain.proceed(chain.request());
}
});
// Confirm the value is cached and intercepted.
assertEquals("A", get(url).body().string());
assertEquals("v1", ifNoneMatch.get());
}
@Test public void networkInterceptorNotInvokedForFullyCached() throws Exception {
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("A"));
// Seed the cache.
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
// Confirm the interceptor isn't exercised.
client.networkInterceptors().add(new Interceptor() {
@Override public Response intercept(Chain chain) throws IOException {
throw new AssertionError();
}
});
assertEquals("A", get(url).body().string());
}
@Test public void iterateCache() throws Exception {
// Put some responses in the cache.
server.enqueue(new MockResponse()
.setBody("a"));
HttpUrl urlA = server.url("/a");
assertEquals("a", get(urlA).body().string());
server.enqueue(new MockResponse()
.setBody("b"));
HttpUrl urlB = server.url("/b");
assertEquals("b", get(urlB).body().string());
server.enqueue(new MockResponse()
.setBody("c"));
HttpUrl urlC = server.url("/c");
assertEquals("c", get(urlC).body().string());
// Confirm the iterator returns those responses...
Iterator<String> i = cache.urls();
assertTrue(i.hasNext());
assertEquals(urlA.toString(), i.next());
assertTrue(i.hasNext());
assertEquals(urlB.toString(), i.next());
assertTrue(i.hasNext());
assertEquals(urlC.toString(), i.next());
// ... and nothing else.
assertFalse(i.hasNext());
try {
i.next();
fail();
} catch (NoSuchElementException expected) {
}
}
@Test public void iteratorRemoveFromCache() throws Exception {
// Put a response in the cache.
server.enqueue(new MockResponse()
.addHeader("Cache-Control: max-age=60")
.setBody("a"));
HttpUrl url = server.url("/a");
assertEquals("a", get(url).body().string());
// Remove it with iteration.
Iterator<String> i = cache.urls();
assertEquals(url.toString(), i.next());
i.remove();
// Confirm that subsequent requests suffer a cache miss.
server.enqueue(new MockResponse()
.setBody("b"));
assertEquals("b", get(url).body().string());
}
@Test public void iteratorRemoveWithoutNextThrows() throws Exception {
// Put a response in the cache.
server.enqueue(new MockResponse()
.setBody("a"));
HttpUrl url = server.url("/a");
assertEquals("a", get(url).body().string());
Iterator<String> i = cache.urls();
assertTrue(i.hasNext());
try {
i.remove();
fail();
} catch (IllegalStateException expected) {
}
}
@Test public void iteratorRemoveOncePerCallToNext() throws Exception {
// Put a response in the cache.
server.enqueue(new MockResponse()
.setBody("a"));
HttpUrl url = server.url("/a");
assertEquals("a", get(url).body().string());
Iterator<String> i = cache.urls();
assertEquals(url.toString(), i.next());
i.remove();
// Too many calls to remove().
try {
i.remove();
fail();
} catch (IllegalStateException expected) {
}
}
@Test public void elementEvictedBetweenHasNextAndNext() throws Exception {
// Put a response in the cache.
server.enqueue(new MockResponse()
.setBody("a"));
HttpUrl url = server.url("/a");
assertEquals("a", get(url).body().string());
// The URL will remain available if hasNext() returned true...
Iterator<String> i = cache.urls();
assertTrue(i.hasNext());
// ...so even when we evict the element, we still get something back.
cache.evictAll();
assertEquals(url.toString(), i.next());
// Remove does nothing. But most importantly, it doesn't throw!
i.remove();
}
@Test public void elementEvictedBeforeHasNextIsOmitted() throws Exception {
// Put a response in the cache.
server.enqueue(new MockResponse()
.setBody("a"));
HttpUrl url = server.url("/a");
assertEquals("a", get(url).body().string());
Iterator<String> i = cache.urls();
cache.evictAll();
// The URL was evicted before hasNext() made any promises.
assertFalse(i.hasNext());
try {
i.next();
fail();
} catch (NoSuchElementException expected) {
}
}
/** Test https://github.com/square/okhttp/issues/1712. */
@Test public void conditionalMissUpdatesCache() throws Exception {
server.enqueue(new MockResponse()
.addHeader("ETag: v1")
.setBody("A"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
server.enqueue(new MockResponse()
.addHeader("ETag: v2")
.setBody("B"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
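// A conditional GET that returns new content (a cache miss) replaces the stored
// entry, so subsequent validations use the new ETag.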
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
assertEquals("A", get(url).body().string());
assertEquals("B", get(url).body().string());
assertEquals("B", get(url).body().string());
assertEquals(null, server.takeRequest().getHeader("If-None-Match"));
assertEquals("v1", server.takeRequest().getHeader("If-None-Match"));
assertEquals("v1", server.takeRequest().getHeader("If-None-Match"));
assertEquals("v2", server.takeRequest().getHeader("If-None-Match"));
}
private Response get(HttpUrl url) throws IOException {
Request request = new Request.Builder()
.url(url)
.build();
return client.newCall(request).execute();
}
private void writeFile(File directory, String file, String content) throws IOException {
BufferedSink sink = Okio.buffer(fileSystem.sink(new File(directory, file)));
sink.writeUtf8(content);
sink.close();
}
/**
* @param delta the offset from the current date to use. Negative
* values yield dates in the past; positive values yield dates in the
* future.
* @param timeUnit the unit in which {@code delta} is expressed.
*/
private String formatDate(long delta, TimeUnit timeUnit) {
return formatDate(new Date(System.currentTimeMillis() + timeUnit.toMillis(delta)));
}
private String formatDate(Date date) {
DateFormat rfc1123 = new SimpleDateFormat("EEE, dd MMM yyyy HH:mm:ss zzz", Locale.US);
rfc1123.setTimeZone(TimeZone.getTimeZone("GMT"));
return rfc1123.format(date);
}
private void assertNotCached(MockResponse response) throws Exception {
server.enqueue(response.setBody("A"));
server.enqueue(new MockResponse()
.setBody("B"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
assertEquals("B", get(url).body().string());
}
/** @return the request with the conditional get headers. */
private RecordedRequest assertConditionallyCached(MockResponse response) throws Exception {
// scenario 1: condition succeeds
server.enqueue(response.setBody("A").setStatus("HTTP/1.1 200 A-OK"));
server.enqueue(new MockResponse()
.setResponseCode(HttpURLConnection.HTTP_NOT_MODIFIED));
// scenario 2: condition fails
server.enqueue(response.setBody("B")
.setStatus("HTTP/1.1 200 B-OK"));
server.enqueue(new MockResponse()
.setStatus("HTTP/1.1 200 C-OK")
.setBody("C"));
HttpUrl valid = server.url("/valid");
Response response1 = get(valid);
assertEquals("A", response1.body().string());
assertEquals(HttpURLConnection.HTTP_OK, response1.code());
assertEquals("A-OK", response1.message());
Response response2 = get(valid);
assertEquals("A", response2.body().string());
assertEquals(HttpURLConnection.HTTP_OK, response2.code());
assertEquals("A-OK", response2.message());
HttpUrl invalid = server.url("/invalid");
Response response3 = get(invalid);
assertEquals("B", response3.body().string());
assertEquals(HttpURLConnection.HTTP_OK, response3.code());
assertEquals("B-OK", response3.message());
Response response4 = get(invalid);
assertEquals("C", response4.body().string());
assertEquals(HttpURLConnection.HTTP_OK, response4.code());
assertEquals("C-OK", response4.message());
server.takeRequest(); // regular get
return server.takeRequest(); // conditional get
}
private void assertFullyCached(MockResponse response) throws Exception {
server.enqueue(response.setBody("A"));
server.enqueue(response.setBody("B"));
HttpUrl url = server.url("/");
assertEquals("A", get(url).body().string());
assertEquals("A", get(url).body().string());
}
/**
* Shortens the body of {@code response} but not the corresponding headers.
* Only useful to test how clients respond to the premature conclusion of
* the HTTP body.
*/
private MockResponse truncateViolently(MockResponse response, int numBytesToKeep) {
response.setSocketPolicy(DISCONNECT_AT_END);
Headers headers = response.getHeaders();
Buffer truncatedBody = new Buffer();
truncatedBody.write(response.getBody(), numBytesToKeep);
response.setBody(truncatedBody);
response.setHeaders(headers);
return response;
}
enum TransferKind {
CHUNKED() {
@Override void setBody(MockResponse response, Buffer content, int chunkSize)
throws IOException {
response.setChunkedBody(content, chunkSize);
}
},
FIXED_LENGTH() {
@Override void setBody(MockResponse response, Buffer content, int chunkSize) {
response.setBody(content);
}
},
END_OF_STREAM() {
@Override void setBody(MockResponse response, Buffer content, int chunkSize) {
response.setBody(content);
response.setSocketPolicy(DISCONNECT_AT_END);
response.removeHeader("Content-Length");
}
};
abstract void setBody(MockResponse response, Buffer content, int chunkSize) throws IOException;
void setBody(MockResponse response, String content, int chunkSize) throws IOException {
setBody(response, new Buffer().writeUtf8(content), chunkSize);
}
}
/** Returns a gzipped copy of {@code data}. */
public Buffer gzip(String data) throws IOException {
Buffer result = new Buffer();
BufferedSink sink = Okio.buffer(new GzipSink(result));
sink.writeUtf8(data);
sink.close();
return result;
}
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.misiontic.project01.ordenamientojava;
import java.util.Arrays;
/**
*
* @author RUIB04
*/
public class Main {
public static void main(String[] args) {
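// Demo: split the input array in half, sort the first half ascending and the
// second half descending, then concatenate the two halves. Assumes the input
// length is even.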
int[] vector = {0, 2, 1, 65, 66, 78, 12, 11, 90, 13};
int[] vectorA = new int[vector.length/2];
int[] vectorB = new int[vector.length/2];
int[] vectorC = new int[vectorB.length];
int[] vectorOrdenado = new int[vectorA.length + vectorC.length];
System.arraycopy(vector,0,vectorA,0,vectorA.length);
System.arraycopy(vector,vector.length/2,vectorB,0,vectorB.length);
for(int n : vector) {
System.out.println(n);
}
System.out.println("Array A");
Arrays.sort(vectorA);
for(int n : vectorA) {
System.out.println(n);
}
System.out.println("Array B");
Arrays.sort(vectorB);
for(int n : vectorB) {
System.out.println(n);
}
System.out.println("Array C");
// vectorB is already sorted ascending; copy it into vectorC in reverse order
// to produce a descending copy.
for (int i = vectorB.length - 1; i >= 0; i--) {
vectorC[(vectorB.length - 1) - i] = vectorB[i];
}
for(int n : vectorC) {
System.out.println(n);
}
System.out.println("Array Ordenado");
System.arraycopy(vectorA,0,vectorOrdenado,0,vectorA.length);
System.arraycopy(vectorC,0,vectorOrdenado,vectorA.length,vectorB.length);
for(int n : vectorOrdenado) {
System.out.println(n);
}
}
}
|
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.streamnative.pulsar.handlers.mqtt.mqtt3.fusesource.base;
import static org.mockito.Mockito.verify;
import com.google.gson.Gson;
import com.google.gson.internal.LinkedTreeMap;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.streamnative.pulsar.handlers.mqtt.MQTTServerConfiguration;
import io.streamnative.pulsar.handlers.mqtt.base.MQTTTestBase;
import io.streamnative.pulsar.handlers.mqtt.mqtt3.fusesource.psk.PSKClient;
import io.streamnative.pulsar.handlers.mqtt.utils.PulsarTopicUtils;
import java.io.BufferedReader;
import java.io.EOFException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.UUID;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import lombok.SneakyThrows;
import lombok.extern.slf4j.Slf4j;
import org.apache.http.HttpResponse;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.HttpClientBuilder;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.common.naming.TopicDomain;
import org.awaitility.Awaitility;
import org.fusesource.mqtt.client.BlockingConnection;
import org.fusesource.mqtt.client.MQTT;
import org.fusesource.mqtt.client.Message;
import org.fusesource.mqtt.client.QoS;
import org.fusesource.mqtt.client.Topic;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;
/**
* Simple integration tests for MQTT protocol handler.
*/
@Slf4j
public class SimpleIntegrationTest extends MQTTTestBase {
private final int numMessages = 1000;
@Override
protected MQTTServerConfiguration initConfig() throws Exception {
MQTTServerConfiguration mqtt = super.initConfig();
mqtt.setTlsPskEnabled(true);
mqtt.setTlsPskIdentityHint("alpha");
mqtt.setTlsPskIdentity("mqtt:mqtt123");
return mqtt;
}
@Test(dataProvider = "mqttTopicNames", timeOut = TIMEOUT)
public void testSimpleMqttPubAndSubQos0(String topicName) throws Exception {
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_MOST_ONCE) };
connection.subscribe(topics);
String message = "Hello MQTT";
connection.publish(topicName, message.getBytes(), QoS.AT_MOST_ONCE, false);
Message received = connection.receive();
Assert.assertEquals(received.getTopic(), topicName);
Assert.assertEquals(new String(received.getPayload()), message);
received.ack();
connection.disconnect();
}
@Test(dataProvider = "mqttTopicNames", timeOut = TIMEOUT)
public void testSimpleMqttPubAndSubQos1(String topicName) throws Exception {
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_LEAST_ONCE) };
connection.subscribe(topics);
String message = "Hello MQTT";
connection.publish(topicName, message.getBytes(), QoS.AT_LEAST_ONCE, false);
Message received = connection.receive();
Assert.assertEquals(received.getTopic(), topicName);
Assert.assertEquals(new String(received.getPayload()), message);
received.ack();
connection.disconnect();
}
@Test(dataProvider = "mqttTopicNames", timeOut = TIMEOUT)
public void testSendByMqttAndReceiveByPulsar(String topic) throws Exception {
Consumer<byte[]> consumer = pulsarClient.newConsumer()
.topic(PulsarTopicUtils.getEncodedPulsarTopicName(topic, "public", "default", TopicDomain.persistent))
.subscriptionName("my-sub")
.subscribe();
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
String message = "Hello MQTT";
connection.publish(topic, message.getBytes(), QoS.AT_LEAST_ONCE, false);
org.apache.pulsar.client.api.Message<byte[]> received = consumer.receive();
Assert.assertNotNull(received);
Assert.assertEquals(new String(received.getValue()), message);
consumer.acknowledge(received);
consumer.close();
connection.disconnect();
}
@Test(dataProvider = "batchEnabled", timeOut = TIMEOUT)
public void testSendByPulsarAndReceiveByMqtt(boolean batchEnabled) throws Exception {
final String topicName = "persistent://public/default/testSendByPulsarAndReceiveByMqtt";
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_LEAST_ONCE) };
connection.subscribe(topics);
Producer<String> producer = pulsarClient.newProducer(Schema.STRING)
.topic(topicName)
.enableBatching(batchEnabled)
.create();
String message = "Hello MQTT";
producer.newMessage().value(message).sendAsync();
Message received = connection.receive();
Assert.assertEquals(received.getTopic(), topicName);
Assert.assertEquals(new String(received.getPayload()), message);
received.ack();
connection.disconnect();
producer.close();
}
@Test(timeOut = TIMEOUT)
public void testBacklogShouldBeZeroWithQos0() throws Exception {
final String topicName = "persistent://public/default/testBacklogShouldBeZeroWithQos0";
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_MOST_ONCE) };
connection.subscribe(topics);
String message = "Hello MQTT";
for (int i = 0; i < numMessages; i++) {
connection.publish(topicName, (message + i).getBytes(), QoS.AT_MOST_ONCE, false);
}
for (int i = 0; i < numMessages; i++) {
Message received = connection.receive();
Assert.assertEquals(new String(received.getPayload()), (message + i));
}
Assert.assertEquals(admin.topics().getStats(topicName).getSubscriptions().size(), 1);
Assert.assertEquals(admin.topics().getStats(topicName)
.getSubscriptions().entrySet().iterator().next().getValue().getMsgBacklog(), 0);
connection.disconnect();
}
@Test(timeOut = TIMEOUT)
public void testBacklogShouldBeZeroWithQos1() throws Exception {
final String topicName = "persistent://public/default/testBacklogShouldBeZeroWithQos1";
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_LEAST_ONCE) };
connection.subscribe(topics);
String message = "Hello MQTT";
for (int i = 0; i < numMessages; i++) {
connection.publish(topicName, (message + i).getBytes(), QoS.AT_LEAST_ONCE, false);
}
for (int i = 0; i < numMessages; i++) {
Message received = connection.receive();
Assert.assertEquals(new String(received.getPayload()), (message + i));
received.ack();
}
Thread.sleep(1000);
Assert.assertEquals(admin.topics().getStats(topicName).getSubscriptions().size(), 1);
Assert.assertEquals(admin.topics().getStats(topicName)
.getSubscriptions().entrySet().iterator().next().getValue().getMsgBacklog(), 0);
connection.disconnect();
}
@Test(timeOut = TIMEOUT)
public void testBacklogShouldBeZeroWithQos0AndSendByPulsar() throws Exception {
final String topicName = "persistent://public/default/testBacklogShouldBeZeroWithQos0AndSendByPulsar-";
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_MOST_ONCE) };
connection.subscribe(topics);
String message = "Hello MQTT";
Producer<String> producer = pulsarClient.newProducer(Schema.STRING)
.topic(topicName)
.blockIfQueueFull(true)
.enableBatching(false)
.create();
for (int i = 0; i < numMessages; i++) {
producer.sendAsync(message + i);
}
for (int i = 0; i < numMessages; i++) {
Message received = connection.receive();
Assert.assertEquals(new String(received.getPayload()), (message + i));
}
Assert.assertEquals(admin.topics().getStats(topicName).getSubscriptions().size(), 1);
Assert.assertEquals(admin.topics().getStats(topicName)
.getSubscriptions().entrySet().iterator().next().getValue().getMsgBacklog(), 0);
connection.disconnect();
}
@Test(timeOut = TIMEOUT)
public void testBacklogShouldBeZeroWithQos1AndSendByPulsar() throws Exception {
final String topicName = "persistent://public/default/testBacklogShouldBeZeroWithQos1AndSendByPulsar";
MQTT mqtt = createMQTTClient();
BlockingConnection connection = mqtt.blockingConnection();
connection.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_LEAST_ONCE) };
connection.subscribe(topics);
String message = "Hello MQTT";
Producer<String> producer = pulsarClient.newProducer(Schema.STRING)
.topic(topicName)
.blockIfQueueFull(true)
.enableBatching(false)
.create();
for (int i = 0; i < numMessages; i++) {
producer.sendAsync(message + i);
}
for (int i = 0; i < numMessages; i++) {
Message received = connection.receive();
Assert.assertEquals(received.getTopic(), topicName);
Assert.assertEquals(new String(received.getPayload()), (message + i));
received.ack();
}
Assert.assertEquals(admin.topics().getStats(topicName).getSubscriptions().size(), 1);
Awaitility.await().atMost(3, TimeUnit.SECONDS).untilAsserted(() ->
Assert.assertEquals(admin.topics().getStats(topicName)
.getSubscriptions().entrySet().iterator().next().getValue().getMsgBacklog(), 0));
connection.disconnect();
}
@Test(timeOut = TIMEOUT)
public void testSubscribeRejectionWithSameClientId() throws Exception {
final String topicName = "persistent://public/default/testSubscribeWithSameClientId";
MQTT mqtt = createMQTTClient();
mqtt.setClientId("client-id-0");
BlockingConnection connection1 = mqtt.blockingConnection();
connection1.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_LEAST_ONCE) };
connection1.subscribe(topics);
Assert.assertTrue(connection1.isConnected());
BlockingConnection connection2;
MQTT mqtt2 = createMQTTClient();
mqtt2.setClientId("client-id-0");
connection2 = mqtt2.blockingConnection();
connection2.connect();
Assert.assertTrue(connection2.isConnected());
Awaitility.await().untilAsserted(() -> Assert.assertTrue(connection1.isConnected()));
connection2.subscribe(topics);
connection2.disconnect();
connection1.disconnect();
}
@Test(timeOut = TIMEOUT)
public void testSubscribeWithSameClientId() throws Exception {
final String topicName = "persistent://public/default/testSubscribeWithSameClientId";
MQTT mqtt = createMQTTClient();
mqtt.setClientId("client-id-1");
BlockingConnection connection1 = mqtt.blockingConnection();
connection1.connect();
Topic[] topics = { new Topic(topicName, QoS.AT_LEAST_ONCE) };
connection1.subscribe(topics);
Assert.assertTrue(connection1.isConnected());
connection1.disconnect();
Awaitility.await().atMost(3, TimeUnit.SECONDS).untilAsserted(() ->
Assert.assertFalse(connection1.isConnected()));
BlockingConnection connection2 = mqtt.blockingConnection();
connection2.connect();
connection2.subscribe(topics);
Assert.assertTrue(connection2.isConnected());
connection2.disconnect();
}
@Test
public void testSubscribeWithTopicFilter() throws Exception {
String t1 = "a/b/c";
String t2 = "a/b/c/d";
MQTT mqtt0 = createMQTTClient();
BlockingConnection connection0 = mqtt0.blockingConnection();
connection0.connect();
Topic[] topics0 = { new Topic(t1, QoS.AT_LEAST_ONCE), new Topic(t2, QoS.AT_LEAST_ONCE) };
connection0.subscribe(topics0);
byte[] message = "Hello MQTT Proxy".getBytes(StandardCharsets.UTF_8);
connection0.publish(t1, message, QoS.AT_MOST_ONCE, false);
connection0.publish(t2, message, QoS.AT_MOST_ONCE, false);
Message received = connection0.receive();
Assert.assertEquals(received.getPayload(), message);
received = connection0.receive();
Assert.assertEquals(received.getPayload(), message);
MQTT mqtt1 = createMQTTClient();
BlockingConnection connection1 = mqtt1.blockingConnection();
connection1.connect();
Topic[] topics1 = { new Topic("a/b/#", QoS.AT_LEAST_ONCE)};
connection1.subscribe(topics1);
connection1.publish(t1, message, QoS.AT_MOST_ONCE, false);
connection1.publish(t2, message, QoS.AT_MOST_ONCE, false);
received = connection1.receive();
Assert.assertEquals(received.getPayload(), message);
received = connection1.receive();
Assert.assertEquals(received.getPayload(), message);
connection0.disconnect();
connection1.disconnect();
MQTT mqtt2 = createMQTTClient();
BlockingConnection connection2 = mqtt2.blockingConnection();
connection2.connect();
Topic[] topics2 = { new Topic("a/+/c", QoS.AT_LEAST_ONCE), new Topic("a/+/c/#", QoS.AT_LEAST_ONCE)};
connection2.subscribe(topics2);
connection2.publish(t1, message, QoS.AT_MOST_ONCE, false);
connection2.publish(t2, message, QoS.AT_MOST_ONCE, false);
received = connection2.receive();
Assert.assertEquals(received.getPayload(), message);
received = connection2.receive();
Assert.assertEquals(received.getPayload(), message);
connection2.disconnect();
}
@Test(expectedExceptions = {EOFException.class, IllegalStateException.class})
public void testInvalidClientId() throws Exception {
MQTT mqtt = createMQTTClient();
mqtt.setConnectAttemptsMax(1);
// The client ID is invalid: MQTT 3.1 caps client IDs at 23 characters.
mqtt.setClientId(UUID.randomUUID().toString().replace("-", ""));
BlockingConnection connection = Mockito.spy(mqtt.blockingConnection());
connection.connect();
verify(connection, Mockito.times(2)).connect();
}
@Test
@SneakyThrows
public void testTlsPskWithTlsv1() {
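// Open a raw Netty connection using the TLS-PSK credentials configured in
// initConfig() (identity hint "alpha", identity "mqtt:mqtt123").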
Bootstrap client = new Bootstrap();
EventLoopGroup group = new NioEventLoopGroup();
client.group(group);
client.channel(NioSocketChannel.class);
client.handler(new PSKClient("alpha", "mqtt", "mqtt123"));
AtomicBoolean connected = new AtomicBoolean(false);
CountDownLatch latch = new CountDownLatch(1);
client.connect("localhost", mqttBrokerPortTlsPskList.get(0)).addListener((ChannelFutureListener) future -> {
connected.set(future.isSuccess());
latch.countDown();
});
latch.await();
Assert.assertTrue(connected.get());
}
@Test
@SneakyThrows
public void testServlet() {
HttpClient httpClient = HttpClientBuilder.create().build();
final String mopEndPoint = "http://localhost:" + brokerWebservicePortList.get(0) + "/mop-stats";
HttpResponse response = httpClient.execute(new HttpGet(mopEndPoint));
InputStream inputStream = response.getEntity().getContent();
InputStreamReader isReader = new InputStreamReader(inputStream);
BufferedReader reader = new BufferedReader(isReader);
StringBuilder buffer = new StringBuilder();
String str;
while ((str = reader.readLine()) != null){
buffer.append(str);
}
Assert.assertTrue(buffer.toString().contains("active"));
Assert.assertTrue(buffer.toString().contains("active_clients"));
}
@Test
@SneakyThrows
public void testConsumerDisconnectNotMissingMessage() {
String topic = "disconnectNotMissingMessage";
int total = 1000;
AtomicInteger receivedCount = new AtomicInteger(0);
String msgPrefix = "Hello MQTT Proxy- ";
CountDownLatch latch = new CountDownLatch(1);
CountDownLatch stop = new CountDownLatch(1);
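// With cleanSession=false and a fixed client ID, the subscription survives
// reconnects, so messages published while the consumer is offline are not lost.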
Thread consumerThread = new Thread(new Runnable() {
@Override
public void run() {
try {
int segment = 100;
int recCount = 0;
int start = 0;
MQTT mqttConsumer = createMQTTClient();
mqttConsumer.setCleanSession(false);
mqttConsumer.setClientId("theSameClientId");
for (int l = 0; l < total / segment; l++) {
BlockingConnection consumer = mqttConsumer.blockingConnection();
consumer.connect();
Topic[] topics = { new Topic(topic, QoS.AT_LEAST_ONCE)};
consumer.subscribe(topics);
latch.countDown();
for (int i = start; i < segment + start; i++) {
Message received = consumer.receive();
received.ack();
recCount++;
receivedCount.incrementAndGet();
}
start = recCount;
consumer.disconnect();
}
Assert.assertEquals(start, total);
stop.countDown();
} catch (Throwable ex) {
log.error("consumer error", ex);
}
}
});
consumerThread.start();
MQTT mqttProducer = createMQTTClient();
BlockingConnection producer = mqttProducer.blockingConnection();
producer.connect();
latch.await();
for (int i = 0; i < total; i++) {
byte[] message = (msgPrefix + i).getBytes(StandardCharsets.UTF_8);
producer.publish(topic, message, QoS.AT_MOST_ONCE, false);
}
producer.disconnect();
stop.await(5, TimeUnit.MINUTES);
Assert.assertEquals(receivedCount.get(), total);
}
@Test
@SneakyThrows
public void testCleanSession() {
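// With cleanSession=true the broker drops the subscription once the client's
// keep-alive expires, leaving the Pulsar topic with no subscriptions.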
String topic = "cleanSession";
Topic[] topics = { new Topic(topic, QoS.AT_LEAST_ONCE)};
MQTT mqttConsumer = createMQTTClient();
mqttConsumer.setClientId("keepTheSameClientId");
mqttConsumer.setConnectAttemptsMax(0);
mqttConsumer.setReconnectAttemptsMax(0);
mqttConsumer.setKeepAlive((short) 3);
mqttConsumer.setCleanSession(true);
BlockingConnection consumer = mqttConsumer.blockingConnection();
consumer.connect();
consumer.subscribe(topics);
// Producer
MQTT mqttProducer = createMQTTClient();
BlockingConnection producer = mqttProducer.blockingConnection();
producer.connect();
producer.publish(topic, "Hello MQTT".getBytes(StandardCharsets.UTF_8), QoS.AT_MOST_ONCE, false);
consumer.receive();
consumer.suspend();
Thread.sleep(3000 * 2); // Sleep for twice the keep-alive interval.
Awaitility.await().untilAsserted(() -> {
Assert.assertEquals(pulsarServiceList.get(0).getAdminClient().topics().getSubscriptions(topic).size(), 0);
});
}
@Test(priority = -1)
public void testConnectionWithSameClientId() throws Exception {
MQTT mqttProducer = createMQTTClient();
mqttProducer.setClientId("client1");
mqttProducer.setUserName("clientUser1");
mqttProducer.setConnectAttemptsMax(0);
mqttProducer.setReconnectAttemptsMax(0);
BlockingConnection producer1 = mqttProducer.blockingConnection();
producer1.connect();
// Producer
MQTT mqttProducer2 = createMQTTClient();
mqttProducer2.setClientId("client1");
mqttProducer2.setUserName("clientUser2");
mqttProducer2.setConnectAttemptsMax(0);
mqttProducer2.setReconnectAttemptsMax(0);
BlockingConnection producer2 = mqttProducer2.blockingConnection();
producer2.connect();
// Verify via the MOP stats endpoint that only one of the two same-ID connections stays active.
HttpClient httpClient = HttpClientBuilder.create().build();
final String mopEndPoint = "http://localhost:" + brokerWebservicePortList.get(0) + "/mop-stats";
HttpResponse response = httpClient.execute(new HttpGet(mopEndPoint));
InputStream inputStream = response.getEntity().getContent();
InputStreamReader isReader = new InputStreamReader(inputStream);
BufferedReader reader = new BufferedReader(isReader);
StringBuilder buffer = new StringBuilder();
String str;
while ((str = reader.readLine()) != null){
buffer.append(str);
}
String result = buffer.toString();
LinkedTreeMap treeMap = new Gson().fromJson(result, LinkedTreeMap.class);
LinkedTreeMap clients = (LinkedTreeMap) treeMap.get("clients");
Awaitility.await().untilAsserted(() -> {
Assert.assertEquals(clients.get("active"), 1.0);
Assert.assertEquals(clients.get("total"), 2.0);
});
}
@Test
public void testSubscribeManyTimes() throws Exception {
MQTT mqttConsumer = createMQTTClient();
BlockingConnection consumer = mqttConsumer.blockingConnection();
consumer.connect();
String topicName1 = "subscribeManyTimes1";
String topicName2 = "subscribeManyTimes2";
Topic[] topic1 = { new Topic(topicName1, QoS.AT_LEAST_ONCE)};
Topic[] topic2 = { new Topic(topicName2, QoS.AT_LEAST_ONCE)};
consumer.subscribe(topic1);
consumer.subscribe(topic2);
MQTT mqttProducer = createMQTTClient();
BlockingConnection producer = mqttProducer.blockingConnection();
producer.connect();
String msg1 = "hello topic1";
String msg2 = "hello topic2";
producer.publish(topicName1, msg1.getBytes(StandardCharsets.UTF_8), QoS.AT_MOST_ONCE, false);
producer.publish(topicName2, msg2.getBytes(StandardCharsets.UTF_8), QoS.AT_MOST_ONCE, false);
producer.disconnect();
Message receive1 = consumer.receive();
Message receive2 = consumer.receive();
consumer.disconnect();
Assert.assertEquals(new String(receive1.getPayload()), msg1);
Assert.assertEquals(receive1.getTopic(), topicName1);
Assert.assertEquals(new String(receive2.getPayload()), msg2);
Assert.assertEquals(receive2.getTopic(), topicName2);
}
@Test
public void testLastWillMessage() throws Exception {
MQTT mqttConsumer = createMQTTClient();
BlockingConnection consumer = mqttConsumer.blockingConnection();
consumer.connect();
String topicName1 = "topic-a";
String topicName2 = "will-message-topic";
Topic[] topic1 = { new Topic(topicName1, QoS.AT_LEAST_ONCE)};
Topic[] topic2 = { new Topic(topicName2, QoS.AT_LEAST_ONCE)};
consumer.subscribe(topic1);
consumer.subscribe(topic2);
MQTT mqttProducer = createMQTTClient();
mqttProducer.setWillMessage("offline");
mqttProducer.setWillTopic(topicName2);
mqttProducer.setWillRetain(false);
mqttProducer.setWillQos(QoS.AT_LEAST_ONCE);
BlockingConnection producer = mqttProducer.blockingConnection();
producer.connect();
String msg1 = "hello topic1";
producer.publish(topicName1, msg1.getBytes(StandardCharsets.UTF_8), QoS.AT_MOST_ONCE, false);
Message receive1 = consumer.receive();
producer.disconnect();
Message receive2 = consumer.receive();
consumer.disconnect();
Assert.assertEquals(new String(receive1.getPayload()), msg1);
Assert.assertEquals(receive1.getTopic(), topicName1);
Assert.assertEquals(new String(receive2.getPayload()), "offline");
Assert.assertEquals(receive2.getTopic(), "will-message-topic");
}
}
|
package com.arcrobotics.ftclib.hardware;
import com.arcrobotics.ftclib.geometry.Rotation2d;
public abstract class GyroEx implements HardwareDevice {
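// Extended gyro abstraction: implementations wrap a concrete IMU and expose
// relative and absolute headings (typically in degrees) plus a Rotation2d view.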
public abstract void init();
// Gyro
public abstract double getHeading();
public abstract double getAbsoluteHeading();
public abstract double[] getAngles();
public abstract Rotation2d getRotation2d();
public abstract void reset();
}
|
package net.minecraft.world.gen.structure;
import com.google.common.collect.Lists;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.item.EntityMinecartChest;
import net.minecraft.init.Blocks;
import net.minecraft.init.Items;
import net.minecraft.item.EnumDyeColor;
import net.minecraft.item.Item;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.tileentity.TileEntityMobSpawner;
import net.minecraft.util.BlockPos;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.WeightedRandomChestContent;
import net.minecraft.world.World;
public class StructureMineshaftPieces
{
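// Weighted loot table for mineshaft chest minecarts (item, metadata, min, max, weight).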
private static final List field_175893_a = Lists.newArrayList(new WeightedRandomChestContent[] {
new WeightedRandomChestContent(Items.iron_ingot, 0, 1, 5, 10),
new WeightedRandomChestContent(Items.gold_ingot, 0, 1, 3, 5),
new WeightedRandomChestContent(Items.redstone, 0, 4, 9, 5),
new WeightedRandomChestContent(Items.dye, EnumDyeColor.BLUE.getDyeColorDamage(), 4, 9, 5),
new WeightedRandomChestContent(Items.diamond, 0, 1, 2, 3),
new WeightedRandomChestContent(Items.coal, 0, 3, 8, 10),
new WeightedRandomChestContent(Items.bread, 0, 1, 3, 15),
new WeightedRandomChestContent(Items.iron_pickaxe, 0, 1, 1, 1),
new WeightedRandomChestContent(Item.getItemFromBlock(Blocks.rail), 0, 4, 8, 1),
new WeightedRandomChestContent(Items.melon_seeds, 0, 2, 4, 10),
new WeightedRandomChestContent(Items.pumpkin_seeds, 0, 2, 4, 10),
new WeightedRandomChestContent(Items.saddle, 0, 1, 1, 3),
new WeightedRandomChestContent(Items.iron_horse_armor, 0, 1, 1, 1)});
public static void registerStructurePieces()
{
MapGenStructureIO.registerStructureComponent(StructureMineshaftPieces.Corridor.class, "MSCorridor");
MapGenStructureIO.registerStructureComponent(StructureMineshaftPieces.Cross.class, "MSCrossing");
MapGenStructureIO.registerStructureComponent(StructureMineshaftPieces.Room.class, "MSRoom");
MapGenStructureIO.registerStructureComponent(StructureMineshaftPieces.Stairs.class, "MSStairs");
}
private static StructureComponent func_175892_a(List p_175892_0_, Random p_175892_1_, int p_175892_2_, int p_175892_3_, int p_175892_4_, EnumFacing p_175892_5_, int p_175892_6_)
{
int var7 = p_175892_1_.nextInt(100);
StructureBoundingBox var8;
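// Pick the next piece type: 20% crossing (roll >= 80), 10% stairs (70-79), 70% corridor.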
if (var7 >= 80)
{
var8 = StructureMineshaftPieces.Cross.func_175813_a(p_175892_0_, p_175892_1_, p_175892_2_, p_175892_3_, p_175892_4_, p_175892_5_);
if (var8 != null)
{
return new StructureMineshaftPieces.Cross(p_175892_6_, p_175892_1_, var8, p_175892_5_);
}
}
else if (var7 >= 70)
{
var8 = StructureMineshaftPieces.Stairs.func_175812_a(p_175892_0_, p_175892_1_, p_175892_2_, p_175892_3_, p_175892_4_, p_175892_5_);
if (var8 != null)
{
return new StructureMineshaftPieces.Stairs(p_175892_6_, p_175892_1_, var8, p_175892_5_);
}
}
else
{
var8 = StructureMineshaftPieces.Corridor.func_175814_a(p_175892_0_, p_175892_1_, p_175892_2_, p_175892_3_, p_175892_4_, p_175892_5_);
if (var8 != null)
{
return new StructureMineshaftPieces.Corridor(p_175892_6_, p_175892_1_, var8, p_175892_5_);
}
}
return null;
}
private static StructureComponent func_175890_b(StructureComponent p_175890_0_, List p_175890_1_, Random p_175890_2_, int p_175890_3_, int p_175890_4_, int p_175890_5_, EnumFacing p_175890_6_, int p_175890_7_)
{
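// Stop branching when the chain is more than 8 pieces deep or the new piece
// would sit more than 80 blocks from the start piece on either axis.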
if (p_175890_7_ > 8)
{
return null;
}
else if (Math.abs(p_175890_3_ - p_175890_0_.getBoundingBox().minX) <= 80 && Math.abs(p_175890_5_ - p_175890_0_.getBoundingBox().minZ) <= 80)
{
StructureComponent var8 = func_175892_a(p_175890_1_, p_175890_2_, p_175890_3_, p_175890_4_, p_175890_5_, p_175890_6_, p_175890_7_ + 1);
if (var8 != null)
{
p_175890_1_.add(var8);
var8.buildComponent(p_175890_0_, p_175890_1_, p_175890_2_);
}
return var8;
}
else
{
return null;
}
}
public static class Corridor extends StructureComponent
{
private boolean hasRails;
private boolean hasSpiders;
private boolean spawnerPlaced;
private int sectionCount;
public Corridor() {}
protected void writeStructureToNBT(NBTTagCompound p_143012_1_)
{
p_143012_1_.setBoolean("hr", this.hasRails);
p_143012_1_.setBoolean("sc", this.hasSpiders);
p_143012_1_.setBoolean("hps", this.spawnerPlaced);
p_143012_1_.setInteger("Num", this.sectionCount);
}
protected void readStructureFromNBT(NBTTagCompound p_143011_1_)
{
this.hasRails = p_143011_1_.getBoolean("hr");
this.hasSpiders = p_143011_1_.getBoolean("sc");
this.spawnerPlaced = p_143011_1_.getBoolean("hps");
this.sectionCount = p_143011_1_.getInteger("Num");
}
public Corridor(int p_i45625_1_, Random p_i45625_2_, StructureBoundingBox p_i45625_3_, EnumFacing p_i45625_4_)
{
super(p_i45625_1_);
this.coordBaseMode = p_i45625_4_;
this.boundingBox = p_i45625_3_;
this.hasRails = p_i45625_2_.nextInt(3) == 0;
this.hasSpiders = !this.hasRails && p_i45625_2_.nextInt(23) == 0;
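// Corridors are laid out in 5-block sections along the box's long axis.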
if (this.coordBaseMode != EnumFacing.NORTH && this.coordBaseMode != EnumFacing.SOUTH)
{
this.sectionCount = p_i45625_3_.getXSize() / 5;
}
else
{
this.sectionCount = p_i45625_3_.getZSize() / 5;
}
}
public static StructureBoundingBox func_175814_a(List p_175814_0_, Random p_175814_1_, int p_175814_2_, int p_175814_3_, int p_175814_4_, EnumFacing p_175814_5_)
{
StructureBoundingBox var6 = new StructureBoundingBox(p_175814_2_, p_175814_3_, p_175814_4_, p_175814_2_, p_175814_3_ + 2, p_175814_4_);
int var7;
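// Try corridor lengths of nextInt(3)+2 sections, shrinking by one section
// until the bounding box no longer intersects an existing piece.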
for (var7 = p_175814_1_.nextInt(3) + 2; var7 > 0; --var7)
{
int var8 = var7 * 5;
switch (StructureMineshaftPieces.SwitchEnumFacing.field_175894_a[p_175814_5_.ordinal()])
{
case 1:
var6.maxX = p_175814_2_ + 2;
var6.minZ = p_175814_4_ - (var8 - 1);
break;
case 2:
var6.maxX = p_175814_2_ + 2;
var6.maxZ = p_175814_4_ + (var8 - 1);
break;
case 3:
var6.minX = p_175814_2_ - (var8 - 1);
var6.maxZ = p_175814_4_ + 2;
break;
case 4:
var6.maxX = p_175814_2_ + (var8 - 1);
var6.maxZ = p_175814_4_ + 2;
}
if (StructureComponent.findIntersecting(p_175814_0_, var6) == null)
{
break;
}
}
return var7 > 0 ? var6 : null;
}
public void buildComponent(StructureComponent p_74861_1_, List p_74861_2_, Random p_74861_3_)
{
int var4 = this.getComponentType();
int var5 = p_74861_3_.nextInt(4);
if (this.coordBaseMode != null)
{
switch (StructureMineshaftPieces.SwitchEnumFacing.field_175894_a[this.coordBaseMode.ordinal()])
{
case 1:
if (var5 <= 1)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ - 1, this.coordBaseMode, var4);
}
else if (var5 == 2)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ, EnumFacing.WEST, var4);
}
else
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ, EnumFacing.EAST, var4);
}
break;
case 2:
if (var5 <= 1)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.maxZ + 1, this.coordBaseMode, var4);
}
else if (var5 == 2)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.maxZ - 3, EnumFacing.WEST, var4);
}
else
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.maxZ - 3, EnumFacing.EAST, var4);
}
break;
case 3:
if (var5 <= 1)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ, this.coordBaseMode, var4);
}
else if (var5 == 2)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
}
else
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
}
break;
case 4:
if (var5 <= 1)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ, this.coordBaseMode, var4);
}
else if (var5 == 2)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX - 3, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
}
else
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX - 3, this.boundingBox.minY - 1 + p_74861_3_.nextInt(3), this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
}
}
}
if (var4 < 8)
{
int var6;
int var7;
if (this.coordBaseMode != EnumFacing.NORTH && this.coordBaseMode != EnumFacing.SOUTH)
{
for (var6 = this.boundingBox.minX + 3; var6 + 3 <= this.boundingBox.maxX; var6 += 5)
{
var7 = p_74861_3_.nextInt(5);
if (var7 == 0)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, var6, this.boundingBox.minY, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4 + 1);
}
else if (var7 == 1)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, var6, this.boundingBox.minY, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4 + 1);
}
}
}
else
{
for (var6 = this.boundingBox.minZ + 3; var6 + 3 <= this.boundingBox.maxZ; var6 += 5)
{
var7 = p_74861_3_.nextInt(5);
if (var7 == 0)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY, var6, EnumFacing.WEST, var4 + 1);
}
else if (var7 == 1)
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY, var6, EnumFacing.EAST, var4 + 1);
}
}
}
}
}
protected boolean func_180778_a(World worldIn, StructureBoundingBox p_180778_2_, Random p_180778_3_, int p_180778_4_, int p_180778_5_, int p_180778_6_, List p_180778_7_, int p_180778_8_)
{
BlockPos var9 = new BlockPos(this.getXWithOffset(p_180778_4_, p_180778_6_), this.getYWithOffset(p_180778_5_), this.getZWithOffset(p_180778_4_, p_180778_6_));
if (p_180778_2_.func_175898_b(var9) && worldIn.getBlockState(var9).getBlock().getMaterial() == Material.air)
{
int var10 = p_180778_3_.nextBoolean() ? 1 : 0;
worldIn.setBlockState(var9, Blocks.rail.getStateFromMeta(this.getMetadataWithOffset(Blocks.rail, var10)), 2);
EntityMinecartChest var11 = new EntityMinecartChest(worldIn, (double)((float)var9.getX() + 0.5F), (double)((float)var9.getY() + 0.5F), (double)((float)var9.getZ() + 0.5F));
WeightedRandomChestContent.generateChestContents(p_180778_3_, p_180778_7_, var11, p_180778_8_);
worldIn.spawnEntityInWorld(var11);
return true;
}
else
{
return false;
}
}
public boolean addComponentParts(World worldIn, Random p_74875_2_, StructureBoundingBox p_74875_3_)
{
if (this.isLiquidInStructureBoundingBox(worldIn, p_74875_3_))
{
return false;
}
else
{
int var8 = this.sectionCount * 5 - 1;
this.func_175804_a(worldIn, p_74875_3_, 0, 0, 0, 2, 1, var8, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175805_a(worldIn, p_74875_3_, p_74875_2_, 0.8F, 0, 2, 0, 2, 2, var8, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
if (this.hasSpiders)
{
this.func_175805_a(worldIn, p_74875_3_, p_74875_2_, 0.6F, 0, 0, 0, 2, 1, var8, Blocks.web.getDefaultState(), Blocks.air.getDefaultState(), false);
}
int var9;
int var10;
for (var9 = 0; var9 < this.sectionCount; ++var9)
{
var10 = 2 + var9 * 5;
this.func_175804_a(worldIn, p_74875_3_, 0, 0, var10, 0, 1, var10, Blocks.oak_fence.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, 2, 0, var10, 2, 1, var10, Blocks.oak_fence.getDefaultState(), Blocks.air.getDefaultState(), false);
if (p_74875_2_.nextInt(4) == 0)
{
this.func_175804_a(worldIn, p_74875_3_, 0, 2, var10, 0, 2, var10, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, 2, 2, var10, 2, 2, var10, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
}
else
{
this.func_175804_a(worldIn, p_74875_3_, 0, 2, var10, 2, 2, var10, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
}
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.1F, 0, 2, var10 - 1, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.1F, 2, 2, var10 - 1, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.1F, 0, 2, var10 + 1, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.1F, 2, 2, var10 + 1, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.05F, 0, 2, var10 - 2, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.05F, 2, 2, var10 - 2, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.05F, 0, 2, var10 + 2, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.05F, 2, 2, var10 + 2, Blocks.web.getDefaultState());
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.05F, 1, 2, var10 - 1, Blocks.torch.getStateFromMeta(EnumFacing.UP.getIndex()));
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.05F, 1, 2, var10 + 1, Blocks.torch.getStateFromMeta(EnumFacing.UP.getIndex()));
if (p_74875_2_.nextInt(100) == 0)
{
this.func_180778_a(worldIn, p_74875_3_, p_74875_2_, 2, 0, var10 - 1, WeightedRandomChestContent.func_177629_a(StructureMineshaftPieces.field_175893_a, new WeightedRandomChestContent[] {Items.enchanted_book.getRandomEnchantedBook(p_74875_2_)}), 3 + p_74875_2_.nextInt(4));
}
if (p_74875_2_.nextInt(100) == 0)
{
this.func_180778_a(worldIn, p_74875_3_, p_74875_2_, 0, 0, var10 + 1, WeightedRandomChestContent.func_177629_a(StructureMineshaftPieces.field_175893_a, new WeightedRandomChestContent[] {Items.enchanted_book.getRandomEnchantedBook(p_74875_2_)}), 3 + p_74875_2_.nextInt(4));
}
if (this.hasSpiders && !this.spawnerPlaced)
{
int var11 = this.getYWithOffset(0);
int var12 = var10 - 1 + p_74875_2_.nextInt(3);
int var13 = this.getXWithOffset(1, var12);
var12 = this.getZWithOffset(1, var12);
BlockPos var14 = new BlockPos(var13, var11, var12);
if (p_74875_3_.func_175898_b(var14))
{
this.spawnerPlaced = true;
worldIn.setBlockState(var14, Blocks.mob_spawner.getDefaultState(), 2);
TileEntity var15 = worldIn.getTileEntity(var14);
if (var15 instanceof TileEntityMobSpawner)
{
((TileEntityMobSpawner)var15).getSpawnerBaseLogic().setEntityName("CaveSpider");
}
}
}
}
for (var9 = 0; var9 <= 2; ++var9)
{
for (var10 = 0; var10 <= var8; ++var10)
{
IBlockState var18 = this.func_175807_a(worldIn, var9, -1, var10, p_74875_3_);
if (var18.getBlock().getMaterial() == Material.air)
{
this.func_175811_a(worldIn, Blocks.planks.getDefaultState(), var9, -1, var10, p_74875_3_);
}
}
}
if (this.hasRails)
{
for (var9 = 0; var9 <= var8; ++var9)
{
IBlockState var16 = this.func_175807_a(worldIn, 1, -1, var9, p_74875_3_);
if (var16.getBlock().getMaterial() != Material.air && var16.getBlock().isFullBlock())
{
this.func_175809_a(worldIn, p_74875_3_, p_74875_2_, 0.7F, 1, 0, var9, Blocks.rail.getStateFromMeta(this.getMetadataWithOffset(Blocks.rail, 0)));
}
}
}
return true;
}
}
}
public static class Cross extends StructureComponent
{
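// A four-way intersection piece; about 1 in 4 are generated two floors tall (see func_175813_a).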
private EnumFacing corridorDirection;
private boolean isMultipleFloors;
public Cross() {}
protected void writeStructureToNBT(NBTTagCompound p_143012_1_)
{
p_143012_1_.setBoolean("tf", this.isMultipleFloors);
p_143012_1_.setInteger("D", this.corridorDirection.getHorizontalIndex());
}
protected void readStructureFromNBT(NBTTagCompound p_143011_1_)
{
this.isMultipleFloors = p_143011_1_.getBoolean("tf");
this.corridorDirection = EnumFacing.getHorizontal(p_143011_1_.getInteger("D"));
}
public Cross(int p_i45624_1_, Random p_i45624_2_, StructureBoundingBox p_i45624_3_, EnumFacing p_i45624_4_)
{
super(p_i45624_1_);
this.corridorDirection = p_i45624_4_;
this.boundingBox = p_i45624_3_;
this.isMultipleFloors = p_i45624_3_.getYSize() > 3;
}
public static StructureBoundingBox func_175813_a(List p_175813_0_, Random p_175813_1_, int p_175813_2_, int p_175813_3_, int p_175813_4_, EnumFacing p_175813_5_)
{
StructureBoundingBox var6 = new StructureBoundingBox(p_175813_2_, p_175813_3_, p_175813_4_, p_175813_2_, p_175813_3_ + 2, p_175813_4_);
if (p_175813_1_.nextInt(4) == 0)
{
var6.maxY += 4;
}
switch (StructureMineshaftPieces.SwitchEnumFacing.field_175894_a[p_175813_5_.ordinal()])
{
case 1:
var6.minX = p_175813_2_ - 1;
var6.maxX = p_175813_2_ + 3;
var6.minZ = p_175813_4_ - 4;
break;
case 2:
var6.minX = p_175813_2_ - 1;
var6.maxX = p_175813_2_ + 3;
var6.maxZ = p_175813_4_ + 4;
break;
case 3:
var6.minX = p_175813_2_ - 4;
var6.minZ = p_175813_4_ - 1;
var6.maxZ = p_175813_4_ + 3;
break;
case 4:
var6.maxX = p_175813_2_ + 4;
var6.minZ = p_175813_4_ - 1;
var6.maxZ = p_175813_4_ + 3;
}
return StructureComponent.findIntersecting(p_175813_0_, var6) != null ? null : var6;
}
public void buildComponent(StructureComponent p_74861_1_, List p_74861_2_, Random p_74861_3_)
{
int var4 = this.getComponentType();
switch (StructureMineshaftPieces.SwitchEnumFacing.field_175894_a[this.corridorDirection.ordinal()])
{
case 1:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY, this.boundingBox.minZ + 1, EnumFacing.WEST, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY, this.boundingBox.minZ + 1, EnumFacing.EAST, var4);
break;
case 2:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY, this.boundingBox.minZ + 1, EnumFacing.WEST, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY, this.boundingBox.minZ + 1, EnumFacing.EAST, var4);
break;
case 3:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY, this.boundingBox.minZ + 1, EnumFacing.WEST, var4);
break;
case 4:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY, this.boundingBox.minZ + 1, EnumFacing.EAST, var4);
}
if (this.isMultipleFloors)
{
if (p_74861_3_.nextBoolean())
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY + 3 + 1, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
}
if (p_74861_3_.nextBoolean())
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY + 3 + 1, this.boundingBox.minZ + 1, EnumFacing.WEST, var4);
}
if (p_74861_3_.nextBoolean())
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY + 3 + 1, this.boundingBox.minZ + 1, EnumFacing.EAST, var4);
}
if (p_74861_3_.nextBoolean())
{
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + 1, this.boundingBox.minY + 3 + 1, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
}
}
}
public boolean addComponentParts(World worldIn, Random p_74875_2_, StructureBoundingBox p_74875_3_)
{
if (this.isLiquidInStructureBoundingBox(worldIn, p_74875_3_))
{
return false;
}
else
{
if (this.isMultipleFloors)
{
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.minZ, this.boundingBox.maxX - 1, this.boundingBox.minY + 3 - 1, this.boundingBox.maxZ, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX, this.boundingBox.minY, this.boundingBox.minZ + 1, this.boundingBox.maxX, this.boundingBox.minY + 3 - 1, this.boundingBox.maxZ - 1, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX + 1, this.boundingBox.maxY - 2, this.boundingBox.minZ, this.boundingBox.maxX - 1, this.boundingBox.maxY, this.boundingBox.maxZ, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX, this.boundingBox.maxY - 2, this.boundingBox.minZ + 1, this.boundingBox.maxX, this.boundingBox.maxY, this.boundingBox.maxZ - 1, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX + 1, this.boundingBox.minY + 3, this.boundingBox.minZ + 1, this.boundingBox.maxX - 1, this.boundingBox.minY + 3, this.boundingBox.maxZ - 1, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
}
else
{
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.minZ, this.boundingBox.maxX - 1, this.boundingBox.maxY, this.boundingBox.maxZ, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX, this.boundingBox.minY, this.boundingBox.minZ + 1, this.boundingBox.maxX, this.boundingBox.maxY, this.boundingBox.maxZ - 1, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
}
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.minZ + 1, this.boundingBox.minX + 1, this.boundingBox.maxY, this.boundingBox.minZ + 1, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX + 1, this.boundingBox.minY, this.boundingBox.maxZ - 1, this.boundingBox.minX + 1, this.boundingBox.maxY, this.boundingBox.maxZ - 1, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.maxX - 1, this.boundingBox.minY, this.boundingBox.minZ + 1, this.boundingBox.maxX - 1, this.boundingBox.maxY, this.boundingBox.minZ + 1, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.maxX - 1, this.boundingBox.minY, this.boundingBox.maxZ - 1, this.boundingBox.maxX - 1, this.boundingBox.maxY, this.boundingBox.maxZ - 1, Blocks.planks.getDefaultState(), Blocks.air.getDefaultState(), false);
for (int var4 = this.boundingBox.minX; var4 <= this.boundingBox.maxX; ++var4)
{
for (int var5 = this.boundingBox.minZ; var5 <= this.boundingBox.maxZ; ++var5)
{
if (this.func_175807_a(worldIn, var4, this.boundingBox.minY - 1, var5, p_74875_3_).getBlock().getMaterial() == Material.air)
{
this.func_175811_a(worldIn, Blocks.planks.getDefaultState(), var4, this.boundingBox.minY - 1, var5, p_74875_3_);
}
}
}
return true;
}
}
}
public static class Room extends StructureComponent
{
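// A large irregular dirt-floored room that anchors the mineshaft; corridors are attached along each wall and their doorway boxes are recorded in roomsLinkedToTheRoom so the entrances can be carved out later.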
private List roomsLinkedToTheRoom = Lists.newLinkedList();
public Room() {}
public Room(int p_i2037_1_, Random p_i2037_2_, int p_i2037_3_, int p_i2037_4_)
{
super(p_i2037_1_);
this.boundingBox = new StructureBoundingBox(p_i2037_3_, 50, p_i2037_4_, p_i2037_3_ + 7 + p_i2037_2_.nextInt(6), 54 + p_i2037_2_.nextInt(6), p_i2037_4_ + 7 + p_i2037_2_.nextInt(6));
}
public void buildComponent(StructureComponent p_74861_1_, List p_74861_2_, Random p_74861_3_)
{
int var4 = this.getComponentType();
int var6 = this.boundingBox.getYSize() - 3 - 1;
if (var6 <= 0)
{
var6 = 1;
}
int var5;
StructureComponent var7;
StructureBoundingBox var8;
for (var5 = 0; var5 < this.boundingBox.getXSize(); var5 += 4)
{
var5 += p_74861_3_.nextInt(this.boundingBox.getXSize());
if (var5 + 3 > this.boundingBox.getXSize())
{
break;
}
var7 = StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + var5, this.boundingBox.minY + p_74861_3_.nextInt(var6) + 1, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
if (var7 != null)
{
var8 = var7.getBoundingBox();
this.roomsLinkedToTheRoom.add(new StructureBoundingBox(var8.minX, var8.minY, this.boundingBox.minZ, var8.maxX, var8.maxY, this.boundingBox.minZ + 1));
}
}
for (var5 = 0; var5 < this.boundingBox.getXSize(); var5 += 4)
{
var5 += p_74861_3_.nextInt(this.boundingBox.getXSize());
if (var5 + 3 > this.boundingBox.getXSize())
{
break;
}
var7 = StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX + var5, this.boundingBox.minY + p_74861_3_.nextInt(var6) + 1, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
if (var7 != null)
{
var8 = var7.getBoundingBox();
this.roomsLinkedToTheRoom.add(new StructureBoundingBox(var8.minX, var8.minY, this.boundingBox.maxZ - 1, var8.maxX, var8.maxY, this.boundingBox.maxZ));
}
}
for (var5 = 0; var5 < this.boundingBox.getZSize(); var5 += 4)
{
var5 += p_74861_3_.nextInt(this.boundingBox.getZSize());
if (var5 + 3 > this.boundingBox.getZSize())
{
break;
}
var7 = StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY + p_74861_3_.nextInt(var6) + 1, this.boundingBox.minZ + var5, EnumFacing.WEST, var4);
if (var7 != null)
{
var8 = var7.getBoundingBox();
this.roomsLinkedToTheRoom.add(new StructureBoundingBox(this.boundingBox.minX, var8.minY, var8.minZ, this.boundingBox.minX + 1, var8.maxY, var8.maxZ));
}
}
for (var5 = 0; var5 < this.boundingBox.getZSize(); var5 += 4)
{
var5 += p_74861_3_.nextInt(this.boundingBox.getZSize());
if (var5 + 3 > this.boundingBox.getZSize())
{
break;
}
var7 = StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY + p_74861_3_.nextInt(var6) + 1, this.boundingBox.minZ + var5, EnumFacing.EAST, var4);
if (var7 != null)
{
var8 = var7.getBoundingBox();
this.roomsLinkedToTheRoom.add(new StructureBoundingBox(this.boundingBox.maxX - 1, var8.minY, var8.minZ, this.boundingBox.maxX, var8.maxY, var8.maxZ));
}
}
}
public boolean addComponentParts(World worldIn, Random p_74875_2_, StructureBoundingBox p_74875_3_)
{
if (this.isLiquidInStructureBoundingBox(worldIn, p_74875_3_))
{
return false;
}
else
{
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX, this.boundingBox.minY, this.boundingBox.minZ, this.boundingBox.maxX, this.boundingBox.minY, this.boundingBox.maxZ, Blocks.dirt.getDefaultState(), Blocks.air.getDefaultState(), true);
this.func_175804_a(worldIn, p_74875_3_, this.boundingBox.minX, this.boundingBox.minY + 1, this.boundingBox.minZ, this.boundingBox.maxX, Math.min(this.boundingBox.minY + 3, this.boundingBox.maxY), this.boundingBox.maxZ, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
Iterator var4 = this.roomsLinkedToTheRoom.iterator();
while (var4.hasNext())
{
StructureBoundingBox var5 = (StructureBoundingBox)var4.next();
this.func_175804_a(worldIn, p_74875_3_, var5.minX, var5.maxY - 2, var5.minZ, var5.maxX, var5.maxY, var5.maxZ, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
}
this.func_180777_a(worldIn, p_74875_3_, this.boundingBox.minX, this.boundingBox.minY + 4, this.boundingBox.minZ, this.boundingBox.maxX, this.boundingBox.maxY, this.boundingBox.maxZ, Blocks.air.getDefaultState(), false);
return true;
}
}
protected void writeStructureToNBT(NBTTagCompound p_143012_1_)
{
NBTTagList var2 = new NBTTagList();
Iterator var3 = this.roomsLinkedToTheRoom.iterator();
while (var3.hasNext())
{
StructureBoundingBox var4 = (StructureBoundingBox)var3.next();
var2.appendTag(var4.func_151535_h());
}
p_143012_1_.setTag("Entrances", var2);
}
protected void readStructureFromNBT(NBTTagCompound p_143011_1_)
{
NBTTagList var2 = p_143011_1_.getTagList("Entrances", 11);
for (int var3 = 0; var3 < var2.tagCount(); ++var3)
{
this.roomsLinkedToTheRoom.add(new StructureBoundingBox(var2.getIntArray(var3)));
}
}
}
public static class Stairs extends StructureComponent
{
public Stairs() {}
public Stairs(int p_i45623_1_, Random p_i45623_2_, StructureBoundingBox p_i45623_3_, EnumFacing p_i45623_4_)
{
super(p_i45623_1_);
this.coordBaseMode = p_i45623_4_;
this.boundingBox = p_i45623_3_;
}
protected void writeStructureToNBT(NBTTagCompound p_143012_1_) {}
protected void readStructureFromNBT(NBTTagCompound p_143011_1_) {}
public static StructureBoundingBox func_175812_a(List p_175812_0_, Random p_175812_1_, int p_175812_2_, int p_175812_3_, int p_175812_4_, EnumFacing p_175812_5_)
{
StructureBoundingBox var6 = new StructureBoundingBox(p_175812_2_, p_175812_3_ - 5, p_175812_4_, p_175812_2_, p_175812_3_ + 2, p_175812_4_);
switch (StructureMineshaftPieces.SwitchEnumFacing.field_175894_a[p_175812_5_.ordinal()])
{
case 1:
var6.maxX = p_175812_2_ + 2;
var6.minZ = p_175812_4_ - 8;
break;
case 2:
var6.maxX = p_175812_2_ + 2;
var6.maxZ = p_175812_4_ + 8;
break;
case 3:
var6.minX = p_175812_2_ - 8;
var6.maxZ = p_175812_4_ + 2;
break;
case 4:
var6.maxX = p_175812_2_ + 8;
var6.maxZ = p_175812_4_ + 2;
}
return StructureComponent.findIntersecting(p_175812_0_, var6) != null ? null : var6;
}
public void buildComponent(StructureComponent p_74861_1_, List p_74861_2_, Random p_74861_3_)
{
int var4 = this.getComponentType();
if (this.coordBaseMode != null)
{
switch (StructureMineshaftPieces.SwitchEnumFacing.field_175894_a[this.coordBaseMode.ordinal()])
{
case 1:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX, this.boundingBox.minY, this.boundingBox.minZ - 1, EnumFacing.NORTH, var4);
break;
case 2:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX, this.boundingBox.minY, this.boundingBox.maxZ + 1, EnumFacing.SOUTH, var4);
break;
case 3:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.minX - 1, this.boundingBox.minY, this.boundingBox.minZ, EnumFacing.WEST, var4);
break;
case 4:
StructureMineshaftPieces.func_175890_b(p_74861_1_, p_74861_2_, p_74861_3_, this.boundingBox.maxX + 1, this.boundingBox.minY, this.boundingBox.minZ, EnumFacing.EAST, var4);
}
}
}
public boolean addComponentParts(World worldIn, Random p_74875_2_, StructureBoundingBox p_74875_3_)
{
if (this.isLiquidInStructureBoundingBox(worldIn, p_74875_3_))
{
return false;
}
else
{
this.func_175804_a(worldIn, p_74875_3_, 0, 5, 0, 2, 7, 1, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
this.func_175804_a(worldIn, p_74875_3_, 0, 0, 7, 2, 2, 8, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
for (int var4 = 0; var4 < 5; ++var4)
{
this.func_175804_a(worldIn, p_74875_3_, 0, 5 - var4 - (var4 < 4 ? 1 : 0), 2 + var4, 2, 7 - var4, 2 + var4, Blocks.air.getDefaultState(), Blocks.air.getDefaultState(), false);
}
return true;
}
}
}
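// Decompiler artifact: a switch-map that translates EnumFacing ordinals into the case labels used above (1 = NORTH, 2 = SOUTH, 3 = WEST, 4 = EAST).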
static final class SwitchEnumFacing
{
static final int[] field_175894_a = new int[EnumFacing.values().length];
static
{
try
{
field_175894_a[EnumFacing.NORTH.ordinal()] = 1;
}
catch (NoSuchFieldError var4)
{
// ignore: the facing constant may be absent at runtime
}
try
{
field_175894_a[EnumFacing.SOUTH.ordinal()] = 2;
}
catch (NoSuchFieldError var3)
{
// ignore: the facing constant may be absent at runtime
}
try
{
field_175894_a[EnumFacing.WEST.ordinal()] = 3;
}
catch (NoSuchFieldError var2)
{
// ignore: the facing constant may be absent at runtime
}
try
{
field_175894_a[EnumFacing.EAST.ordinal()] = 4;
}
catch (NoSuchFieldError var1)
{
// ignore: the facing constant may be absent at runtime
}
}
}
}
|
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package com.epam.reportportal.apache.http.protocol;
import java.io.IOException;
import com.epam.reportportal.apache.http.Header;
import com.epam.reportportal.apache.http.HttpEntity;
import com.epam.reportportal.apache.http.HttpException;
import com.epam.reportportal.apache.http.HttpRequest;
import com.epam.reportportal.apache.http.HttpResponse;
import com.epam.reportportal.apache.http.HttpResponseInterceptor;
import com.epam.reportportal.apache.http.HttpStatus;
import com.epam.reportportal.apache.http.HttpVersion;
import com.epam.reportportal.apache.http.ProtocolVersion;
import com.epam.reportportal.apache.http.annotation.Immutable;
import com.epam.reportportal.apache.http.util.Args;
/**
* ResponseConnControl is responsible for adding the <code>Connection</code> header
* to outgoing responses, which is essential for managing the persistence of
* <code>HTTP/1.0</code> connections. This interceptor is recommended for
* server-side protocol processors.
*
* @since 4.0
*/
@Immutable
public class ResponseConnControl implements HttpResponseInterceptor {
public ResponseConnControl() {
super();
}
public void process(final HttpResponse response, final HttpContext context)
throws HttpException, IOException {
Args.notNull(response, "HTTP response");
final HttpCoreContext corecontext = HttpCoreContext.adapt(context);
// Always drop the connection after certain types of responses
final int status = response.getStatusLine().getStatusCode();
if (status == HttpStatus.SC_BAD_REQUEST ||
status == HttpStatus.SC_REQUEST_TIMEOUT ||
status == HttpStatus.SC_LENGTH_REQUIRED ||
status == HttpStatus.SC_REQUEST_TOO_LONG ||
status == HttpStatus.SC_REQUEST_URI_TOO_LONG ||
status == HttpStatus.SC_SERVICE_UNAVAILABLE ||
status == HttpStatus.SC_NOT_IMPLEMENTED) {
response.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_CLOSE);
return;
}
final Header explicit = response.getFirstHeader(HTTP.CONN_DIRECTIVE);
if (explicit != null && HTTP.CONN_CLOSE.equalsIgnoreCase(explicit.getValue())) {
// Connection persistence explicitly disabled
return;
}
// Always drop the connection for HTTP/1.0 (and earlier) responses
// if the content body cannot be correctly delimited
final HttpEntity entity = response.getEntity();
if (entity != null) {
final ProtocolVersion ver = response.getStatusLine().getProtocolVersion();
if (entity.getContentLength() < 0 &&
(!entity.isChunked() || ver.lessEquals(HttpVersion.HTTP_1_0))) {
response.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_CLOSE);
return;
}
}
// Drop the connection if the client requested it or if the request protocol version is HTTP/1.0 or lower
final HttpRequest request = corecontext.getRequest();
if (request != null) {
final Header header = request.getFirstHeader(HTTP.CONN_DIRECTIVE);
if (header != null) {
response.setHeader(HTTP.CONN_DIRECTIVE, header.getValue());
} else if (request.getProtocolVersion().lessEquals(HttpVersion.HTTP_1_0)) {
response.setHeader(HTTP.CONN_DIRECTIVE, HTTP.CONN_CLOSE);
}
}
}
}
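// Usage sketch (not part of the original file): wiring ResponseConnControl into a
// server-side protocol processor. HttpProcessorBuilder, ResponseDate, ResponseServer,
// and ResponseContent are assumed to mirror the stock HttpCore 4.3 classes under this
// repackaged namespace.
class ResponseConnControlExample {
    static HttpProcessor serverProcessor() {
        return HttpProcessorBuilder.create()
                .add(new ResponseDate())                // Date header
                .add(new ResponseServer("Example/1.1")) // Server header
                .add(new ResponseContent())             // Content-Length / Transfer-Encoding
                .add(new ResponseConnControl())         // Connection: close / keep-alive decision
                .build();
    }
}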
|
// Copyright (c) 2021 Terminus, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cloud.erda.analyzer.common.models;
import lombok.Data;
import java.util.HashMap;
import java.util.Map;
@Data
public class LogEvent {
// Source of the log: a deployment, a container, or a business id
private String source;
// Unique log id
private String id;
// Timestamp of the log entry
private Long timestamp;
// stdout or stderr
private String stream;
// Byte offset within the log file
private Long offset;
// Content of the log line
private String content;
// Tags: docker, system, or business tags
private Map<String, String> tags = new HashMap<>();
}
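// Minimal usage sketch (not part of the original file): Lombok's @Data generates
// the getters and setters used below at compile time.
class LogEventExample {
    static LogEvent sample() {
        LogEvent event = new LogEvent();
        event.setSource("container");
        event.setId("log-0001");
        event.setTimestamp(System.currentTimeMillis());
        event.setStream("stdout");
        event.setOffset(0L);
        event.setContent("service started");
        event.getTags().put("level", "INFO");
        return event;
    }
}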
|
package action;
import java.io.IOException;
import java.sql.Connection;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import dao.FabricaConexao;
import dao.JDBCInserirFormDAO;
@WebServlet("/DeletarFuncionario")
public class DeletarFuncionario extends HttpServlet {
private static final long serialVersionUID = 1L;
public DeletarFuncionario() {
super();
}
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.getWriter().append("Served at: ").append(request.getContextPath());
System.out.println("nTestando");
//http://localhost:8080/locadoraweb/DeletarFuncionario?matricula=?
String matricula = request.getParameter("matricula");
FabricaConexao fabrica = new FabricaConexao();
Connection conexao = fabrica.fazConexao();
JDBCInserirFormDAO dao = new JDBCInserirFormDAO(conexao);
dao.ExcluirFuncionario(matricula);
fabrica.fecharConexao();
RequestDispatcher r = request.getRequestDispatcher("consultafuncionario.jsp");
r.forward(request, response);
}
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
doGet(request, response);
}
}
|
package com.sequenceiq.cloudbreak.cloud.handler;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.dao.TransientDataAccessException;
import org.springframework.stereotype.Component;
import com.sequenceiq.cloudbreak.cloud.notification.model.ResourceNotification;
import com.sequenceiq.cloudbreak.cloud.notification.model.ResourcePersisted;
import com.sequenceiq.cloudbreak.cloud.retry.ErrorTask;
import com.sequenceiq.cloudbreak.cloud.retry.ExceptionCheckTask;
import com.sequenceiq.cloudbreak.cloud.retry.RetryTask;
import com.sequenceiq.cloudbreak.cloud.retry.RetryUtil;
import com.sequenceiq.cloudbreak.cloud.service.Persister;
import reactor.bus.Event;
import reactor.fn.Consumer;
@Component
public class ResourcePersistenceHandler implements Consumer<Event<ResourceNotification>> {
private static final Logger LOGGER = LoggerFactory.getLogger(ResourcePersistenceHandler.class);
@Inject
private Persister<ResourceNotification> cloudResourcePersisterService;
@Override
public void accept(final Event<ResourceNotification> event) {
LOGGER.info("Resource notification event received: {}", event);
final ResourceNotification notification = event.getData();
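// Persist the notification with retries: transient data-access errors are retried,
// anything else fails the notification's promise immediately.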
RetryUtil.withDefaultRetries()
.retry(new RetryTask() {
@Override
public void run() throws Exception {
ResourceNotification notificationPersisted = null;
switch (notification.getType()) {
case CREATE:
notificationPersisted = cloudResourcePersisterService.persist(notification);
break;
case UPDATE:
notificationPersisted = cloudResourcePersisterService.update(notification);
break;
case DELETE:
notificationPersisted = cloudResourcePersisterService.delete(notification);
break;
default:
throw new IllegalArgumentException("Unsupported notification type: " + notification.getType());
}
notificationPersisted.getPromise().onNext(new ResourcePersisted());
}
})
.checkIfRecoverable(new ExceptionCheckTask() {
@Override
public boolean check(Exception e) {
return e instanceof TransientDataAccessException;
}
})
.ifNotRecoverable(new ErrorTask() {
@Override
public void run(Exception e) {
notification.getPromise().onError(e);
}
}).run();
}
}
|
/**
* Copyright (C) 2013 – 2015 SLUB Dresden & Avantgarde Labs GmbH (<code@dswarm.org>)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dswarm.persistence.model.job.utils;
import org.dswarm.persistence.model.job.Function;
import org.dswarm.persistence.model.job.Transformation;
import org.dswarm.persistence.util.DMPPersistenceUtil;
/**
* @author tgaengler
*/
public final class FunctionUtils extends BasicFunctionUtils<Function> {
@Override
public boolean completeEquals(final Function existingObject, final Function newObject) {
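// Transformations carry state beyond a plain Function, so delegate to the dedicated
// transformation utils when both sides are Transformations.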
if (Transformation.class.isInstance(existingObject) && Transformation.class.isInstance(newObject)) {
return DMPPersistenceUtil.getTransformationUtils().completeEquals((Transformation) existingObject, (Transformation) newObject);
}
return super.completeEquals(existingObject, newObject);
}
}
|
/**
* Copyright (c) Facebook, Inc. and its affiliates.
*
* <p>This source code is licensed under the MIT license found in the LICENSE file in the root
* directory of this source tree.
*/
package com.nonnative;
import android.content.Context;
import com.facebook.flipper.android.AndroidFlipperClient;
import com.facebook.flipper.android.utils.FlipperUtils;
import com.facebook.flipper.core.FlipperClient;
import com.facebook.flipper.plugins.crashreporter.CrashReporterPlugin;
import com.facebook.flipper.plugins.databases.DatabasesFlipperPlugin;
import com.facebook.flipper.plugins.fresco.FrescoFlipperPlugin;
import com.facebook.flipper.plugins.inspector.DescriptorMapping;
import com.facebook.flipper.plugins.inspector.InspectorFlipperPlugin;
import com.facebook.flipper.plugins.network.FlipperOkhttpInterceptor;
import com.facebook.flipper.plugins.network.NetworkFlipperPlugin;
import com.facebook.flipper.plugins.react.ReactFlipperPlugin;
import com.facebook.flipper.plugins.sharedpreferences.SharedPreferencesFlipperPlugin;
import com.facebook.react.ReactInstanceManager;
import com.facebook.react.bridge.ReactContext;
import com.facebook.react.modules.network.NetworkingModule;
import okhttp3.OkHttpClient;
public class ReactNativeFlipper {
public static void initializeFlipper(Context context, ReactInstanceManager reactInstanceManager) {
if (FlipperUtils.shouldEnableFlipper(context)) {
final FlipperClient client = AndroidFlipperClient.getInstance(context);
client.addPlugin(new InspectorFlipperPlugin(context, DescriptorMapping.withDefaults()));
client.addPlugin(new ReactFlipperPlugin());
client.addPlugin(new DatabasesFlipperPlugin(context));
client.addPlugin(new SharedPreferencesFlipperPlugin(context));
client.addPlugin(CrashReporterPlugin.getInstance());
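// Route React Native's OkHttp traffic through Flipper's network inspector.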
NetworkFlipperPlugin networkFlipperPlugin = new NetworkFlipperPlugin();
NetworkingModule.setCustomClientBuilder(
new NetworkingModule.CustomClientBuilder() {
@Override
public void apply(OkHttpClient.Builder builder) {
builder.addNetworkInterceptor(new FlipperOkhttpInterceptor(networkFlipperPlugin));
}
});
client.addPlugin(networkFlipperPlugin);
client.start();
// The Fresco plugin needs ImagePipelineFactory to be initialized,
// hence we run it after all native modules have been initialized.
ReactContext reactContext = reactInstanceManager.getCurrentReactContext();
if (reactContext == null) {
reactInstanceManager.addReactInstanceEventListener(
new ReactInstanceManager.ReactInstanceEventListener() {
@Override
public void onReactContextInitialized(ReactContext reactContext) {
reactInstanceManager.removeReactInstanceEventListener(this);
reactContext.runOnNativeModulesQueueThread(
new Runnable() {
@Override
public void run() {
client.addPlugin(new FrescoFlipperPlugin());
}
});
}
});
} else {
client.addPlugin(new FrescoFlipperPlugin());
}
}
}
}
|
package com.mojang.nbt;
import java.io.*;
import com.google.gson.*;
/**
* Inspired by NBT classes given by Mojang AB <a href="https://mojang.com/2012/02/new-minecraft-map-format-anvil/">here</a>
* <br/>Following the <a href="http://web.archive.org/web/20110723210920/http://www.minecraft.net/docs/NBT.txt">specifications created by Markus 'notch' Personn </a>
* @author Mojang AB
*/
public class NBTIntTag extends NBTTag
{
private int value;
protected NBTIntTag(String name)
{
this(name, 0);
}
protected NBTIntTag(String name, int value)
{
super(name);
this.value = value;
}
@Override
public void write(DataOutput dos) throws IOException
{
dos.writeInt(value);
}
@Override
public void read(DataInput dis) throws IOException
{
value = dis.readInt();
}
@Override
public String toString()
{
return "" + value;
}
@Override
public NBTTypes getID()
{
return NBTTypes.INT;
}
@Override
public NBTTag clone()
{
return new NBTIntTag(getName(), value);
}
public int getData()
{
return value;
}
@Override
public boolean equals(Object obj)
{
if(super.equals(obj))
{
NBTIntTag o = (NBTIntTag) obj;
return o.value == value;
}
return false;
}
@Override
public JsonElement toJson()
{
return new JsonPrimitive(value);
}
}
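// Round-trip sketch (not part of the original file): exercises write/read through a
// DataOutput/DataInput pair. Same-package access is assumed, since the constructors
// are protected.
class NBTIntTagExample {
    static int roundTrip() throws IOException {
        NBTIntTag tag = new NBTIntTag("score", 42);
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        tag.write(new DataOutputStream(bytes));
        NBTIntTag copy = new NBTIntTag("score");
        copy.read(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        return copy.getData(); // 42
    }
}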
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.lucene.queries.function;
import java.util.Arrays;
import java.util.List;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.MockAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.CheckHits;
import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.LongValuesSource;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.apache.lucene.util.LuceneTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
public class TestIndexReaderFunctions extends LuceneTestCase {
static Directory dir;
static Analyzer analyzer;
static IndexReader reader;
static IndexSearcher searcher;
static final List<String[]> documents = Arrays.asList(
/* id, double, float, int, long, string, text, double MV (x3), int MV (x3)*/
new String[] { "0", "3.63", "5.2", "35", "4343", "test", "this is a test test test", "2.13", "3.69", "-0.11", "1", "7", "5"},
new String[] { "1", "5.65", "9.3", "54", "1954", "bar", "second test", "12.79", "123.456", "0.01", "12", "900", "-1" });
@BeforeClass
public static void beforeClass() throws Exception {
dir = newDirectory();
analyzer = new MockAnalyzer(random());
IndexWriterConfig iwConfig = newIndexWriterConfig(analyzer);
iwConfig.setMergePolicy(newLogMergePolicy());
RandomIndexWriter iw = new RandomIndexWriter(random(), dir, iwConfig);
for (String [] doc : documents) {
Document document = new Document();
document.add(new StringField("id", doc[0], Field.Store.NO));
document.add(new SortedDocValuesField("id", new BytesRef(doc[0])));
document.add(new StringField("string", doc[5], Field.Store.NO));
document.add(new SortedDocValuesField("string", new BytesRef(doc[5])));
document.add(new TextField("text", doc[6], Field.Store.NO));
iw.addDocument(document);
}
reader = iw.getReader();
searcher = newSearcher(reader);
iw.close();
}
@AfterClass
public static void afterClass() throws Exception {
IOUtils.close(reader, dir, analyzer);
searcher = null;
reader = null;
dir = null;
analyzer = null;
}
public void testDocFreq() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.docFreq(new Term("text", "test"));
assertHits(vs, new float[] { 2f, 2f });
assertEquals("docFreq(text:test)", vs.toString());
assertCacheable(vs, false);
}
public void testMaxDoc() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.maxDoc();
assertHits(vs, new float[] { 2f, 2f });
assertEquals("maxDoc()", vs.toString());
assertCacheable(vs, false);
}
public void testNumDocs() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.numDocs();
assertHits(vs, new float[] { 2f, 2f });
assertEquals("numDocs()", vs.toString());
assertCacheable(vs, false);
}
public void testSumTotalTermFreq() throws Exception {
LongValuesSource vs = IndexReaderFunctions.sumTotalTermFreq("text");
assertHits(vs.toDoubleValuesSource(), new float[] { 8f, 8f });
assertEquals("sumTotalTermFreq(text)", vs.toString());
assertCacheable(vs, false);
}
public void testTermFreq() throws Exception {
assertHits(IndexReaderFunctions.termFreq(new Term("string", "bar")), new float[] { 0f, 1f });
assertHits(IndexReaderFunctions.termFreq(new Term("text", "test")), new float[] { 3f, 1f });
assertHits(IndexReaderFunctions.termFreq(new Term("bogus", "bogus")), new float[] { 0F, 0F });
assertEquals("termFreq(string:bar)", IndexReaderFunctions.termFreq(new Term("string", "bar")).toString());
assertCacheable(IndexReaderFunctions.termFreq(new Term("text", "test")), true);
}
public void testTotalTermFreq() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.totalTermFreq(new Term("text", "test"));
assertHits(vs, new float[] { 4f, 4f });
assertEquals("totalTermFreq(text:test)", vs.toString());
assertCacheable(vs, false);
}
public void testNumDeletedDocs() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.numDeletedDocs();
assertHits(vs, new float[] { 0, 0 });
assertEquals("numDeletedDocs()", vs.toString());
assertCacheable(vs, false);
}
public void testSumDocFreq() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.sumDocFreq("text");
assertHits(vs, new float[] { 6, 6 });
assertEquals("sumDocFreq(text)", vs.toString());
assertCacheable(vs, false);
}
public void testDocCount() throws Exception {
DoubleValuesSource vs = IndexReaderFunctions.docCount("text");
assertHits(vs, new float[] { 2, 2 });
assertEquals("docCount(text)", vs.toString());
assertCacheable(vs, false);
}
void assertCacheable(DoubleValuesSource vs, boolean expected) throws Exception {
Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), vs);
Weight w = searcher.createWeight(q, true, 1);
LeafReaderContext ctx = reader.leaves().get(0);
assertEquals(expected, w.isCacheable(ctx));
}
void assertCacheable(LongValuesSource vs, boolean expected) throws Exception {
Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), vs.toDoubleValuesSource());
Weight w = searcher.createWeight(q, true, 1);
LeafReaderContext ctx = reader.leaves().get(0);
assertEquals(expected, w.isCacheable(ctx));
}
void assertHits(DoubleValuesSource vs, float scores[]) throws Exception {
Query q = new FunctionScoreQuery(new MatchAllDocsQuery(), vs);
ScoreDoc expected[] = new ScoreDoc[scores.length];
int expectedDocs[] = new int[scores.length];
for (int i = 0; i < expected.length; i++) {
expectedDocs[i] = i;
expected[i] = new ScoreDoc(i, scores[i]);
}
TopDocs docs = searcher.search(q, documents.size(),
new Sort(new SortField("id", SortField.Type.STRING)), true, false);
CheckHits.checkHits(random(), q, "", searcher, expectedDocs);
CheckHits.checkHitsQuery(q, expected, docs.scoreDocs, expectedDocs);
CheckHits.checkExplanations(q, "", searcher);
assertSort(vs, expected);
}
void assertSort(DoubleValuesSource vs, ScoreDoc expected[]) throws Exception {
boolean reversed = random().nextBoolean();
// Use Float.compare: casting the float difference to int would collapse
// fractional differences to zero and corrupt the ordering.
Arrays.sort(expected, (a, b) -> reversed ? Float.compare(b.score, a.score) : Float.compare(a.score, b.score));
int[] expectedDocs = new int[expected.length];
for (int i = 0; i < expected.length; i++) {
expectedDocs[i] = expected[i].doc;
}
TopDocs docs = searcher.search(new MatchAllDocsQuery(), expected.length,
new Sort(vs.getSortField(reversed)));
CheckHits.checkHitsQuery(new MatchAllDocsQuery(), expected, docs.scoreDocs, expectedDocs);
}
}
|
package gov.faa.ang.swac.common.flightmodeling;
public enum RerouteTrigger {
ASCENT_END,
POLY_CROSSING,
AIRPORT_CONFIG,
ARRIVAL_SHORTENING,
CDA,
FIX_BALANCING,
OCEANIC_CLIMB,
OCEANIC_DECENT,
OCEANIC_BLOCKED_CLIMB;
}
|
/*
* Copyright (c) 2016-2017, Adam <Adam@sigterm.info>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package net.runelite.client.ui.overlay;
import com.google.common.base.Strings;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Graphics2D;
import java.awt.Polygon;
import java.awt.RenderingHints;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.image.BufferedImage;
import net.runelite.api.Actor;
import net.runelite.api.Client;
import net.runelite.api.Perspective;
import net.runelite.api.Point;
import net.runelite.api.TileObject;
import net.runelite.api.coords.LocalPoint;
import net.runelite.client.util.ColorUtil;
/**
* Created by Kyle Fricilone on Jun 09, 2017.
*/
public class OverlayUtil
{
private static final int MINIMAP_DOT_RADIUS = 4;
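// One client map-angle unit is pi/1024 radians, so a full rotation spans 2048 units.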
private static final double UNIT = Math.PI / 1024.0d;
public static void renderPolygon(Graphics2D graphics, Shape poly, Color color)
{
graphics.setColor(color);
final Stroke originalStroke = graphics.getStroke();
graphics.setStroke(new BasicStroke(2));
graphics.draw(poly);
graphics.setColor(new Color(0, 0, 0, 50));
graphics.fill(poly);
graphics.setStroke(originalStroke);
}
public static void renderPolygonFilled(Graphics2D graphics, Polygon poly, Color color, int fillAlpha)
{
graphics.setColor(color);
final Stroke originalStroke = graphics.getStroke();
graphics.setStroke(new BasicStroke(2));
graphics.drawPolygon(poly);
graphics.setColor(new Color(color.getRed(), color.getGreen(), color.getBlue(), fillAlpha));
graphics.fillPolygon(poly);
graphics.setStroke(originalStroke);
}
public static void renderMinimapLocation(Graphics2D graphics, Point mini, Color color)
{
graphics.setColor(Color.BLACK);
graphics.fillOval(mini.getX() - MINIMAP_DOT_RADIUS / 2, mini.getY() - MINIMAP_DOT_RADIUS / 2 + 1, MINIMAP_DOT_RADIUS, MINIMAP_DOT_RADIUS);
graphics.setColor(ColorUtil.colorWithAlpha(color, 0xFF));
graphics.fillOval(mini.getX() - MINIMAP_DOT_RADIUS / 2, mini.getY() - MINIMAP_DOT_RADIUS / 2, MINIMAP_DOT_RADIUS, MINIMAP_DOT_RADIUS);
}
public static void renderMinimapRect(Client client, Graphics2D graphics, Point center, int width, int height, Color color)
{
double angle = client.getMapAngle() * UNIT;
graphics.setColor(color);
graphics.rotate(angle, center.getX(), center.getY());
graphics.drawRect(center.getX() - width / 2, center.getY() - height / 2, width, height);
graphics.rotate(-angle , center.getX(), center.getY());
}
public static void renderTextLocation(Graphics2D graphics, Point txtLoc, String text, Color color)
{
if (Strings.isNullOrEmpty(text))
{
return;
}
int x = txtLoc.getX();
int y = txtLoc.getY();
graphics.setColor(Color.BLACK);
graphics.drawString(text, x + 1, y + 1);
graphics.setColor(ColorUtil.colorWithAlpha(color, 0xFF));
graphics.drawString(text, x, y);
}
public static void renderImageLocation(Client client, Graphics2D graphics, LocalPoint localPoint, BufferedImage image, int zOffset)
{
net.runelite.api.Point imageLocation = Perspective.getCanvasImageLocation(client, localPoint, image, zOffset);
if (imageLocation != null)
{
renderImageLocation(graphics, imageLocation, image);
}
}
public static void renderImageLocation(Graphics2D graphics, Point imgLoc, BufferedImage image)
{
int x = imgLoc.getX();
int y = imgLoc.getY();
graphics.drawImage(image, x, y, null);
}
public static void renderActorOverlay(Graphics2D graphics, Actor actor, String text, Color color)
{
Polygon poly = actor.getCanvasTilePoly();
if (poly != null)
{
renderPolygon(graphics, poly, color);
}
Point textLocation = actor.getCanvasTextLocation(graphics, text, actor.getLogicalHeight() + 40);
if (textLocation != null)
{
renderTextLocation(graphics, textLocation, text, color);
}
}
public static void renderActorOverlayImage(Graphics2D graphics, Actor actor, BufferedImage image, Color color, int zOffset)
{
Polygon poly = actor.getCanvasTilePoly();
if (poly != null)
{
renderPolygon(graphics, poly, color);
}
Point imageLocation = actor.getCanvasImageLocation(image, zOffset);
if (imageLocation != null)
{
renderImageLocation(graphics, imageLocation, image);
}
}
public static void renderTileOverlay(Graphics2D graphics, TileObject tileObject, String text, Color color)
{
Polygon poly = tileObject.getCanvasTilePoly();
if (poly != null)
{
renderPolygon(graphics, poly, color);
}
Point minimapLocation = tileObject.getMinimapLocation();
if (minimapLocation != null)
{
renderMinimapLocation(graphics, minimapLocation, color);
}
Point textLocation = tileObject.getCanvasTextLocation(graphics, text, 0);
if (textLocation != null)
{
renderTextLocation(graphics, textLocation, text, color);
}
}
public static void renderTileOverlay(Client client, Graphics2D graphics, LocalPoint localLocation, BufferedImage image, Color color)
{
Polygon poly = Perspective.getCanvasTilePoly(client, localLocation);
if (poly != null)
{
renderPolygon(graphics, poly, color);
}
renderImageLocation(client, graphics, localLocation, image, 0);
}
public static void renderHoverableArea(Graphics2D graphics, Shape area, net.runelite.api.Point mousePosition, Color fillColor, Color borderColor, Color borderHoverColor)
{
if (area != null)
{
if (area.contains(mousePosition.getX(), mousePosition.getY()))
{
graphics.setColor(borderHoverColor);
}
else
{
graphics.setColor(borderColor);
}
graphics.draw(area);
graphics.setColor(fillColor);
graphics.fill(area);
}
}
public static void setGraphicProperties(Graphics2D graphics)
{
graphics.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
}
public static java.awt.Point padPosition(OverlayPosition position, Dimension dimension, final int padding)
{
final java.awt.Point result = new java.awt.Point();
switch (position)
{
case DYNAMIC:
case TOOLTIP:
break;
case BOTTOM_LEFT:
result.x += dimension.width + (dimension.width == 0 ? 0 : padding);
break;
case BOTTOM_RIGHT:
result.x -= dimension.width + (dimension.width == 0 ? 0 : padding);
break;
case TOP_LEFT:
case TOP_CENTER:
case CANVAS_TOP_RIGHT:
case TOP_RIGHT:
result.y += dimension.height + (dimension.height == 0 ? 0 : padding);
break;
case ABOVE_CHATBOX_RIGHT:
result.y -= dimension.height + (dimension.height == 0 ? 0 : padding);
break;
}
return result;
}
public static java.awt.Point transformPosition(OverlayPosition position, Dimension dimension)
{
final java.awt.Point result = new java.awt.Point();
switch (position)
{
case DYNAMIC:
case TOOLTIP:
case TOP_LEFT:
break;
case TOP_CENTER:
result.x = -dimension.width / 2;
break;
case BOTTOM_LEFT:
result.y = -dimension.height;
break;
case BOTTOM_RIGHT:
case ABOVE_CHATBOX_RIGHT:
result.y = -dimension.height;
// FALLTHROUGH
case CANVAS_TOP_RIGHT:
case TOP_RIGHT:
result.x = -dimension.width;
break;
}
return result;
}
}
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/dashboard/v1/layouts.proto
package com.google.monitoring.dashboard.v1;
/**
* <pre>
* A simplified layout that divides the available space into vertical columns
* and arranges a set of widgets vertically in each column.
* </pre>
*
* Protobuf type {@code google.monitoring.dashboard.v1.ColumnLayout}
*/
public final class ColumnLayout extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.monitoring.dashboard.v1.ColumnLayout)
ColumnLayoutOrBuilder {
private static final long serialVersionUID = 0L;
// Use ColumnLayout.newBuilder() to construct.
private ColumnLayout(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ColumnLayout() {
columns_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ColumnLayout();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private ColumnLayout(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
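          // Wire tags encode (field_number << 3) | wire_type, so the 10
          // below is field 1 ("columns") with wire type 2 (length-delimited),
          // and a tag of 0 marks the end of this message's bytes.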
case 0:
done = true;
break;
case 10: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
columns_ = new java.util.ArrayList<com.google.monitoring.dashboard.v1.ColumnLayout.Column>();
mutable_bitField0_ |= 0x00000001;
}
columns_.add(
input.readMessage(com.google.monitoring.dashboard.v1.ColumnLayout.Column.parser(), extensionRegistry));
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
columns_ = java.util.Collections.unmodifiableList(columns_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.dashboard.v1.ColumnLayout.class, com.google.monitoring.dashboard.v1.ColumnLayout.Builder.class);
}
public interface ColumnOrBuilder extends
// @@protoc_insertion_point(interface_extends:google.monitoring.dashboard.v1.ColumnLayout.Column)
com.google.protobuf.MessageOrBuilder {
/**
* <pre>
* The relative weight of this column. The column weight is used to adjust
* the width of columns on the screen (relative to peers).
   * The greater the weight, the greater the width of the column on the screen.
* If omitted, a value of 1 is used while rendering.
* </pre>
*
* <code>int64 weight = 1;</code>
* @return The weight.
*/
long getWeight();
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
java.util.List<com.google.monitoring.dashboard.v1.Widget>
getWidgetsList();
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
com.google.monitoring.dashboard.v1.Widget getWidgets(int index);
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
int getWidgetsCount();
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
java.util.List<? extends com.google.monitoring.dashboard.v1.WidgetOrBuilder>
getWidgetsOrBuilderList();
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
com.google.monitoring.dashboard.v1.WidgetOrBuilder getWidgetsOrBuilder(
int index);
}
/**
* <pre>
* Defines the layout properties and content for a column.
* </pre>
*
* Protobuf type {@code google.monitoring.dashboard.v1.ColumnLayout.Column}
*/
public static final class Column extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.monitoring.dashboard.v1.ColumnLayout.Column)
ColumnOrBuilder {
private static final long serialVersionUID = 0L;
// Use Column.newBuilder() to construct.
private Column(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Column() {
widgets_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new Column();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Column(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
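            // Tags again follow (field_number << 3) | wire_type: 8 is
            // field 1 ("weight", varint) and 18 is field 2 ("widgets",
            // length-delimited).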
case 0:
done = true;
break;
case 8: {
weight_ = input.readInt64();
break;
}
case 18: {
if (!((mutable_bitField0_ & 0x00000001) != 0)) {
widgets_ = new java.util.ArrayList<com.google.monitoring.dashboard.v1.Widget>();
mutable_bitField0_ |= 0x00000001;
}
widgets_.add(
input.readMessage(com.google.monitoring.dashboard.v1.Widget.parser(), extensionRegistry));
break;
}
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
if (((mutable_bitField0_ & 0x00000001) != 0)) {
widgets_ = java.util.Collections.unmodifiableList(widgets_);
}
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_Column_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_Column_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.dashboard.v1.ColumnLayout.Column.class, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder.class);
}
public static final int WEIGHT_FIELD_NUMBER = 1;
private long weight_;
/**
* <pre>
* The relative weight of this column. The column weight is used to adjust
* the width of columns on the screen (relative to peers).
   * The greater the weight, the greater the width of the column on the screen.
* If omitted, a value of 1 is used while rendering.
* </pre>
*
* <code>int64 weight = 1;</code>
* @return The weight.
*/
@java.lang.Override
public long getWeight() {
return weight_;
}
public static final int WIDGETS_FIELD_NUMBER = 2;
private java.util.List<com.google.monitoring.dashboard.v1.Widget> widgets_;
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.monitoring.dashboard.v1.Widget> getWidgetsList() {
return widgets_;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.monitoring.dashboard.v1.WidgetOrBuilder>
getWidgetsOrBuilderList() {
return widgets_;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
@java.lang.Override
public int getWidgetsCount() {
return widgets_.size();
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
@java.lang.Override
public com.google.monitoring.dashboard.v1.Widget getWidgets(int index) {
return widgets_.get(index);
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
@java.lang.Override
public com.google.monitoring.dashboard.v1.WidgetOrBuilder getWidgetsOrBuilder(
int index) {
return widgets_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (weight_ != 0L) {
output.writeInt64(1, weight_);
}
for (int i = 0; i < widgets_.size(); i++) {
output.writeMessage(2, widgets_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (weight_ != 0L) {
size += com.google.protobuf.CodedOutputStream
.computeInt64Size(1, weight_);
}
for (int i = 0; i < widgets_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, widgets_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.monitoring.dashboard.v1.ColumnLayout.Column)) {
return super.equals(obj);
}
com.google.monitoring.dashboard.v1.ColumnLayout.Column other = (com.google.monitoring.dashboard.v1.ColumnLayout.Column) obj;
if (getWeight()
!= other.getWeight()) return false;
if (!getWidgetsList()
.equals(other.getWidgetsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + WEIGHT_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(
getWeight());
if (getWidgetsCount() > 0) {
hash = (37 * hash) + WIDGETS_FIELD_NUMBER;
hash = (53 * hash) + getWidgetsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.monitoring.dashboard.v1.ColumnLayout.Column prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Defines the layout properties and content for a column.
* </pre>
*
* Protobuf type {@code google.monitoring.dashboard.v1.ColumnLayout.Column}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.monitoring.dashboard.v1.ColumnLayout.Column)
com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_Column_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_Column_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.dashboard.v1.ColumnLayout.Column.class, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder.class);
}
// Construct using com.google.monitoring.dashboard.v1.ColumnLayout.Column.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getWidgetsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
weight_ = 0L;
if (widgetsBuilder_ == null) {
widgets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
widgetsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_Column_descriptor;
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout.Column getDefaultInstanceForType() {
return com.google.monitoring.dashboard.v1.ColumnLayout.Column.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout.Column build() {
com.google.monitoring.dashboard.v1.ColumnLayout.Column result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout.Column buildPartial() {
com.google.monitoring.dashboard.v1.ColumnLayout.Column result = new com.google.monitoring.dashboard.v1.ColumnLayout.Column(this);
int from_bitField0_ = bitField0_;
result.weight_ = weight_;
if (widgetsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
widgets_ = java.util.Collections.unmodifiableList(widgets_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.widgets_ = widgets_;
} else {
result.widgets_ = widgetsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.dashboard.v1.ColumnLayout.Column) {
return mergeFrom((com.google.monitoring.dashboard.v1.ColumnLayout.Column)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.dashboard.v1.ColumnLayout.Column other) {
if (other == com.google.monitoring.dashboard.v1.ColumnLayout.Column.getDefaultInstance()) return this;
if (other.getWeight() != 0L) {
setWeight(other.getWeight());
}
if (widgetsBuilder_ == null) {
if (!other.widgets_.isEmpty()) {
if (widgets_.isEmpty()) {
widgets_ = other.widgets_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureWidgetsIsMutable();
widgets_.addAll(other.widgets_);
}
onChanged();
}
} else {
if (!other.widgets_.isEmpty()) {
if (widgetsBuilder_.isEmpty()) {
widgetsBuilder_.dispose();
widgetsBuilder_ = null;
widgets_ = other.widgets_;
bitField0_ = (bitField0_ & ~0x00000001);
widgetsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getWidgetsFieldBuilder() : null;
} else {
widgetsBuilder_.addAllMessages(other.widgets_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.monitoring.dashboard.v1.ColumnLayout.Column parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.monitoring.dashboard.v1.ColumnLayout.Column) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private long weight_ ;
/**
* <pre>
* The relative weight of this column. The column weight is used to adjust
* the width of columns on the screen (relative to peers).
   * The greater the weight, the greater the width of the column on the screen.
* If omitted, a value of 1 is used while rendering.
* </pre>
*
* <code>int64 weight = 1;</code>
* @return The weight.
*/
@java.lang.Override
public long getWeight() {
return weight_;
}
/**
* <pre>
* The relative weight of this column. The column weight is used to adjust
* the width of columns on the screen (relative to peers).
   * The greater the weight, the greater the width of the column on the screen.
* If omitted, a value of 1 is used while rendering.
* </pre>
*
* <code>int64 weight = 1;</code>
* @param value The weight to set.
* @return This builder for chaining.
*/
public Builder setWeight(long value) {
weight_ = value;
onChanged();
return this;
}
/**
* <pre>
* The relative weight of this column. The column weight is used to adjust
* the width of columns on the screen (relative to peers).
   * The greater the weight, the greater the width of the column on the screen.
* If omitted, a value of 1 is used while rendering.
* </pre>
*
* <code>int64 weight = 1;</code>
* @return This builder for chaining.
*/
public Builder clearWeight() {
weight_ = 0L;
onChanged();
return this;
}
private java.util.List<com.google.monitoring.dashboard.v1.Widget> widgets_ =
java.util.Collections.emptyList();
private void ensureWidgetsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
widgets_ = new java.util.ArrayList<com.google.monitoring.dashboard.v1.Widget>(widgets_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.dashboard.v1.Widget, com.google.monitoring.dashboard.v1.Widget.Builder, com.google.monitoring.dashboard.v1.WidgetOrBuilder> widgetsBuilder_;
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public java.util.List<com.google.monitoring.dashboard.v1.Widget> getWidgetsList() {
if (widgetsBuilder_ == null) {
return java.util.Collections.unmodifiableList(widgets_);
} else {
return widgetsBuilder_.getMessageList();
}
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public int getWidgetsCount() {
if (widgetsBuilder_ == null) {
return widgets_.size();
} else {
return widgetsBuilder_.getCount();
}
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public com.google.monitoring.dashboard.v1.Widget getWidgets(int index) {
if (widgetsBuilder_ == null) {
return widgets_.get(index);
} else {
return widgetsBuilder_.getMessage(index);
}
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder setWidgets(
int index, com.google.monitoring.dashboard.v1.Widget value) {
if (widgetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWidgetsIsMutable();
widgets_.set(index, value);
onChanged();
} else {
widgetsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder setWidgets(
int index, com.google.monitoring.dashboard.v1.Widget.Builder builderForValue) {
if (widgetsBuilder_ == null) {
ensureWidgetsIsMutable();
widgets_.set(index, builderForValue.build());
onChanged();
} else {
widgetsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder addWidgets(com.google.monitoring.dashboard.v1.Widget value) {
if (widgetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWidgetsIsMutable();
widgets_.add(value);
onChanged();
} else {
widgetsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder addWidgets(
int index, com.google.monitoring.dashboard.v1.Widget value) {
if (widgetsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureWidgetsIsMutable();
widgets_.add(index, value);
onChanged();
} else {
widgetsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder addWidgets(
com.google.monitoring.dashboard.v1.Widget.Builder builderForValue) {
if (widgetsBuilder_ == null) {
ensureWidgetsIsMutable();
widgets_.add(builderForValue.build());
onChanged();
} else {
widgetsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder addWidgets(
int index, com.google.monitoring.dashboard.v1.Widget.Builder builderForValue) {
if (widgetsBuilder_ == null) {
ensureWidgetsIsMutable();
widgets_.add(index, builderForValue.build());
onChanged();
} else {
widgetsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder addAllWidgets(
java.lang.Iterable<? extends com.google.monitoring.dashboard.v1.Widget> values) {
if (widgetsBuilder_ == null) {
ensureWidgetsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, widgets_);
onChanged();
} else {
widgetsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder clearWidgets() {
if (widgetsBuilder_ == null) {
widgets_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
widgetsBuilder_.clear();
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public Builder removeWidgets(int index) {
if (widgetsBuilder_ == null) {
ensureWidgetsIsMutable();
widgets_.remove(index);
onChanged();
} else {
widgetsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public com.google.monitoring.dashboard.v1.Widget.Builder getWidgetsBuilder(
int index) {
return getWidgetsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public com.google.monitoring.dashboard.v1.WidgetOrBuilder getWidgetsOrBuilder(
int index) {
if (widgetsBuilder_ == null) {
        return widgets_.get(index);
      } else {
return widgetsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public java.util.List<? extends com.google.monitoring.dashboard.v1.WidgetOrBuilder>
getWidgetsOrBuilderList() {
if (widgetsBuilder_ != null) {
return widgetsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(widgets_);
}
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public com.google.monitoring.dashboard.v1.Widget.Builder addWidgetsBuilder() {
return getWidgetsFieldBuilder().addBuilder(
com.google.monitoring.dashboard.v1.Widget.getDefaultInstance());
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public com.google.monitoring.dashboard.v1.Widget.Builder addWidgetsBuilder(
int index) {
return getWidgetsFieldBuilder().addBuilder(
index, com.google.monitoring.dashboard.v1.Widget.getDefaultInstance());
}
/**
* <pre>
* The display widgets arranged vertically in this column.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.Widget widgets = 2;</code>
*/
public java.util.List<com.google.monitoring.dashboard.v1.Widget.Builder>
getWidgetsBuilderList() {
return getWidgetsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.dashboard.v1.Widget, com.google.monitoring.dashboard.v1.Widget.Builder, com.google.monitoring.dashboard.v1.WidgetOrBuilder>
getWidgetsFieldBuilder() {
if (widgetsBuilder_ == null) {
widgetsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.dashboard.v1.Widget, com.google.monitoring.dashboard.v1.Widget.Builder, com.google.monitoring.dashboard.v1.WidgetOrBuilder>(
widgets_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
widgets_ = null;
}
return widgetsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.dashboard.v1.ColumnLayout.Column)
}
// @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ColumnLayout.Column)
private static final com.google.monitoring.dashboard.v1.ColumnLayout.Column DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.dashboard.v1.ColumnLayout.Column();
}
public static com.google.monitoring.dashboard.v1.ColumnLayout.Column getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Column>
PARSER = new com.google.protobuf.AbstractParser<Column>() {
@java.lang.Override
public Column parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Column(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Column> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Column> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout.Column getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public static final int COLUMNS_FIELD_NUMBER = 1;
private java.util.List<com.google.monitoring.dashboard.v1.ColumnLayout.Column> columns_;
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.monitoring.dashboard.v1.ColumnLayout.Column> getColumnsList() {
return columns_;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder>
getColumnsOrBuilderList() {
return columns_;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
@java.lang.Override
public int getColumnsCount() {
return columns_.size();
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout.Column getColumns(int index) {
return columns_.get(index);
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder getColumnsOrBuilder(
int index) {
return columns_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < columns_.size(); i++) {
output.writeMessage(1, columns_.get(i));
}
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < columns_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, columns_.get(i));
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.monitoring.dashboard.v1.ColumnLayout)) {
return super.equals(obj);
}
com.google.monitoring.dashboard.v1.ColumnLayout other = (com.google.monitoring.dashboard.v1.ColumnLayout) obj;
if (!getColumnsList()
.equals(other.getColumnsList())) return false;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getColumnsCount() > 0) {
hash = (37 * hash) + COLUMNS_FIELD_NUMBER;
hash = (53 * hash) + getColumnsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.dashboard.v1.ColumnLayout parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.monitoring.dashboard.v1.ColumnLayout prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A simplified layout that divides the available space into vertical columns
* and arranges a set of widgets vertically in each column.
* </pre>
*
* Protobuf type {@code google.monitoring.dashboard.v1.ColumnLayout}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.monitoring.dashboard.v1.ColumnLayout)
com.google.monitoring.dashboard.v1.ColumnLayoutOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.dashboard.v1.ColumnLayout.class, com.google.monitoring.dashboard.v1.ColumnLayout.Builder.class);
}
// Construct using com.google.monitoring.dashboard.v1.ColumnLayout.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getColumnsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (columnsBuilder_ == null) {
columns_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
columnsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.monitoring.dashboard.v1.LayoutsProto.internal_static_google_monitoring_dashboard_v1_ColumnLayout_descriptor;
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout getDefaultInstanceForType() {
return com.google.monitoring.dashboard.v1.ColumnLayout.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout build() {
com.google.monitoring.dashboard.v1.ColumnLayout result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout buildPartial() {
com.google.monitoring.dashboard.v1.ColumnLayout result = new com.google.monitoring.dashboard.v1.ColumnLayout(this);
int from_bitField0_ = bitField0_;
if (columnsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
columns_ = java.util.Collections.unmodifiableList(columns_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.columns_ = columns_;
} else {
result.columns_ = columnsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.dashboard.v1.ColumnLayout) {
return mergeFrom((com.google.monitoring.dashboard.v1.ColumnLayout)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.dashboard.v1.ColumnLayout other) {
if (other == com.google.monitoring.dashboard.v1.ColumnLayout.getDefaultInstance()) return this;
if (columnsBuilder_ == null) {
if (!other.columns_.isEmpty()) {
if (columns_.isEmpty()) {
columns_ = other.columns_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureColumnsIsMutable();
columns_.addAll(other.columns_);
}
onChanged();
}
} else {
if (!other.columns_.isEmpty()) {
if (columnsBuilder_.isEmpty()) {
columnsBuilder_.dispose();
columnsBuilder_ = null;
columns_ = other.columns_;
bitField0_ = (bitField0_ & ~0x00000001);
columnsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getColumnsFieldBuilder() : null;
} else {
columnsBuilder_.addAllMessages(other.columns_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.monitoring.dashboard.v1.ColumnLayout parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.monitoring.dashboard.v1.ColumnLayout) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private java.util.List<com.google.monitoring.dashboard.v1.ColumnLayout.Column> columns_ =
java.util.Collections.emptyList();
private void ensureColumnsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
columns_ = new java.util.ArrayList<com.google.monitoring.dashboard.v1.ColumnLayout.Column>(columns_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.dashboard.v1.ColumnLayout.Column, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder, com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder> columnsBuilder_;
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public java.util.List<com.google.monitoring.dashboard.v1.ColumnLayout.Column> getColumnsList() {
if (columnsBuilder_ == null) {
return java.util.Collections.unmodifiableList(columns_);
} else {
return columnsBuilder_.getMessageList();
}
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public int getColumnsCount() {
if (columnsBuilder_ == null) {
return columns_.size();
} else {
return columnsBuilder_.getCount();
}
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public com.google.monitoring.dashboard.v1.ColumnLayout.Column getColumns(int index) {
if (columnsBuilder_ == null) {
return columns_.get(index);
} else {
return columnsBuilder_.getMessage(index);
}
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder setColumns(
int index, com.google.monitoring.dashboard.v1.ColumnLayout.Column value) {
if (columnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnsIsMutable();
columns_.set(index, value);
onChanged();
} else {
columnsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder setColumns(
int index, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder builderForValue) {
if (columnsBuilder_ == null) {
ensureColumnsIsMutable();
columns_.set(index, builderForValue.build());
onChanged();
} else {
columnsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder addColumns(com.google.monitoring.dashboard.v1.ColumnLayout.Column value) {
if (columnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnsIsMutable();
columns_.add(value);
onChanged();
} else {
columnsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder addColumns(
int index, com.google.monitoring.dashboard.v1.ColumnLayout.Column value) {
if (columnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureColumnsIsMutable();
columns_.add(index, value);
onChanged();
} else {
columnsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder addColumns(
com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder builderForValue) {
if (columnsBuilder_ == null) {
ensureColumnsIsMutable();
columns_.add(builderForValue.build());
onChanged();
} else {
columnsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder addColumns(
int index, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder builderForValue) {
if (columnsBuilder_ == null) {
ensureColumnsIsMutable();
columns_.add(index, builderForValue.build());
onChanged();
} else {
columnsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder addAllColumns(
java.lang.Iterable<? extends com.google.monitoring.dashboard.v1.ColumnLayout.Column> values) {
if (columnsBuilder_ == null) {
ensureColumnsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, columns_);
onChanged();
} else {
columnsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder clearColumns() {
if (columnsBuilder_ == null) {
columns_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
columnsBuilder_.clear();
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public Builder removeColumns(int index) {
if (columnsBuilder_ == null) {
ensureColumnsIsMutable();
columns_.remove(index);
onChanged();
} else {
columnsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder getColumnsBuilder(
int index) {
return getColumnsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder getColumnsOrBuilder(
int index) {
if (columnsBuilder_ == null) {
        return columns_.get(index);
      } else {
return columnsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public java.util.List<? extends com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder>
getColumnsOrBuilderList() {
if (columnsBuilder_ != null) {
return columnsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(columns_);
}
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder addColumnsBuilder() {
return getColumnsFieldBuilder().addBuilder(
com.google.monitoring.dashboard.v1.ColumnLayout.Column.getDefaultInstance());
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder addColumnsBuilder(
int index) {
return getColumnsFieldBuilder().addBuilder(
index, com.google.monitoring.dashboard.v1.ColumnLayout.Column.getDefaultInstance());
}
/**
* <pre>
* The columns of content to display.
* </pre>
*
* <code>repeated .google.monitoring.dashboard.v1.ColumnLayout.Column columns = 1;</code>
*/
public java.util.List<com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder>
getColumnsBuilderList() {
return getColumnsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.dashboard.v1.ColumnLayout.Column, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder, com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder>
getColumnsFieldBuilder() {
if (columnsBuilder_ == null) {
columnsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.dashboard.v1.ColumnLayout.Column, com.google.monitoring.dashboard.v1.ColumnLayout.Column.Builder, com.google.monitoring.dashboard.v1.ColumnLayout.ColumnOrBuilder>(
columns_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
columns_ = null;
}
return columnsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.dashboard.v1.ColumnLayout)
}
// @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ColumnLayout)
private static final com.google.monitoring.dashboard.v1.ColumnLayout DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.dashboard.v1.ColumnLayout();
}
public static com.google.monitoring.dashboard.v1.ColumnLayout getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ColumnLayout>
PARSER = new com.google.protobuf.AbstractParser<ColumnLayout>() {
@java.lang.Override
public ColumnLayout parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ColumnLayout(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ColumnLayout> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ColumnLayout> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.dashboard.v1.ColumnLayout getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
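// Illustrative usage sketch (editor's addition): exercising the generated
// builder API above with two weighted columns and a wire-format round trip.
// The class below is hypothetical; every method it calls appears in the
// generated code above.
class ColumnLayoutUsageSketch {
  public static void main(String[] args) throws Exception {
    com.google.monitoring.dashboard.v1.ColumnLayout layout =
        com.google.monitoring.dashboard.v1.ColumnLayout.newBuilder()
            .addColumns(com.google.monitoring.dashboard.v1.ColumnLayout.Column.newBuilder()
                .setWeight(2L)  // rendered twice as wide as a weight-1 peer
                .addWidgets(com.google.monitoring.dashboard.v1.Widget.getDefaultInstance()))
            .addColumns(com.google.monitoring.dashboard.v1.ColumnLayout.Column.newBuilder()
                // weight omitted: a value of 1 is used while rendering
                .addWidgets(com.google.monitoring.dashboard.v1.Widget.getDefaultInstance()))
            .build();
    byte[] wire = layout.toByteArray();
    com.google.monitoring.dashboard.v1.ColumnLayout parsed =
        com.google.monitoring.dashboard.v1.ColumnLayout.parseFrom(wire);
    System.out.println(parsed.getColumnsCount());         // 2
    System.out.println(parsed.getColumns(0).getWeight()); // 2
  }
}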
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.linkedin.pinot.broker.requesthandler;
import com.linkedin.pinot.broker.api.RequestStatistics;
import com.linkedin.pinot.broker.broker.AccessControlFactory;
import com.linkedin.pinot.broker.queryquota.TableQueryQuotaManager;
import com.linkedin.pinot.broker.routing.RoutingTable;
import com.linkedin.pinot.broker.routing.TimeBoundaryService;
import com.linkedin.pinot.common.config.TableNameBuilder;
import com.linkedin.pinot.common.metrics.BrokerMeter;
import com.linkedin.pinot.common.metrics.BrokerMetrics;
import com.linkedin.pinot.common.metrics.BrokerQueryPhase;
import com.linkedin.pinot.common.request.BrokerRequest;
import com.linkedin.pinot.common.response.BrokerResponse;
import com.linkedin.pinot.common.response.ServerInstance;
import com.linkedin.pinot.common.response.broker.BrokerResponseNative;
import com.linkedin.pinot.common.utils.CommonConstants.Helix.TableType;
import com.linkedin.pinot.common.utils.DataTable;
import com.linkedin.pinot.core.transport.AsyncQueryResponse;
import com.linkedin.pinot.core.transport.QueryRouter;
import com.linkedin.pinot.core.transport.Server;
import com.linkedin.pinot.core.transport.ServerResponse;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.annotation.Nullable;
import javax.annotation.concurrent.ThreadSafe;
import org.apache.commons.configuration.Configuration;
/**
* The <code>SingleConnectionBrokerRequestHandler</code> class is a thread-safe broker request handler using a single
* connection per server to route the queries.
*/
@ThreadSafe
public class SingleConnectionBrokerRequestHandler extends BaseBrokerRequestHandler {
private final QueryRouter _queryRouter;
public SingleConnectionBrokerRequestHandler(Configuration config, RoutingTable routingTable,
TimeBoundaryService timeBoundaryService, AccessControlFactory accessControlFactory,
TableQueryQuotaManager tableQueryQuotaManager, BrokerMetrics brokerMetrics) {
super(config, routingTable, timeBoundaryService, accessControlFactory, tableQueryQuotaManager, brokerMetrics);
_queryRouter = new QueryRouter(_brokerId, brokerMetrics);
}
@Override
public void start() {
}
@Override
public synchronized void shutDown() {
_queryRouter.shutDown();
}
@Override
protected BrokerResponse processBrokerRequest(long requestId, BrokerRequest originalBrokerRequest,
@Nullable BrokerRequest offlineBrokerRequest, @Nullable Map<String, List<String>> offlineRoutingTable,
@Nullable BrokerRequest realtimeBrokerRequest, @Nullable Map<String, List<String>> realtimeRoutingTable,
long timeoutMs, ServerStats serverStats, RequestStatistics requestStatistics) throws Exception {
assert offlineBrokerRequest != null || realtimeBrokerRequest != null;
String rawTableName = TableNameBuilder.extractRawTableName(originalBrokerRequest.getQuerySource().getTableName());
long scatterGatherStartTimeNs = System.nanoTime();
AsyncQueryResponse asyncQueryResponse =
_queryRouter.submitQuery(requestId, rawTableName, offlineBrokerRequest, offlineRoutingTable,
realtimeBrokerRequest, realtimeRoutingTable, timeoutMs);
Map<Server, ServerResponse> response = asyncQueryResponse.getResponse();
_brokerMetrics.addPhaseTiming(rawTableName, BrokerQueryPhase.SCATTER_GATHER,
System.nanoTime() - scatterGatherStartTimeNs);
// TODO Use scatterGatherStats as serverStats
serverStats.setServerStats(asyncQueryResponse.getStats());
// TODO: do not convert Server to ServerInstance
int numServersQueried = response.size();
long totalResponseSize = 0;
Map<ServerInstance, DataTable> dataTableMap = new HashMap<>(numServersQueried);
for (Map.Entry<Server, ServerResponse> entry : response.entrySet()) {
ServerResponse serverResponse = entry.getValue();
DataTable dataTable = serverResponse.getDataTable();
if (dataTable != null) {
Server server = entry.getKey();
// Keep offline and realtime responses under distinct keys: the third ServerInstance
// argument is 0 for OFFLINE tables and 1 for REALTIME tables.
if (server.getTableType() == TableType.OFFLINE) {
dataTableMap.put(new ServerInstance(server.getHostName(), server.getPort(), 0), dataTable);
} else {
dataTableMap.put(new ServerInstance(server.getHostName(), server.getPort(), 1), dataTable);
}
totalResponseSize += serverResponse.getResponseSize();
}
}
int numServersResponded = dataTableMap.size();
long reduceStartTimeNs = System.nanoTime();
BrokerResponseNative brokerResponse =
_brokerReduceService.reduceOnDataTable(originalBrokerRequest, dataTableMap, _brokerMetrics);
final long reduceTimeNanos = System.nanoTime() - reduceStartTimeNs;
requestStatistics.setReduceTimeNanos(reduceTimeNanos);
_brokerMetrics.addPhaseTiming(rawTableName, BrokerQueryPhase.REDUCE, reduceTimeNanos);
brokerResponse.setNumServersQueried(numServersQueried);
brokerResponse.setNumServersResponded(numServersResponded);
if (brokerResponse.getExceptionsSize() > 0) {
_brokerMetrics.addMeteredTableValue(rawTableName, BrokerMeter.BROKER_RESPONSES_WITH_PROCESSING_EXCEPTIONS, 1);
}
if (numServersQueried > numServersResponded) {
_brokerMetrics.addMeteredTableValue(rawTableName, BrokerMeter.BROKER_RESPONSES_WITH_PARTIAL_SERVERS_RESPONDED, 1);
}
_brokerMetrics.addMeteredTableValue(rawTableName, BrokerMeter.TOTAL_SERVER_RESPONSE_SIZE, totalResponseSize);
return brokerResponse;
}
}
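// A minimal, self-contained sketch of the scatter-gather timing pattern used in
// processBrokerRequest above: fan a request out to several workers, block until all
// respond, and record the elapsed wall time for the phase. Plain JDK only; none of the
// Pinot types are used, and the worker payloads are illustrative stand-ins.
class ScatterGatherTimingSketch {
  public static void main(String[] args) throws Exception {
    java.util.concurrent.ExecutorService pool = java.util.concurrent.Executors.newFixedThreadPool(4);
    java.util.List<java.util.concurrent.Future<String>> futures = new java.util.ArrayList<>();
    long startNs = System.nanoTime();
    for (int server = 0; server < 4; server++) {
      final int id = server;
      futures.add(pool.submit(() -> "response-from-server-" + id)); // scatter
    }
    java.util.List<String> responses = new java.util.ArrayList<>();
    for (java.util.concurrent.Future<String> future : futures) {
      responses.add(future.get()); // gather: blocks until each worker responds
    }
    long scatterGatherNs = System.nanoTime() - startNs; // phase timing, as in the handler
    System.out.println(responses.size() + " responses in " + scatterGatherNs + " ns");
    pool.shutdown();
  }
}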
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.fineract.portfolio.account.data;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.StringUtils;
import org.apache.fineract.infrastructure.core.api.JsonCommand;
import org.apache.fineract.infrastructure.core.data.ApiParameterError;
import org.apache.fineract.infrastructure.core.data.DataValidatorBuilder;
import org.apache.fineract.infrastructure.core.exception.InvalidJsonException;
import org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import org.apache.fineract.infrastructure.core.serialization.FromJsonHelper;
import org.apache.fineract.portfolio.account.AccountDetailConstants;
import org.apache.fineract.portfolio.account.api.AccountTransfersApiConstants;
import org.joda.time.LocalDate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.google.gson.JsonElement;
import com.google.gson.reflect.TypeToken;
@Component
public class AccountTransfersDataValidator {
private final FromJsonHelper fromApiJsonHelper;
private final AccountTransfersDetailDataValidator accountTransfersDetailDataValidator;
private static final Set<String> REQUEST_DATA_PARAMETERS = new HashSet<>(Arrays.asList(
AccountDetailConstants.localeParamName, AccountDetailConstants.dateFormatParamName,
AccountDetailConstants.fromOfficeIdParamName, AccountDetailConstants.fromClientIdParamName,
AccountDetailConstants.fromAccountTypeParamName, AccountDetailConstants.fromAccountIdParamName,
AccountDetailConstants.toOfficeIdParamName, AccountDetailConstants.toClientIdParamName,
AccountDetailConstants.toAccountTypeParamName, AccountDetailConstants.toAccountIdParamName,
AccountTransfersApiConstants.transferDateParamName, AccountTransfersApiConstants.transferAmountParamName,
AccountTransfersApiConstants.transferDescriptionParamName));
@Autowired
public AccountTransfersDataValidator(final FromJsonHelper fromApiJsonHelper,
final AccountTransfersDetailDataValidator accountTransfersDetailDataValidator) {
this.fromApiJsonHelper = fromApiJsonHelper;
this.accountTransfersDetailDataValidator = accountTransfersDetailDataValidator;
}
public void validate(final JsonCommand command) {
final String json = command.json();
if (StringUtils.isBlank(json)) { throw new InvalidJsonException(); }
final Type typeOfMap = new TypeToken<Map<String, Object>>() {}.getType();
this.fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, json, REQUEST_DATA_PARAMETERS);
final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
final DataValidatorBuilder baseDataValidator = new DataValidatorBuilder(dataValidationErrors)
.resource(AccountTransfersApiConstants.ACCOUNT_TRANSFER_RESOURCE_NAME);
final JsonElement element = command.parsedJson();
this.accountTransfersDetailDataValidator.validate(command, baseDataValidator);
final LocalDate transactionDate = this.fromApiJsonHelper
.extractLocalDateNamed(AccountTransfersApiConstants.transferDateParamName, element);
baseDataValidator.reset().parameter(AccountTransfersApiConstants.transferDateParamName)
.value(transactionDate).notNull();
final BigDecimal transactionAmount = this.fromApiJsonHelper
.extractBigDecimalWithLocaleNamed(AccountTransfersApiConstants.transferAmountParamName, element);
baseDataValidator.reset().parameter(AccountTransfersApiConstants.transferAmountParamName)
.value(transactionAmount).notNull().positiveAmount();
final String transactionDescription = this.fromApiJsonHelper
.extractStringNamed(AccountTransfersApiConstants.transferDescriptionParamName, element);
baseDataValidator.reset().parameter(AccountTransfersApiConstants.transferDescriptionParamName)
.value(transactionDescription).notBlank().notExceedingLengthOf(200);
throwExceptionIfValidationWarningsExist(dataValidationErrors);
}
private void throwExceptionIfValidationWarningsExist(final List<ApiParameterError> dataValidationErrors) {
if (!dataValidationErrors.isEmpty()) { throw new PlatformApiDataValidationException(dataValidationErrors); }
}
}
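// A minimal sketch of the collect-then-throw validation pattern used by validate() above:
// each check appends to a shared error list, and one exception carrying every error is
// thrown at the end, so the caller sees all problems at once. Plain JDK stand-ins only;
// no Fineract types.
class CollectThenThrowSketch {
  public static void main(String[] args) {
    java.util.List<String> errors = new java.util.ArrayList<>();
    String description = ""; // stand-in for a value extracted from the JSON payload
    java.math.BigDecimal amount = java.math.BigDecimal.valueOf(-5);
    if (description == null || description.trim().isEmpty()) {
      errors.add("transferDescription: must not be blank");
    }
    if (amount == null || amount.signum() <= 0) {
      errors.add("transferAmount: must be a positive amount");
    }
    if (!errors.isEmpty()) { // mirrors throwExceptionIfValidationWarningsExist
      throw new IllegalArgumentException("Validation failed: " + errors);
    }
  }
}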
|
////////////////////////////////////////////////////////////
//
// Anime Warfare
// Copyright (C) 2016 TiWinDeTea - contact@tiwindetea.org
//
// This software is provided 'as-is', without any express or implied warranty.
// In no event will the authors be held liable for any damages arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it freely,
// subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented;
// you must not claim that you wrote the original software.
// If you use this software in a product, an acknowledgment
// in the product documentation would be appreciated but is not required.
//
// 2. Altered source versions must be plainly marked as such,
// and must not be misrepresented as being the original software.
//
// 3. This notice may not be removed or altered from any source distribution.
//
////////////////////////////////////////////////////////////
package org.tiwindetea.animewarfare.net;
import org.tiwindetea.animewarfare.logic.FactionType;
import java.io.Serializable;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
* This class represents a game room with players.
* It basically wraps the game name, its members,
* the address and port of the server, and whether it
* requires a password or not.
*
* @author Lucas Lazare
* @since 0.1.0
*/
public class Room implements Serializable, Comparable<Room> {
private String gameName;
private transient String gamePassword;
private final ArrayList<GameClientInfo> members = new ArrayList<>();
private boolean isLocked;
private transient InetAddress address;
private int port;
private int numberOfExpectedPlayers = -1;
private final Map<GameClientInfo, FactionType> selections = new HashMap<>(4, 1);
private final Map<GameClientInfo, FactionType> locks = new HashMap<>(4, 1);
private static int NEXT_ID_VAL = 0;
private final int ROOM_ID = NEXT_ID_VAL++;
Room() {
this.gameName = null;
this.gamePassword = null;
this.isLocked = false;
}
Room(String gameName) {
this.gameName = gameName;
this.gamePassword = null;
this.isLocked = false;
}
Room(String gameName, String gamePassword) {
this.gameName = gameName;
this.gamePassword = gamePassword;
this.isLocked = (gamePassword != null);
}
Room(String gameName, String gamePassword, List<String> members) {
this.gameName = gameName;
this.gamePassword = gamePassword;
this.isLocked = (gamePassword != null);
}
/**
* @return true if the room has a password, false otherwise
*/
public boolean isLocked() {
return this.isLocked;
}
/**
* @return the name of the game
*/
public String getGameName() {
return this.gameName;
}
/**
* @return An unmodifiable list of the members of this room
*
* @see Collections#unmodifiableList(List)
*/
public List<GameClientInfo> getMembers() {
return Collections.unmodifiableList(this.members);
}
/**
* Searches for a player in the room
*
* @param id id of the player
* @return The player, or null if there is no such player.
*/
public GameClientInfo find(int id) {
for (GameClientInfo member : this.members) {
if (member.getId() == id) {
return member;
}
}
return null;
}
/**
* Finds players in the room
*
* @param players the ids of the players to look for
* @return A list containing found players
*/
public List<GameClientInfo> findAll(List<Integer> players) {
LinkedList<GameClientInfo> ans = new LinkedList<>();
GameClientInfo currentPlayer;
for (Integer playerId : players) {
currentPlayer = find(playerId.intValue());
if (currentPlayer != null) {
ans.add(currentPlayer);
}
}
return ans;
}
/**
* @return the address of the server that has this room
*/
public InetAddress getAddress() {
return this.address;
}
/**
* @return the port of the server
*/
public int getPort() {
return this.port;
}
/**
* @return The number of expected players for this game. (-1 if unset)
*/
public int getNumberOfExpectedPlayers() {
return this.numberOfExpectedPlayers;
}
/**
* @return An unmodifiable map of information about players' selections,
* as specified in {@link Collections#unmodifiableMap(Map)}
*/
public Map<GameClientInfo, FactionType> getSelections() {
return Collections.unmodifiableMap(this.selections);
}
/**
* @return An unmodifiable map of information about players' locked factions,
* as specified in {@link Collections#unmodifiableMap(Map)}
*/
public Map<GameClientInfo, FactionType> getLocks() {
return Collections.unmodifiableMap(this.locks);
}
Map<GameClientInfo, FactionType> modifiableSelection() {
return this.selections;
}
Map<GameClientInfo, FactionType> modifiableLocks() {
return this.locks;
}
void addMember(GameClientInfo info) {
if (this.isFull()) {
throw new IllegalStateException("Trying to add a member into a full room");
}
this.members.add(info);
}
boolean checkPassword(String password) {
// Null-safe comparison: a room without a password matches only a null password.
return (this.gamePassword == null) ? password == null : this.gamePassword.equals(password);
}
void removeMember(GameClientInfo member) {
this.removeMember(member.id);
}
GameClientInfo removeMember(int id) {
int i = 0;
Iterator<GameClientInfo> iterator = this.members.iterator();
while (iterator.hasNext() && iterator.next().getId() != id) {
++i;
}
if (i < this.members.size()) {
return this.members.remove(i);
} else {
return null;
}
}
void clear() {
this.members.clear();
this.locks.clear();
this.selections.clear();
}
void setGameName(String gameName) {
this.gameName = gameName;
}
void setGamePassword(String gamePassword) {
this.gamePassword = gamePassword;
this.isLocked = gamePassword != null;
}
void setAddress(InetAddress address) {
this.address = address;
}
void setPort(int port) {
this.port = port;
}
void setNumberOfExpectedPlayers(int numberOfExpectedPlayers) {
this.numberOfExpectedPlayers = numberOfExpectedPlayers;
}
boolean isFull() {
return this.numberOfExpectedPlayers == this.members.size();
}
String getGamePassword() {
return this.gamePassword;
}
boolean updateable(Room r) {
return this.ROOM_ID == r.ROOM_ID && !this.members.equals(r.members);
}
/**
* {@inheritDoc}
*/
@Override
public String toString() {
return this.gameName;
}
@Override
public boolean equals(Object o) {
if (o instanceof Room) {
return this.equals((Room) o);
}
return false;
}
public boolean equals(Room room) {
// address is transient and may be null locally; compare it null-safely.
return this.ROOM_ID == room.ROOM_ID
&& (this.address == null ? room.address == null : this.address.equals(room.address))
&& this.port == room.port;
}
@Override
public int hashCode() {
return (this.address == null ? 0 : this.address.hashCode()) + this.port;
}
@Override
public int compareTo(Room o) {
return this.ROOM_ID - o.ROOM_ID;
}
}
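// A minimal sketch of the unmodifiable-view pattern used by getMembers(), getSelections()
// and getLocks() above: callers read the live collection but cannot mutate it. Plain JDK
// types only.
class UnmodifiableViewSketch {
  public static void main(String[] args) {
    java.util.List<String> members = new java.util.ArrayList<>();
    members.add("alice");
    java.util.List<String> view = java.util.Collections.unmodifiableList(members);
    members.add("bob"); // the view reflects later additions by the owner...
    System.out.println(view); // prints [alice, bob]
    try {
      view.add("mallory"); // ...but cannot be mutated through the view
    } catch (UnsupportedOperationException expected) {
      System.out.println("view is read-only");
    }
  }
}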
|
/* (c) Copyright 2021 by Volker Bergmann. All rights reserved. */
package com.rapiddweller.common.cli;
/**
* Represents a command line flag which may be true or false.<br/><br/>
* Created: 21.10.2021 15:36:51
* @author Volker Bergmann
* @since 1.1.4
*/
public class CommandLineFlag extends CommandLineItem {
public CommandLineFlag(String property, String longName, String shortName) {
super(property, longName, shortName);
}
}
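// A minimal sketch of how a boolean flag like the one above is typically consumed during
// parsing: the presence of its long or short name flips the property to true. The option
// names below are illustrative assumptions, not rapiddweller-common APIs.
class FlagParsingSketch {
  public static void main(String[] args) {
    boolean verbose = false;
    String[] argv = {"--verbose", "input.txt"}; // stand-in for a real command line
    for (String arg : argv) {
      if ("--verbose".equals(arg) || "-v".equals(arg)) {
        verbose = true; // flag present => true; absent => stays false
      }
    }
    System.out.println("verbose=" + verbose);
  }
}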
|
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;
import com.google.devtools.build.lib.actions.Action;
import com.google.devtools.build.lib.actions.ActionCacheChecker.Token;
import com.google.devtools.build.lib.actions.ActionCompletionEvent;
import com.google.devtools.build.lib.actions.ActionExecutedEvent;
import com.google.devtools.build.lib.actions.ActionExecutedEvent.ErrorTiming;
import com.google.devtools.build.lib.actions.ActionExecutionContext;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputDepOwnerMap;
import com.google.devtools.build.lib.actions.ActionInputDepOwners;
import com.google.devtools.build.lib.actions.ActionInputMap;
import com.google.devtools.build.lib.actions.ActionInputMapSink;
import com.google.devtools.build.lib.actions.ActionLookupData;
import com.google.devtools.build.lib.actions.ActionRewoundEvent;
import com.google.devtools.build.lib.actions.AlreadyReportedActionExecutionException;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ArtifactPathResolver;
import com.google.devtools.build.lib.actions.DiscoveredInputsEvent;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.FilesetOutputSymlink;
import com.google.devtools.build.lib.actions.LostInputsActionExecutionException;
import com.google.devtools.build.lib.actions.MissingDepException;
import com.google.devtools.build.lib.actions.MissingInputFileException;
import com.google.devtools.build.lib.actions.PackageRootResolver;
import com.google.devtools.build.lib.actions.SpawnMetrics;
import com.google.devtools.build.lib.actionsketch.ActionSketch;
import com.google.devtools.build.lib.analysis.BlazeDirectories;
import com.google.devtools.build.lib.bugreport.BugReport;
import com.google.devtools.build.lib.causes.Cause;
import com.google.devtools.build.lib.causes.LabelCause;
import com.google.devtools.build.lib.clock.BlazeClock;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.PackageIdentifier;
import com.google.devtools.build.lib.collect.compacthashset.CompactHashSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.collect.nestedset.Order;
import com.google.devtools.build.lib.events.ExtendedEventHandler;
import com.google.devtools.build.lib.profiler.Profiler;
import com.google.devtools.build.lib.profiler.ProfilerTask;
import com.google.devtools.build.lib.profiler.SilentCloseable;
import com.google.devtools.build.lib.rules.cpp.IncludeScannable;
import com.google.devtools.build.lib.server.FailureDetails.Execution;
import com.google.devtools.build.lib.server.FailureDetails.Execution.Code;
import com.google.devtools.build.lib.server.FailureDetails.FailureDetail;
import com.google.devtools.build.lib.skyframe.ActionRewindStrategy.RewindPlan;
import com.google.devtools.build.lib.skyframe.ArtifactFunction.MissingFileArtifactValue;
import com.google.devtools.build.lib.skyframe.ArtifactNestedSetFunction.ArtifactNestedSetEvalException;
import com.google.devtools.build.lib.skyframe.SkyframeActionExecutor.ActionPostprocessing;
import com.google.devtools.build.lib.syntax.StarlarkSemantics;
import com.google.devtools.build.lib.util.DetailedExitCode;
import com.google.devtools.build.lib.util.Pair;
import com.google.devtools.build.lib.util.io.FileOutErr;
import com.google.devtools.build.lib.util.io.TimestampGranularityMonitor;
import com.google.devtools.build.lib.vfs.FileSystem;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Root;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException;
import com.google.devtools.build.skyframe.ValueOrException2;
import com.google.devtools.build.skyframe.ValueOrException3;
import java.io.IOException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.IntFunction;
import javax.annotation.Nullable;
/**
* A {@link SkyFunction} that creates {@link ActionExecutionValue}s. There are four points where
* this function can abort due to missing values in the graph:
*
* <ol>
* <li>For actions that discover inputs, if missing metadata needed to resolve an artifact from a
* string input in the action cache.
* <li>If missing metadata for artifacts in inputs (including the artifacts above).
* <li>For actions that discover inputs, if missing metadata for inputs discovered prior to
* execution.
* <li>For actions that discover inputs, but do so during execution, if missing metadata for
* inputs discovered during execution.
* </ol>
*/
public class ActionExecutionFunction implements SkyFunction {
private final ActionRewindStrategy actionRewindStrategy = new ActionRewindStrategy();
private final SkyframeActionExecutor skyframeActionExecutor;
private final BlazeDirectories directories;
private final AtomicReference<TimestampGranularityMonitor> tsgm;
private ConcurrentMap<Action, ContinuationState> stateMap;
public ActionExecutionFunction(
SkyframeActionExecutor skyframeActionExecutor,
BlazeDirectories directories,
AtomicReference<TimestampGranularityMonitor> tsgm) {
this.skyframeActionExecutor = skyframeActionExecutor;
this.directories = directories;
this.tsgm = tsgm;
// TODO(b/136156191): This stays in RAM while the SkyFunction of the action is pending, which
// can result in a lot of memory pressure if a lot of actions are pending.
stateMap = Maps.newConcurrentMap();
}
@Override
public SkyValue compute(SkyKey skyKey, Environment env)
throws ActionExecutionFunctionException, InterruptedException {
ActionLookupData actionLookupData = (ActionLookupData) skyKey.argument();
Action action = ActionUtils.getActionForLookupData(env, actionLookupData);
if (action == null) {
return null;
}
skyframeActionExecutor.noteActionEvaluationStarted(actionLookupData, action);
if (SkyframeActionExecutor.actionDependsOnBuildId(action)) {
PrecomputedValue.BUILD_ID.get(env);
}
if (skyframeActionExecutor.isBazelRemoteExecutionEnabled()) {
// Declaring a dependency on the precomputed value so that all actions are invalidated if
// the value of the flag changes. We do this conditionally, only in Bazel when remote
// execution is available, to avoid introducing additional skyframe edges in Blaze.
PrecomputedValue.REMOTE_OUTPUTS_MODE.get(env);
PrecomputedValue.REMOTE_DEFAULT_PLATFORM_PROPERTIES.get(env);
}
// Look up the parts of the environment that influence the action.
Map<SkyKey, SkyValue> clientEnvLookup =
env.getValues(
Iterables.transform(
action.getClientEnvironmentVariables(), ClientEnvironmentFunction::key));
if (env.valuesMissing()) {
return null;
}
Map<String, String> clientEnv = new HashMap<>();
for (Map.Entry<SkyKey, SkyValue> entry : clientEnvLookup.entrySet()) {
ClientEnvironmentValue envValue = (ClientEnvironmentValue) entry.getValue();
if (envValue.getValue() != null) {
clientEnv.put((String) entry.getKey().argument(), envValue.getValue());
}
}
ActionSketch sketch = null;
TopDownActionCache topDownActionCache = skyframeActionExecutor.getTopDownActionCache();
if (topDownActionCache != null) {
sketch = (ActionSketch) env.getValue(ActionSketchFunction.key(actionLookupData));
if (sketch == null) {
return null;
}
ActionExecutionValue actionExecutionValue = topDownActionCache.get(sketch);
if (actionExecutionValue != null) {
return actionExecutionValue.transformForSharedAction(action.getOutputs());
}
}
// For restarts of this ActionExecutionFunction we use a ContinuationState variable, below, to
// avoid redoing work.
//
// However, if two actions are shared and the first one executes, when the
// second one goes to execute, we should detect that and short-circuit, even without taking
// ContinuationState into account.
//
// Additionally, if an action restarted (in the Skyframe sense) after it executed because it
// discovered new inputs during execution, we should detect that and short-circuit.
ActionExecutionState previousExecution = skyframeActionExecutor.probeActionExecution(action);
// If this action was previously completed this build, then this evaluation must be happening
// because of rewinding. Prevent any ProgressLike events from being published a second time for
// this action; downstream consumers of action events reasonably don't expect them.
env = getProgressEventSuppressingEnvironmentIfPreviouslyCompleted(action, env);
if (action.discoversInputs()) {
// If this action previously failed due to a lost input found during input discovery, ensure
// that the input is regenerated before attempting discovery again.
if (declareDepsOnLostDiscoveredInputsIfAny(env, action)) {
return null;
}
}
ContinuationState state;
if (action.discoversInputs()) {
state = getState(action);
} else {
// Because this is a new state, all conditionals below about whether state has already done
// something will return false, and so we will execute all necessary steps.
state = new ContinuationState();
}
if (!state.hasCollectedInputs()) {
state.allInputs = collectInputs(action, env);
state.requestedArtifactNestedSetKeys = null;
if (state.allInputs == null) {
// Missing deps.
return null;
}
} else if (state.allInputs.keysRequested != null) {
// Preserve the invariant that we ask for the same deps each build.
env.getValues(state.allInputs.keysRequested);
Preconditions.checkState(!env.valuesMissing(), "%s %s", action, state);
}
CheckInputResults checkedInputs = null;
@Nullable
ImmutableSet<Artifact> mandatoryInputs =
action.discoversInputs() ? action.getMandatoryInputs().toSet() : null;
NestedSet<Artifact> allInputs = state.allInputs.getAllInputs();
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps = getInputDeps(env, allInputs, state);
// If there's a missing value.
if (inputDeps == null) {
return null;
}
try {
if (previousExecution == null && !state.hasArtifactData()) {
// Do we actually need to find our metadata?
checkedInputs = checkInputs(env, action, inputDeps, allInputs, mandatoryInputs);
}
} catch (ActionExecutionException e) {
// Remove action from state map in case it's there (won't be unless it discovers inputs).
stateMap.remove(action);
throw new ActionExecutionFunctionException(e);
}
if (env.valuesMissing()) {
// There was missing artifact metadata in the graph. Wait for it to be present.
// We must check this and return here before attempting to establish any Skyframe dependencies
// of the action; see establishSkyframeDependencies why.
return null;
}
Object skyframeDepsResult;
try {
skyframeDepsResult = establishSkyframeDependencies(env, action);
} catch (ActionExecutionException e) {
// Remove action from state map in case it's there (won't be unless it discovers inputs).
stateMap.remove(action);
throw new ActionExecutionFunctionException(
skyframeActionExecutor.processAndGetExceptionToThrow(
env.getListener(),
/*primaryOutputPath=*/ null,
action,
actionLookupData,
e,
new FileOutErr(),
ErrorTiming.BEFORE_EXECUTION));
}
if (env.valuesMissing()) {
return null;
}
if (checkedInputs != null) {
Preconditions.checkState(!state.hasArtifactData(), "%s %s", state, action);
state.inputArtifactData = checkedInputs.actionInputMap;
state.expandedArtifacts = checkedInputs.expandedArtifacts;
state.filesetsInsideRunfiles = checkedInputs.filesetsInsideRunfiles;
state.topLevelFilesets = checkedInputs.topLevelFilesets;
if (skyframeActionExecutor.actionFileSystemType().isEnabled()) {
state.actionFileSystem =
skyframeActionExecutor.createActionFileSystem(
directories.getRelativeOutputPath(),
checkedInputs.actionInputMap,
action.getOutputs(),
env.restartPermitted());
}
}
long actionStartTime = BlazeClock.nanoTime();
ActionExecutionValue result;
try {
result =
checkCacheAndExecuteIfNeeded(
action,
state,
env,
clientEnv,
actionLookupData,
previousExecution,
skyframeDepsResult,
actionStartTime);
} catch (LostInputsActionExecutionException e) {
return handleLostInputs(
e, actionLookupData, action, actionStartTime, env, inputDeps, allInputs, state);
} catch (ActionExecutionException e) {
// Remove action from state map in case it's there (won't be unless it discovers inputs).
stateMap.remove(action);
// In this case we do not report the error to the action reporter because we have already
// done so in the SkyframeActionExecutor.reportErrorIfNotAbortingMode() method. That method
// prints the error in the top-level reporter and also dumps the recorded stderr for the
// action. Label can be null in the case of, e.g., the SystemActionOwner (for build-info.txt).
throw new ActionExecutionFunctionException(new AlreadyReportedActionExecutionException(e));
}
if (env.valuesMissing()) {
// Only input-discovering actions are present in the stateMap. Other actions may have
// valuesMissing() here in rare circumstances related to Fileset inputs being unavailable.
// See comments in ActionInputMapHelper#getFilesets().
Preconditions.checkState(!action.discoversInputs() || stateMap.containsKey(action), action);
return null;
}
// Remove action from state map in case it's there (won't be unless it discovers inputs).
stateMap.remove(action);
if (sketch != null && result.dataIsShareable()) {
topDownActionCache.put(sketch, result);
}
return result;
}
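// The restart protocol that compute() follows above, in miniature: a SkyFunction requests
// dependencies and returns null whenever any are not yet available; Skyframe then re-invokes
// it once the missing values exist. A schematic sketch (depKey and combine are illustrative,
// not real APIs):
//
//   SkyValue compute(SkyKey key, Environment env) throws InterruptedException {
//     SkyValue dep = env.getValue(depKey(key)); // declare the dependency
//     if (env.valuesMissing()) {
//       return null; // restart: Skyframe calls compute() again later
//     }
//     return combine(key, dep); // all deps present: produce the node's value
//   }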
/**
* Evaluates the supplied input deps, declaring deps on the action's known inputs. We do this
* unconditionally to maintain our invariant of asking for the same deps each build.
*/
private static Map<
SkyKey,
ValueOrException3<IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
getInputDeps(Environment env, NestedSet<Artifact> allInputs, ContinuationState state)
throws InterruptedException {
if (evalInputsAsNestedSet(allInputs)) {
// We "unwrap" the NestedSet and evaluate the first layer of direct Artifacts here in order
// to save memory:
// - This top layer costs 1 extra ArtifactNestedSetKey node.
// - It's uncommon that 2 actions share the exact same set of inputs
// => the top layer offers little in terms of reusability.
// More details: b/143205147.
Iterable<SkyKey> directKeys = Artifact.keys(allInputs.getLeaves());
if (state.requestedArtifactNestedSetKeys == null) {
state.requestedArtifactNestedSetKeys = CompactHashSet.create();
for (NestedSet<Artifact> nonleaf : allInputs.getNonLeaves()) {
state.requestedArtifactNestedSetKeys.add(
new ArtifactNestedSetKey(nonleaf, nonleaf.toNode()));
}
}
if (ArtifactNestedSetFunction.evalKeysAsOneGroup()) {
return env.getValuesOrThrow(
Iterables.concat(directKeys, state.requestedArtifactNestedSetKeys),
IOException.class,
ActionExecutionException.class,
ArtifactNestedSetEvalException.class);
}
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
directArtifactValuesOrExceptions =
env.getValuesOrThrow(
directKeys,
IOException.class,
ActionExecutionException.class,
ArtifactNestedSetEvalException.class);
env.getValues(state.requestedArtifactNestedSetKeys);
if (env.valuesMissing()) {
return null;
}
ArtifactNestedSetFunction.getInstance()
.getArtifactSkyKeyToValueOrException()
.putAll(directArtifactValuesOrExceptions);
return ArtifactNestedSetFunction.getInstance().getArtifactSkyKeyToValueOrException();
}
return env.getValuesOrThrow(
Artifact.keys(allInputs.toList()),
IOException.class,
ActionExecutionException.class,
ArtifactNestedSetEvalException.class);
}
/**
* Do one traversal of the set to get the size. The traversal costs CPU time so only do it when
* necessary. The default case (without --experimental_nestedset_as_skykey_threshold) will ignore
* this path.
*/
private static boolean evalInputsAsNestedSet(NestedSet<Artifact> inputs) {
int nestedSetSizeThreshold = ArtifactNestedSetFunction.getSizeThreshold();
if (nestedSetSizeThreshold == 1) {
// Don't even flatten in this case.
return true;
}
return nestedSetSizeThreshold > 0
&& (inputs.memoizedFlattenAndGetSize() >= nestedSetSizeThreshold);
}
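// The threshold logic above, restated as a small standalone predicate with sample values
// (illustrative only):
//
//   static boolean evalAsNestedSet(int threshold, int flattenedSize) {
//     if (threshold == 1) { return true; } // never flatten
//     return threshold > 0 && flattenedSize >= threshold;
//   }
//
//   evalAsNestedSet(0, 10) -> false (feature disabled; inputs flattened and requested directly)
//   evalAsNestedSet(1, 10) -> true  (always evaluate as a NestedSet)
//   evalAsNestedSet(5, 4)  -> false (small sets are still flattened)
//   evalAsNestedSet(5, 9)  -> true  (large sets go through ArtifactNestedSetFunction)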
private Environment getProgressEventSuppressingEnvironmentIfPreviouslyCompleted(
Action action, Environment env) {
if (skyframeActionExecutor.probeCompletedAndReset(action)) {
return new ProgressEventSuppressingEnvironment(env);
}
return env;
}
private boolean declareDepsOnLostDiscoveredInputsIfAny(Environment env, Action action)
throws InterruptedException, ActionExecutionFunctionException {
ImmutableList<SkyKey> previouslyLostDiscoveredInputs =
skyframeActionExecutor.getLostDiscoveredInputs(action);
if (previouslyLostDiscoveredInputs != null) {
Map<SkyKey, ValueOrException2<MissingInputFileException, ActionExecutionException>>
lostInputValues =
env.getValuesOrThrow(
previouslyLostDiscoveredInputs,
MissingInputFileException.class,
ActionExecutionException.class);
if (env.valuesMissing()) {
return true;
}
for (Map.Entry<SkyKey, ValueOrException2<MissingInputFileException, ActionExecutionException>>
lostInput : lostInputValues.entrySet()) {
try {
lostInput.getValue().get();
} catch (MissingInputFileException e) {
// MissingInputFileException comes from problems with source artifact construction.
// Rewinding never invalidates source artifacts.
throw new IllegalStateException(
"MissingInputFileException unexpected from rewound generated discovered input. key="
+ lostInput.getKey(),
e);
} catch (ActionExecutionException e) {
throw new ActionExecutionFunctionException(e);
}
}
}
return false;
}
/**
* Clean up state associated with the current action execution attempt and return a {@link
* Restart} value which rewinds the actions that generate the lost inputs.
*/
private SkyFunction.Restart handleLostInputs(
LostInputsActionExecutionException e,
ActionLookupData actionLookupData,
Action action,
long actionStartTime,
Environment env,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
NestedSet<Artifact> allInputs,
ContinuationState state)
throws InterruptedException, ActionExecutionFunctionException {
// Remove action from state map in case it's there (won't be unless it discovers inputs).
stateMap.remove(action);
boolean isPrimaryAction = e.isPrimaryAction(actionLookupData);
RewindPlan rewindPlan = null;
try {
ActionInputDepOwners inputDepOwners =
createAugmentedInputDepOwners(e, action, env, inputDeps, allInputs);
// Collect the set of direct deps of this action which may be responsible for the lost inputs,
// some of which may be discovered.
ImmutableList<SkyKey> lostDiscoveredInputs = ImmutableList.of();
Iterable<? extends SkyKey> failedActionDeps;
if (e.isFromInputDiscovery()) {
// Lost inputs found during input discovery are necessarily ordinary derived artifacts.
// Their keys may not be direct deps yet, but the next time this Skyframe node is evaluated
// they will be. See SkyframeActionExecutor's lostDiscoveredInputsMap.
lostDiscoveredInputs =
e.getLostInputs().values().stream()
.map(i -> (Artifact) i)
.map(Artifact::key)
.collect(ImmutableList.toImmutableList());
failedActionDeps = lostDiscoveredInputs;
} else if (state.discoveredInputs != null) {
failedActionDeps =
Iterables.concat(
inputDeps.keySet(),
Iterables.transform(state.discoveredInputs.toList(), Artifact::key));
} else {
failedActionDeps = inputDeps.keySet();
}
try {
rewindPlan =
actionRewindStrategy.getRewindPlan(
action, actionLookupData, failedActionDeps, e, inputDepOwners, env);
} catch (ActionExecutionException rewindingFailedException) {
// This call to processAndGetExceptionToThrow will emit an ActionExecutedEvent and report
// the error. The previous call to processAndGetExceptionToThrow didn't.
throw new ActionExecutionFunctionException(
new AlreadyReportedActionExecutionException(
skyframeActionExecutor.processAndGetExceptionToThrow(
env.getListener(),
e.getPrimaryOutputPath(),
action,
actionLookupData,
rewindingFailedException,
e.getFileOutErr(),
ActionExecutedEvent.ErrorTiming.AFTER_EXECUTION)));
}
if (isPrimaryAction) {
// This action is the "winner" amongst its set of shared actions. Only it must post events
// and clean up state associated with its shared action set.
if (e.isActionStartedEventAlreadyEmitted()) {
env.getListener().post(new ActionRewoundEvent(actionStartTime, action));
}
skyframeActionExecutor.resetFailedActionExecution(action, lostDiscoveredInputs);
for (Action actionToRestart : rewindPlan.getAdditionalActionsToRestart()) {
skyframeActionExecutor.resetPreviouslyCompletedActionExecution(actionToRestart);
}
}
return rewindPlan.getNodesToRestart();
} finally {
if (rewindPlan == null && isPrimaryAction && e.isActionStartedEventAlreadyEmitted()) {
// Rewinding was unsuccessful. SkyframeActionExecutor's ActionRunner didn't emit an
// ActionCompletionEvent because it hoped rewinding would fix things. Because it won't, this
// must emit one to compensate.
env.getListener()
.post(new ActionCompletionEvent(actionStartTime, action, actionLookupData));
}
}
}
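// The compensation in the finally block above, in miniature: when a recovery attempt fails
// partway, emit the terminal event that was deliberately withheld in anticipation of a
// successful recovery, so downstream consumers still observe a completed action. Schematic
// sketch (names are illustrative):
//
//   RewindPlan plan = null;
//   try {
//     plan = buildRewindPlan(); // may throw
//     return plan.getNodesToRestart();
//   } finally {
//     if (plan == null && startedEventAlreadyEmitted) {
//       listener.post(completionEvent()); // compensate: rewinding won't fix things
//     }
//   }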
/**
* Returns an augmented version of {@code e.getOwners()}'s {@link ActionInputDepOwners}, adding
* ownership information from {@code inputDeps}.
*
* <p>This compensates for how the ownership information in {@code e.getOwners()} is potentially
* incomplete. E.g., it may lack knowledge of a runfiles middleman owning a fileset, even if it
* knows that fileset owns a lost input.
*/
private ActionInputDepOwners createAugmentedInputDepOwners(
LostInputsActionExecutionException e,
Action action,
Environment env,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
NestedSet<Artifact> allInputs)
throws InterruptedException {
Set<ActionInput> lostInputsAndOwnersSoFar = new HashSet<>();
ActionInputDepOwners owners = e.getOwners();
for (ActionInput lostInput : e.getLostInputs().values()) {
lostInputsAndOwnersSoFar.add(lostInput);
lostInputsAndOwnersSoFar.addAll(owners.getDepOwners(lostInput));
}
ActionInputDepOwnerMap inputDepOwners;
try {
inputDepOwners =
getInputDepOwners(
env,
action,
inputDeps,
allInputs,
action.discoversInputs() ? action.getMandatoryInputs().toSet() : null,
lostInputsAndOwnersSoFar);
} catch (ActionExecutionException unexpected) {
// getInputDepOwners should not be able to throw, because it does the same work as
// checkInputs, so if getInputDepOwners throws then checkInputs should have thrown, and if
// checkInputs threw then we shouldn't have reached this point in action execution.
throw new IllegalStateException(unexpected);
}
// Ownership information from inputDeps may be incomplete. Notably, it does not expand
// filesets. Fileset and other ownership relationships should have been captured in the
// exception's ActionInputDepOwners, and this copies that knowledge into the augmented version.
for (ActionInput lostInput : e.getLostInputs().values()) {
for (Artifact depOwner : owners.getDepOwners(lostInput)) {
inputDepOwners.addOwner(lostInput, depOwner);
}
}
return inputDepOwners;
}
/**
* An action's inputs needed for execution. May not just be the result of Action#getInputs(). If
* the action cache's view of this action contains additional inputs, it will request metadata for
* them, so we consider those inputs as dependencies of this action as well. Returns null if some
* dependencies were missing and this ActionExecutionFunction needs to restart.
*/
@Nullable
private AllInputs collectInputs(Action action, Environment env) throws InterruptedException {
NestedSet<Artifact> allKnownInputs = action.getInputs();
if (action.inputsDiscovered()) {
return new AllInputs(allKnownInputs);
}
Preconditions.checkState(action.discoversInputs(), action);
PackageRootResolverWithEnvironment resolver = new PackageRootResolverWithEnvironment(env);
List<Artifact> actionCacheInputs =
skyframeActionExecutor.getActionCachedInputs(action, resolver);
if (actionCacheInputs == null) {
Preconditions.checkState(env.valuesMissing(), action);
return null;
}
return new AllInputs(allKnownInputs, actionCacheInputs, resolver.keysRequested);
}
private static class AllInputs {
final NestedSet<Artifact> defaultInputs;
@Nullable final List<Artifact> actionCacheInputs;
@Nullable final List<SkyKey> keysRequested;
AllInputs(NestedSet<Artifact> defaultInputs) {
this.defaultInputs = checkNotNull(defaultInputs);
this.actionCacheInputs = null;
this.keysRequested = null;
}
AllInputs(
NestedSet<Artifact> defaultInputs,
List<Artifact> actionCacheInputs,
List<SkyKey> keysRequested) {
this.defaultInputs = checkNotNull(defaultInputs);
this.actionCacheInputs = checkNotNull(actionCacheInputs);
this.keysRequested = keysRequested;
}
NestedSet<Artifact> getAllInputs() {
if (actionCacheInputs == null) {
return defaultInputs;
}
NestedSetBuilder<Artifact> builder = new NestedSetBuilder<>(Order.STABLE_ORDER);
// actionCacheInputs is never a NestedSet.
builder.addAll(actionCacheInputs);
builder.addTransitive(defaultInputs);
return builder.build();
}
}
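// getAllInputs() above, in miniature: prepend a flat list of extra elements to a transitive
// collection without flattening the latter. A JDK analogue with sets (NestedSetBuilder shares
// the transitive part instead of copying it, which is the point of the structure):
//
//   java.util.List<String> actionCacheInputs = java.util.Arrays.asList("extra1", "extra2");
//   java.util.Set<String> defaultInputs = new java.util.HashSet<>(java.util.Arrays.asList("a", "b"));
//   java.util.Set<String> all = new java.util.LinkedHashSet<>(actionCacheInputs);
//   all.addAll(defaultInputs); // union of direct + transitive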
/**
* Skyframe implementation of {@link PackageRootResolver}. Should be used only from SkyFunctions,
* because it uses SkyFunction.Environment for evaluation of ContainingPackageLookupValue.
*/
private static class PackageRootResolverWithEnvironment implements PackageRootResolver {
final List<SkyKey> keysRequested = new ArrayList<>();
private final Environment env;
private PackageRootResolverWithEnvironment(Environment env) {
this.env = env;
}
@Override
public Map<PathFragment, Root> findPackageRootsForFiles(Iterable<PathFragment> execPaths)
throws InterruptedException {
Preconditions.checkState(
keysRequested.isEmpty(),
"resolver should only be called once: %s %s",
keysRequested,
execPaths);
StarlarkSemantics starlarkSemantics = PrecomputedValue.STARLARK_SEMANTICS.get(env);
if (starlarkSemantics == null) {
return null;
}
boolean siblingRepositoryLayout = starlarkSemantics.experimentalSiblingRepositoryLayout();
// Create SkyKeys list based on execPaths.
Map<PathFragment, SkyKey> depKeys = new HashMap<>();
for (PathFragment path : execPaths) {
PathFragment parent =
checkNotNull(path.getParentDirectory(), "Must pass in files, not root directory");
Preconditions.checkArgument(!parent.isAbsolute(), path);
SkyKey depKey =
ContainingPackageLookupValue.key(
PackageIdentifier.discoverFromExecPath(path, true, siblingRepositoryLayout));
depKeys.put(path, depKey);
keysRequested.add(depKey);
}
Map<SkyKey, SkyValue> values = env.getValues(depKeys.values());
if (env.valuesMissing()) {
return null;
}
Map<PathFragment, Root> result = new HashMap<>();
for (PathFragment path : execPaths) {
if (!depKeys.containsKey(path)) {
continue;
}
ContainingPackageLookupValue value =
(ContainingPackageLookupValue) values.get(depKeys.get(path));
if (value.hasContainingPackage()) {
// We have found corresponding root for current execPath.
result.put(
path,
SkyframeExecutor.maybeTransformRootForRepository(
value.getContainingPackageRoot(),
value.getContainingPackageName().getRepository()));
} else {
// We haven't found corresponding root for current execPath.
result.put(path, null);
}
}
return result;
}
}
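// The lookup shape of findPackageRootsForFiles above, schematically: build one key per input
// up front, request the whole batch, bail out with null if anything is missing (Skyframe will
// restart the function), then assemble the result, recording an explicit null for paths with
// no containing package. Sketch (types are illustrative, not real Skyframe APIs):
//
//   Map<Path, Key> keys = buildKeys(paths);
//   Map<Key, Value> values = env.getValues(keys.values());
//   if (env.valuesMissing()) { return null; }
//   Map<Path, Root> result = new HashMap<>();
//   for (Path p : paths) {
//     Value v = values.get(keys.get(p));
//     result.put(p, v.hasContainingPackage() ? v.root() : null);
//   }
//   return result;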
private ActionExecutionValue checkCacheAndExecuteIfNeeded(
Action action,
ContinuationState state,
Environment env,
Map<String, String> clientEnv,
ActionLookupData actionLookupData,
@Nullable ActionExecutionState previousAction,
Object skyframeDepsResult,
long actionStartTime)
throws ActionExecutionException, InterruptedException {
if (previousAction != null) {
// There are two cases where we can already have an executing action for a specific output:
// 1. Another instance of a shared action won the race and got executed first.
// 2. The action was already started earlier, and this SkyFunction got restarted since
// there's progress to be made.
// In either case, we must use this continuation to continue. Note that in the first case,
// we don't have any input metadata available, so we couldn't re-execute the action even if we
// wanted to.
return previousAction.getResultOrDependOnFuture(
env,
actionLookupData,
action,
skyframeActionExecutor.getSharedActionCallback(
env.getListener(), state.discoveredInputs != null, action, actionLookupData));
}
ImmutableMap<Artifact, ImmutableList<FilesetOutputSymlink>> expandedFilesets;
if (state.topLevelFilesets == null || state.topLevelFilesets.isEmpty()) {
expandedFilesets = ImmutableMap.copyOf(state.filesetsInsideRunfiles);
} else {
Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsMap =
new HashMap<>(state.filesetsInsideRunfiles);
filesetsMap.putAll(state.topLevelFilesets);
expandedFilesets = ImmutableMap.copyOf(filesetsMap);
}
// The metadataHandler may be recreated if we discover inputs.
ArtifactPathResolver pathResolver =
ArtifactPathResolver.createPathResolver(
state.actionFileSystem, skyframeActionExecutor.getExecRoot());
ActionMetadataHandler metadataHandler =
new ActionMetadataHandler(
state.inputArtifactData,
expandedFilesets,
/* missingArtifactsAllowed= */ action.discoversInputs(),
action.getOutputs(),
tsgm.get(),
pathResolver,
newOutputStore(state),
skyframeActionExecutor.getExecRoot());
// We only need to check the action cache if we haven't done it on a previous run.
if (!state.hasCheckedActionCache()) {
state.token =
skyframeActionExecutor.checkActionCache(
env.getListener(),
action,
metadataHandler,
actionStartTime,
state.allInputs.actionCacheInputs,
clientEnv,
pathResolver);
}
if (state.token == null) {
// We got a hit from the action cache -- no need to execute.
Preconditions.checkState(
!(action instanceof SkyframeAwareAction),
"Error, we're not re-executing a "
+ "SkyframeAwareAction which should be re-executed unconditionally. Action: %s",
action);
return ActionExecutionValue.createFromOutputStore(
metadataHandler.getOutputStore(),
/*outputSymlinks=*/ null,
(action instanceof IncludeScannable)
? ((IncludeScannable) action).getDiscoveredModules()
: null,
SkyframeActionExecutor.actionDependsOnBuildId(action));
}
// Delete the metadataHandler's cache of the action's outputs, since they are being deleted.
metadataHandler.discardOutputMetadata();
if (action.discoversInputs()) {
Duration discoveredInputsDuration = Duration.ZERO;
if (state.discoveredInputs == null) {
try (SilentCloseable c = Profiler.instance().profile(ProfilerTask.INFO, "discoverInputs")) {
try {
state.updateFileSystemContext(
skyframeActionExecutor, env, metadataHandler, ImmutableMap.of());
} catch (IOException e) {
throw new ActionExecutionException(
"Failed to update filesystem context: " + e.getMessage(),
e,
action,
/*catastrophe=*/ false);
}
try {
state.discoveredInputs =
skyframeActionExecutor.discoverInputs(
action,
actionLookupData,
metadataHandler,
skyframeActionExecutor.probeCompletedAndReset(action)
? SkyframeActionExecutor.ProgressEventBehavior.SUPPRESS
: SkyframeActionExecutor.ProgressEventBehavior.EMIT,
env,
state.actionFileSystem);
} catch (IOException e) {
throw new ActionExecutionException(
"Failed during input discovery: " + e.getMessage(),
e,
action,
/*catastrophe=*/ false);
} finally {
discoveredInputsDuration = Duration.ofNanos(BlazeClock.nanoTime() - actionStartTime);
}
Preconditions.checkState(
env.valuesMissing() == (state.discoveredInputs == null),
"discoverInputs() must return null iff requesting more dependencies.");
if (state.discoveredInputs == null) {
return null;
}
} catch (MissingDepException e) {
Preconditions.checkState(env.valuesMissing(), action);
return null;
}
}
switch (addDiscoveredInputs(
state.inputArtifactData,
state.expandedArtifacts,
state.filterKnownDiscoveredInputs(),
env,
action)) {
case VALUES_MISSING:
return null;
case NO_DISCOVERED_DATA:
break;
case DISCOVERED_DATA:
metadataHandler =
new ActionMetadataHandler(
state.inputArtifactData,
expandedFilesets,
/*missingArtifactsAllowed=*/ false,
action.getOutputs(),
tsgm.get(),
pathResolver,
newOutputStore(state),
skyframeActionExecutor.getExecRoot());
// Set the MetadataHandler to accept output information.
metadataHandler.discardOutputMetadata();
}
// When discover inputs completes, post an event with the duration values.
env.getListener()
.post(
new DiscoveredInputsEvent(
SpawnMetrics.Builder.forOtherExec()
.setParseTime(discoveredInputsDuration)
.setTotalTime(discoveredInputsDuration)
.build(),
action,
actionStartTime));
}
try {
state.updateFileSystemContext(skyframeActionExecutor, env, metadataHandler, expandedFilesets);
} catch (IOException e) {
throw new ActionExecutionException(
"Failed to update filesystem context: " + e.getMessage(),
e,
action,
/*catastrophe=*/ false);
}
ActionExecutionContext actionExecutionContext =
skyframeActionExecutor.getContext(
action,
metadataHandler,
skyframeActionExecutor.probeCompletedAndReset(action)
? SkyframeActionExecutor.ProgressEventBehavior.SUPPRESS
: SkyframeActionExecutor.ProgressEventBehavior.EMIT,
Collections.unmodifiableMap(state.expandedArtifacts),
expandedFilesets,
ImmutableMap.copyOf(state.topLevelFilesets),
state.actionFileSystem,
skyframeDepsResult,
env.getListener(),
env.restartPermitted());
ActionExecutionValue result;
try {
result =
skyframeActionExecutor.executeAction(
env,
action,
metadataHandler,
actionStartTime,
actionExecutionContext,
actionLookupData,
new ActionPostprocessingImpl(state),
state.discoveredInputs != null);
} catch (ActionExecutionException e) {
try {
actionExecutionContext.close();
} catch (IOException | RuntimeException e2) {
e.addSuppressed(e2);
}
throw e;
}
if (result != null) {
try {
actionExecutionContext.close();
} catch (IOException e) {
throw new ActionExecutionException(
"Failed to close action output: " + e.getMessage(), e, action, /*catastrophe=*/ false);
}
}
return result;
}
private OutputStore newOutputStore(ContinuationState state) {
Preconditions.checkState(
!skyframeActionExecutor.actionFileSystemType().isEnabled()
|| state.actionFileSystem != null,
"actionFileSystem must not be null");
if (skyframeActionExecutor.actionFileSystemType().inMemoryFileSystem()) {
return new MinimalOutputStore();
}
return new OutputStore();
}
/** Implementation of {@link ActionPostprocessing}. */
private final class ActionPostprocessingImpl implements ActionPostprocessing {
private final ContinuationState state;
ActionPostprocessingImpl(ContinuationState state) {
this.state = state;
}
public void run(
Environment env,
Action action,
ActionMetadataHandler metadataHandler,
Map<String, String> clientEnv)
throws InterruptedException, ActionExecutionException {
if (action.discoversInputs()) {
state.discoveredInputs = action.getInputs();
switch (addDiscoveredInputs(
state.inputArtifactData,
state.expandedArtifacts,
state.filterKnownDiscoveredInputs(),
env,
action)) {
case VALUES_MISSING:
return;
case NO_DISCOVERED_DATA:
break;
case DISCOVERED_DATA:
// We are in the interesting case of an action that discovered its inputs during
// execution, and found some new ones, but the new ones were already present in the
// graph. We must therefore cache the metadata for those new ones.
Map<Artifact, ImmutableList<FilesetOutputSymlink>> expandedFilesets =
new HashMap<>(state.filesetsInsideRunfiles);
expandedFilesets.putAll(state.topLevelFilesets);
metadataHandler =
new ActionMetadataHandler(
state.inputArtifactData,
expandedFilesets,
/*missingArtifactsAllowed=*/ false,
action.getOutputs(),
tsgm.get(),
metadataHandler.getArtifactPathResolver(),
metadataHandler.getOutputStore(),
skyframeActionExecutor.getExecRoot());
}
}
Preconditions.checkState(!env.valuesMissing(), action);
skyframeActionExecutor.updateActionCache(action, metadataHandler, state.token, clientEnv);
}
}
private enum DiscoveredState {
VALUES_MISSING,
NO_DISCOVERED_DATA,
DISCOVERED_DATA
}
private DiscoveredState addDiscoveredInputs(
ActionInputMap inputData,
Map<Artifact, Collection<Artifact>> expandedArtifacts,
Iterable<Artifact> discoveredInputs,
Environment env,
Action actionForError)
throws InterruptedException, ActionExecutionException {
// In most cases we don't need to handle exceptions here, because derived inputs were already
// (transitively) requested, so we shouldn't have reached this point, and non-mandatory inputs
// don't throw exceptions. However, in no-keep-going mode, a missing discovered input results in
// an IOException that won't turn into a MissingFileArtifactValue, so we have to transform it
// here.
Map<SkyKey, ValueOrException<IOException>> nonMandatoryDiscovered =
env.getValuesOrThrow(
Iterables.transform(discoveredInputs, Artifact::key), IOException.class);
if (env.valuesMissing()) {
return DiscoveredState.VALUES_MISSING;
}
if (nonMandatoryDiscovered.isEmpty()) {
return DiscoveredState.NO_DISCOVERED_DATA;
}
for (Artifact input : discoveredInputs) {
SkyValue retrievedMetadata;
try {
retrievedMetadata = nonMandatoryDiscovered.get(Artifact.key(input)).get();
} catch (IOException e) {
if (!input.isSourceArtifact()) {
BugReport.sendBugReport(
new IllegalStateException("Non-source artifact had IOException: " + input, e));
}
MissingFileArtifactValue missingValue =
ArtifactFunction.makeMissingInputFileValue(input, e);
MissingInputFileException missingException = missingValue.getException();
skyframeActionExecutor.printError(
String.format(
"%s: %s", actionForError.getOwner().getLabel(), missingException.getMessage()),
actionForError,
null);
// We don't create a specific cause for the artifact as we do in #handleMissingFile because
// it likely has no label, so we'd have to use the Action's label anyway. Just use the
// default ActionFailed event constructed by ActionExecutionException.
String message = "discovered input file does not exist";
DetailedExitCode code =
createDetailedExitCode(message, Code.DISCOVERED_INPUT_DOES_NOT_EXIST);
throw new ActionExecutionException(message, actionForError, false, code);
}
if (retrievedMetadata instanceof TreeArtifactValue) {
TreeArtifactValue treeValue = (TreeArtifactValue) retrievedMetadata;
expandedArtifacts.put(input, ImmutableSet.copyOf(treeValue.getChildren()));
for (Map.Entry<Artifact.TreeFileArtifact, FileArtifactValue> child :
treeValue.getChildValues().entrySet()) {
inputData.putWithNoDepOwner(child.getKey(), child.getValue());
}
inputData.putWithNoDepOwner(input, treeValue.getSelfData());
} else if (retrievedMetadata instanceof ActionExecutionValue) {
inputData.putWithNoDepOwner(
input, ((ActionExecutionValue) retrievedMetadata).getExistingFileArtifactValue(input));
} else if (retrievedMetadata instanceof MissingFileArtifactValue) {
inputData.putWithNoDepOwner(input, FileArtifactValue.MISSING_FILE_MARKER);
} else if (retrievedMetadata instanceof FileArtifactValue) {
inputData.putWithNoDepOwner(input, (FileArtifactValue) retrievedMetadata);
} else {
throw new IllegalStateException(
"unknown metadata for " + input.getExecPathString() + ": " + retrievedMetadata);
}
}
return DiscoveredState.DISCOVERED_DATA;
}
private static <E extends Exception> Object establishSkyframeDependencies(
Environment env, Action action) throws ActionExecutionException, InterruptedException {
// Before we may safely establish Skyframe dependencies, we must build all action inputs by
// requesting their ArtifactValues.
// This is very important to do, because the establishSkyframeDependencies method may request
// FileValues for input files of this action (directly requesting them, or requesting some other
// SkyValue whose builder requests FileValues), which may not yet exist if their generating
// actions have not yet run.
// See SkyframeAwareActionTest.testRaceConditionBetweenInputAcquisitionAndSkyframeDeps
Preconditions.checkState(!env.valuesMissing(), action);
if (action instanceof SkyframeAwareAction) {
// Skyframe-aware actions should be executed unconditionally, i.e. bypass action cache
// checking. See documentation of SkyframeAwareAction.
Preconditions.checkState(action.executeUnconditionally(), action);
@SuppressWarnings("unchecked")
SkyframeAwareAction<E> skyframeAwareAction = (SkyframeAwareAction<E>) action;
ImmutableList<? extends SkyKey> keys = skyframeAwareAction.getDirectSkyframeDependencies();
Map<SkyKey, ValueOrException<E>> values =
env.getValuesOrThrow(keys, skyframeAwareAction.getExceptionType());
try {
return skyframeAwareAction.processSkyframeValues(keys, values, env.valuesMissing());
} catch (SkyframeAwareAction.ExceptionBase e) {
throw new ActionExecutionException(
e, action, false, DetailedExitCode.of(e.getFailureDetail()));
}
}
return null;
}
private static class CheckInputResults {
/** Metadata about Artifacts consumed by this Action. */
private final ActionInputMap actionInputMap;
/** Artifact expansion mapping for Runfiles tree and tree artifacts. */
private final Map<Artifact, Collection<Artifact>> expandedArtifacts;
/** Artifact expansion mapping for Filesets embedded in Runfiles. */
private final Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsInsideRunfiles;
/** Artifact expansion mapping for top level filesets. */
private final Map<Artifact, ImmutableList<FilesetOutputSymlink>> topLevelFilesets;
public CheckInputResults(
ActionInputMap actionInputMap,
Map<Artifact, Collection<Artifact>> expandedArtifacts,
Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsInsideRunfiles,
Map<Artifact, ImmutableList<FilesetOutputSymlink>> topLevelFilesets) {
this.actionInputMap = actionInputMap;
this.expandedArtifacts = expandedArtifacts;
this.filesetsInsideRunfiles = filesetsInsideRunfiles;
this.topLevelFilesets = topLevelFilesets;
}
}
private interface AccumulateInputResultsFactory<S extends ActionInputMapSink, R> {
R create(
S actionInputMapSink,
Map<Artifact, Collection<Artifact>> expandedArtifacts,
Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsInsideRunfiles,
Map<Artifact, ImmutableList<FilesetOutputSymlink>> topLevelFilesets);
}
/**
   * Declares a dependency on all known inputs of the action. Throws an exception if any are
   * known to be missing. Some inputs may not yet be in the graph, in which case the builder
   * should abort.
*/
private CheckInputResults checkInputs(
Environment env,
Action action,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
NestedSet<Artifact> allInputs,
ImmutableSet<Artifact> mandatoryInputs)
throws ActionExecutionException, InterruptedException {
return accumulateInputs(
env,
action,
inputDeps,
allInputs,
mandatoryInputs,
ActionInputMap::new,
CheckInputResults::new);
}
/**
* Reconstructs the relationships between lost inputs and the direct deps responsible for them.
*/
private ActionInputDepOwnerMap getInputDepOwners(
Environment env,
Action action,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
NestedSet<Artifact> allInputs,
ImmutableSet<Artifact> mandatoryInputs,
Collection<ActionInput> lostInputs)
throws ActionExecutionException, InterruptedException {
return accumulateInputs(
env,
action,
inputDeps,
allInputs,
mandatoryInputs,
ignoredInputDepsSize -> new ActionInputDepOwnerMap(lostInputs),
(actionInputMapSink, expandedArtifacts, filesetsInsideRunfiles, topLevelFilesets) ->
actionInputMapSink);
}
private <S extends ActionInputMapSink, R> R accumulateInputs(
Environment env,
Action action,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
NestedSet<Artifact> allInputs,
ImmutableSet<Artifact> mandatoryInputs,
IntFunction<S> actionInputMapSinkFactory,
AccumulateInputResultsFactory<S, R> accumulateInputResultsFactory)
throws ActionExecutionException, InterruptedException {
if (evalInputsAsNestedSet(allInputs) && ArtifactNestedSetFunction.evalKeysAsOneGroup()) {
return accumulateInputsWithNestedSet(
env,
action,
inputDeps,
allInputs,
mandatoryInputs,
actionInputMapSinkFactory,
accumulateInputResultsFactory);
}
// Only populate input data if we have the input values, otherwise they'll just go unused.
// We still want to loop through the inputs to collect missing deps errors. During the
// evaluator "error bubbling", we may get one last chance at reporting errors even though
// some deps are still missing.
boolean populateInputData = !env.valuesMissing();
    // Errors are unexpected, so initialize these with zero capacity to avoid garbage in the
    // common case.
List<LabelCause> missingArtifactCauses = Lists.newArrayListWithCapacity(0);
List<NestedSet<Cause>> transitiveCauses = Lists.newArrayListWithCapacity(0);
ImmutableList<Artifact> allInputsList = allInputs.toList();
S inputArtifactData =
actionInputMapSinkFactory.apply(populateInputData ? allInputsList.size() : 0);
Map<Artifact, Collection<Artifact>> expandedArtifacts =
new HashMap<>(populateInputData ? 128 : 0);
Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsInsideRunfiles =
Maps.newHashMapWithExpectedSize(0);
Map<Artifact, ImmutableList<FilesetOutputSymlink>> topLevelFilesets =
Maps.newHashMapWithExpectedSize(0);
ActionExecutionException firstActionExecutionException = null;
for (Artifact input : allInputsList) {
ValueOrException3<IOException, ActionExecutionException, ArtifactNestedSetEvalException>
valueOrException = inputDeps.get(Artifact.key(input));
if (valueOrException == null) {
continue;
}
// Some inputs do not need to exist: we depend on the inputs of the action as registered in
// the action cache so that we can verify the validity of the cache entry, but if the
// reference to the file went away together with the file itself (e.g. when deleting a file
// and removing the #include statement referencing it), we re-execute the action anyway so it
// does not matter if the file is missing.
//
// This mechanism fails, though, if we remove a #include statement referencing a header and
// then introduce a symlink cycle in its place: then there will be an IOException which will
// be propagated even though we shouldn't have read the file in the first place. This is not
// really avoidable (at least not without redesigning the action cache), because once the
// ArtifactFunction throws an exception, Skyframe evaluation must stop, so all we can do is
// signal the error in a more meaningful way.
//
// In particular, making it possible to check only the up-to-dateness of mandatory inputs in
// the action cache is not enough: it can be that the reference to the symlink cycle arose
// from a discovered input, so even though no mandatory inputs change, it can still be that
// the need to read the newly introduced symlink cycle went away.
boolean mandatory =
!input.isSourceArtifact() || mandatoryInputs == null || mandatoryInputs.contains(input);
SkyValue value = FileArtifactValue.MISSING_FILE_MARKER;
try {
value = valueOrException.get();
} catch (IOException e) {
if (!input.isSourceArtifact()) {
BugReport.sendBugReport(
new IllegalStateException(
"Unexpected IOException for generated artifact: " + input + ", " + action, e));
}
if (mandatory) {
missingArtifactCauses.add(
handleMissingFile(
input,
ArtifactFunction.makeMissingInputFileValue(input, e),
action.getOwner().getLabel()));
continue;
}
} catch (ActionExecutionException e) {
if (mandatory) {
// Prefer a catastrophic exception as the one we propagate.
if (firstActionExecutionException == null
|| (!firstActionExecutionException.isCatastrophe() && e.isCatastrophe())) {
firstActionExecutionException = e;
}
transitiveCauses.add(e.getRootCauses());
continue;
}
} catch (ArtifactNestedSetEvalException e) {
throw new IllegalStateException(
"Unexpected ArtifactNestedSetEvalException for non-NSOS build: "
+ input
+ ", "
+ action,
e);
}
if (value instanceof MissingFileArtifactValue) {
if (mandatory) {
missingArtifactCauses.add(
handleMissingFile(
input, (MissingFileArtifactValue) value, action.getOwner().getLabel()));
continue;
} else {
value = FileArtifactValue.MISSING_FILE_MARKER;
}
}
if (populateInputData) {
ActionInputMapHelper.addToMap(
inputArtifactData,
expandedArtifacts,
filesetsInsideRunfiles,
topLevelFilesets,
input,
value,
env);
}
}
if (!missingArtifactCauses.isEmpty()) {
for (LabelCause missingInput : missingArtifactCauses) {
skyframeActionExecutor.printError(
String.format("%s: %s", action.getOwner().getLabel(), missingInput.getMessage()),
action,
null);
}
}
    // We need to rethrow the first exception because it can contain a useful error message.
if (firstActionExecutionException != null) {
if (missingArtifactCauses.isEmpty() && (checkNotNull(transitiveCauses, action).size() == 1)) {
// In the case a single action failed, just propagate the exception upward. This avoids
// having to copy the root causes to the upwards transitive closure.
throw firstActionExecutionException;
}
NestedSetBuilder<Cause> allCauses =
NestedSetBuilder.<Cause>stableOrder().addAll(missingArtifactCauses);
transitiveCauses.forEach(allCauses::addTransitive);
throw new ActionExecutionException(
firstActionExecutionException.getMessage(),
firstActionExecutionException.getCause(),
action,
allCauses.build(),
firstActionExecutionException.isCatastrophe(),
firstActionExecutionException.getDetailedExitCode());
}
if (!missingArtifactCauses.isEmpty()) {
throw createMissingInputsException(action, missingArtifactCauses);
}
return accumulateInputResultsFactory.create(
inputArtifactData, expandedArtifacts, filesetsInsideRunfiles, topLevelFilesets);
}
/**
   * A refactoring of the existing #accumulateInputs to separate the error-handling and
   * input-accumulating steps. Missing values in the env are checked for only after error
   * handling is done, before proceeding to accumulate inputs.
*/
private <S extends ActionInputMapSink, R> R accumulateInputsWithNestedSet(
Environment env,
Action action,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
NestedSet<Artifact> allInputs,
ImmutableSet<Artifact> mandatoryInputs,
IntFunction<S> actionInputMapSinkFactory,
AccumulateInputResultsFactory<S, R> accumulateInputResultsFactory)
throws ActionExecutionException, InterruptedException {
Map<SkyKey, SkyValue> artifactSkyKeyToSkyValue =
ArtifactNestedSetFunction.getInstance().getArtifactSkyKeyToSkyValue();
ImmutableList<Artifact> allInputsList = allInputs.toList();
// Some keys have more than 1 corresponding Artifact (e.g. actions with 2 outputs).
Multimap<SkyKey, Artifact> skyKeyToArtifactOrSet =
MultimapBuilder.hashKeys().hashSetValues().build();
allInputsList.forEach(input -> skyKeyToArtifactOrSet.put(Artifact.key(input), input));
ActionExecutionFunctionExceptionHandler actionExecutionFunctionExceptionHandler =
new ActionExecutionFunctionExceptionHandler(
skyKeyToArtifactOrSet,
inputDeps,
artifactSkyKeyToSkyValue,
action,
mandatoryInputs,
skyframeActionExecutor);
actionExecutionFunctionExceptionHandler.accumulateAndThrowExceptions();
// All exceptions from dependencies handled, it's now safe to check for missing values.
if (env.valuesMissing()) {
return null;
}
// When there are no missing values, we can start populating input data.
Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsInsideRunfiles =
Maps.newHashMapWithExpectedSize(0);
Map<Artifact, ImmutableList<FilesetOutputSymlink>> topLevelFilesets =
Maps.newHashMapWithExpectedSize(0);
S inputArtifactData = actionInputMapSinkFactory.apply(allInputsList.size());
Map<Artifact, Collection<Artifact>> expandedArtifacts = Maps.newHashMapWithExpectedSize(128);
for (Artifact input : allInputsList) {
SkyValue value = artifactSkyKeyToSkyValue.get(Artifact.key(input));
if (value instanceof MissingFileArtifactValue) {
if (actionExecutionFunctionExceptionHandler.isMandatory(input)) {
actionExecutionFunctionExceptionHandler.accumulateMissingFileArtifactValue(
input, (MissingFileArtifactValue) value);
continue;
} else {
value = FileArtifactValue.MISSING_FILE_MARKER;
}
}
ActionInputMapHelper.addToMap(
inputArtifactData,
expandedArtifacts,
filesetsInsideRunfiles,
topLevelFilesets,
input,
value,
env);
}
// After accumulating the inputs, we might find some mandatory artifact with
// MissingFileArtifactValue.
actionExecutionFunctionExceptionHandler.maybeThrowException();
return accumulateInputResultsFactory.create(
inputArtifactData, expandedArtifacts, filesetsInsideRunfiles, topLevelFilesets);
}
static LabelCause handleMissingFile(
Artifact input, MissingFileArtifactValue missingValue, Label labelInCaseOfBug) {
MissingInputFileException e = missingValue.getException();
Label inputLabel = input.getOwner();
if (inputLabel == null) {
BugReport.sendBugReport(
new IllegalStateException(
String.format(
"Artifact %s with missing value %s should have owner (%s)",
input, e.getMessage(), labelInCaseOfBug)));
inputLabel = labelInCaseOfBug;
}
return new LabelCause(inputLabel, e.getMessage());
}
@Override
public String extractTag(SkyKey skyKey) {
// The return value from this method is only applied to non-error, non-debug events that are
// posted through the EventHandler associated with the SkyFunction.Environment. For those
// events, this setting overrides whatever tag is set.
//
// If action out/err replay is enabled, then we intentionally post through the Environment to
// ensure that the output is replayed on subsequent builds. In that case, we need this to be the
// action owner's label.
//
// Otherwise, Events from action execution are posted to the global Reporter rather than through
// the Environment, so this setting is ignored. Note that the SkyframeActionExecutor manually
// checks the action owner's label against the Reporter's output filter in that case, which has
// the same effect as setting it as a tag on the corresponding event.
return Label.print(((ActionLookupData) skyKey).getActionLookupKey().getLabel());
}
/**
* Should be called once execution is over, and the intra-build cache of in-progress computations
* should be discarded. If the cache is non-empty (due to an interrupted/failed build), failure to
* call complete() can both cause a memory leak and incorrect results on the subsequent build.
*/
public void complete(ExtendedEventHandler eventHandler) {
// Discard all remaining state (there should be none after a successful execution).
stateMap = Maps.newConcurrentMap();
actionRewindStrategy.reset(eventHandler);
}
private ContinuationState getState(Action action) {
ContinuationState state = stateMap.get(action);
if (state == null) {
state = new ContinuationState();
Preconditions.checkState(stateMap.put(action, state) == null, action);
}
return state;
}
/**
* State to save work across restarts of ActionExecutionFunction due to missing values in the
* graph for actions that discover inputs. There are three places where we save work, all for
* actions that discover inputs:
*
* <ol>
* <li>If not all known input metadata (coming from Action#getInputs) is available yet, then the
* calculated set of inputs (including the inputs resolved from the action cache) is saved.
* <li>If not all discovered inputs' metadata is available yet, then the known input metadata
* together with the set of discovered inputs is saved, as well as the Token used to
* identify this action to the action cache.
* <li>If, after execution, new inputs are discovered whose metadata is not yet available, then
* the same data as in the previous case is saved, along with the actual result of
* execution.
* </ol>
*/
private static class ContinuationState {
AllInputs allInputs;
/** Mutable map containing metadata for known artifacts. */
ActionInputMap inputArtifactData = null;
Map<Artifact, Collection<Artifact>> expandedArtifacts = null;
Map<Artifact, ImmutableList<FilesetOutputSymlink>> filesetsInsideRunfiles = null;
Map<Artifact, ImmutableList<FilesetOutputSymlink>> topLevelFilesets = null;
Token token = null;
NestedSet<Artifact> discoveredInputs = null;
FileSystem actionFileSystem = null;
/**
     * Stores the ArtifactNestedSetKeys created from the inputs of this action. Objective: avoid
     * creating a new ArtifactNestedSetKey for the same NestedSet each time we run
     * ActionExecutionFunction for the same action. This is wiped every time allInputs is updated.
*/
CompactHashSet<SkyKey> requestedArtifactNestedSetKeys = null;
boolean hasCollectedInputs() {
return allInputs != null;
}
boolean hasArtifactData() {
boolean result = inputArtifactData != null;
Preconditions.checkState(result == (expandedArtifacts != null), this);
return result;
}
boolean hasCheckedActionCache() {
// If token is null because there was an action cache hit, this method is never called again
// because we return immediately.
return token != null;
}
/** Must be called to assign values to the given variables as they change. */
void updateFileSystemContext(
SkyframeActionExecutor executor,
Environment env,
ActionMetadataHandler metadataHandler,
ImmutableMap<Artifact, ImmutableList<FilesetOutputSymlink>> filesets)
throws IOException {
if (actionFileSystem != null) {
executor.updateActionFileSystemContext(
actionFileSystem, env, metadataHandler.getOutputStore()::injectOutputData, filesets);
}
}
Iterable<Artifact> filterKnownDiscoveredInputs() {
return Iterables.filter(
discoveredInputs.toList(), input -> inputArtifactData.getMetadata(input) == null);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(this)
.add("token", token)
.add("allInputs", allInputs)
.add("inputArtifactData", inputArtifactData)
.add("discoveredInputs", discoveredInputs)
.toString();
}
}
/**
* Used to declare all the exception types that can be wrapped in the exception thrown by {@link
* ActionExecutionFunction#compute}.
*/
static final class ActionExecutionFunctionException extends SkyFunctionException {
private final ActionExecutionException actionException;
ActionExecutionFunctionException(ActionExecutionException e) {
// We conservatively assume that the error is transient. We don't have enough information to
// distinguish non-transient errors (e.g. compilation error from a deterministic compiler)
// from transient ones (e.g. IO error).
// TODO(bazel-team): Have ActionExecutionExceptions declare their transience.
super(e, Transience.TRANSIENT);
this.actionException = e;
}
@Override
public boolean isCatastrophic() {
return actionException.isCatastrophe();
}
}
/** Helper subclass for the error-handling logic for ActionExecutionFunction#accumulateInputs. */
private static final class ActionExecutionFunctionExceptionHandler {
private final Multimap<SkyKey, Artifact> skyKeyToArtifactSet;
private final Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps;
private final Map<SkyKey, SkyValue> artifactSkyKeyToSkyValue;
private final Action action;
private final Set<Artifact> mandatoryInputs;
private final SkyframeActionExecutor skyframeActionExecutor;
List<LabelCause> missingArtifactCauses = Lists.newArrayListWithCapacity(0);
List<NestedSet<Cause>> transitiveCauses = Lists.newArrayListWithCapacity(0);
private ActionExecutionException firstActionExecutionException;
ActionExecutionFunctionExceptionHandler(
Multimap<SkyKey, Artifact> skyKeyToArtifactSet,
Map<
SkyKey,
ValueOrException3<
IOException, ActionExecutionException, ArtifactNestedSetEvalException>>
inputDeps,
Map<SkyKey, SkyValue> artifactSkyKeyToSkyValue,
Action action,
Set<Artifact> mandatoryInputs,
SkyframeActionExecutor skyframeActionExecutor) {
this.skyKeyToArtifactSet = skyKeyToArtifactSet;
this.inputDeps = inputDeps;
this.artifactSkyKeyToSkyValue = artifactSkyKeyToSkyValue;
this.action = action;
this.mandatoryInputs = mandatoryInputs;
this.skyframeActionExecutor = skyframeActionExecutor;
}
/**
     * Go through the list of evaluated SkyKeys and handle any exception that arises, taking into
     * account whether the corresponding artifact(s) are mandatory inputs.
*
* <p>This also updates ArtifactNestedSetFunction#skyKeyToSkyValue if an Artifact's value is
* non-null.
*
* @throws ActionExecutionException if the eval of any mandatory artifact threw an exception.
*/
void accumulateAndThrowExceptions() throws ActionExecutionException {
for (SkyKey key : inputDeps.keySet()) {
try {
SkyValue value = inputDeps.get(key).get();
if (key instanceof ArtifactNestedSetKey || value == null) {
continue;
}
artifactSkyKeyToSkyValue.put(key, value);
} catch (IOException e) {
for (Artifact input : skyKeyToArtifactSet.get(key)) {
handleIOException(input, e);
}
} catch (ActionExecutionException e) {
for (Artifact input : skyKeyToArtifactSet.get(key)) {
handleActionExecutionException(input, e);
}
} catch (ArtifactNestedSetEvalException e) {
for (Pair<SkyKey, Exception> skyKeyAndException : e.getNestedExceptions().toList()) {
SkyKey skyKey = skyKeyAndException.getFirst();
Exception inputException = skyKeyAndException.getSecond();
Preconditions.checkState(
inputException instanceof IOException
|| inputException instanceof ActionExecutionException,
"Unexpected exception type: %s, key: %s",
inputException,
skyKey);
for (Artifact input : skyKeyToArtifactSet.get(skyKey)) {
if (inputException instanceof IOException) {
handleIOException(input, (IOException) inputException);
} else {
handleActionExecutionException(input, (ActionExecutionException) inputException);
}
}
}
}
}
maybeThrowException();
}
void accumulateMissingFileArtifactValue(Artifact input, MissingFileArtifactValue value) {
missingArtifactCauses.add(handleMissingFile(input, value, action.getOwner().getLabel()));
}
/** @throws ActionExecutionException if there is any accumulated exception from the inputs. */
void maybeThrowException() throws ActionExecutionException {
// We need to rethrow the first exception because it can contain a useful error message.
if (firstActionExecutionException != null) {
if (missingArtifactCauses.isEmpty()
&& (checkNotNull(transitiveCauses, action).size() == 1)) {
// In the case a single action failed, just propagate the exception upward. This avoids
// having to copy the root causes to the upwards transitive closure.
throw firstActionExecutionException;
}
NestedSetBuilder<Cause> allCauses =
NestedSetBuilder.<Cause>stableOrder().addAll(missingArtifactCauses);
transitiveCauses.forEach(allCauses::addTransitive);
throw new ActionExecutionException(
firstActionExecutionException.getMessage(),
firstActionExecutionException.getCause(),
action,
allCauses.build(),
firstActionExecutionException.isCatastrophe(),
firstActionExecutionException.getDetailedExitCode());
}
if (!missingArtifactCauses.isEmpty()) {
for (LabelCause missingInput : missingArtifactCauses) {
skyframeActionExecutor.printError(
String.format("%s: %s", action.getOwner().getLabel(), missingInput.getMessage()),
action,
null);
}
throw createMissingInputsException(action, missingArtifactCauses);
}
}
boolean isMandatory(Artifact input) {
return !input.isSourceArtifact()
|| mandatoryInputs == null
|| mandatoryInputs.contains(input);
}
private void handleActionExecutionException(Artifact input, ActionExecutionException e) {
if (isMandatory(input)) {
// Prefer a catastrophic exception as the one we propagate.
if (firstActionExecutionException == null
|| (!firstActionExecutionException.isCatastrophe() && e.isCatastrophe())) {
firstActionExecutionException = e;
}
transitiveCauses.add(e.getRootCauses());
}
}
private void handleIOException(Artifact input, IOException e) {
if (!input.isSourceArtifact()) {
BugReport.sendBugReport(
new IllegalStateException(
"Unexpected IOException for generated artifact: " + input + ", " + action, e));
}
if (isMandatory(input)) {
missingArtifactCauses.add(
handleMissingFile(
input,
ArtifactFunction.makeMissingInputFileValue(input, e),
action.getOwner().getLabel()));
}
}
}
private static ActionExecutionException createMissingInputsException(
Action action, List<LabelCause> missingArtifactCauses) {
String message = missingArtifactCauses.size() + " input file(s) do not exist";
Code detailedCode = Code.ACTION_INPUT_FILES_MISSING;
return new ActionExecutionException(
message,
action,
NestedSetBuilder.wrap(Order.STABLE_ORDER, missingArtifactCauses),
/*catastrophe=*/ false,
createDetailedExitCode(message, detailedCode));
}
private static DetailedExitCode createDetailedExitCode(String message, Code detailedCode) {
return DetailedExitCode.of(
FailureDetail.newBuilder()
.setMessage(message)
.setExecution(Execution.newBuilder().setCode(detailedCode))
.build());
}
}
|
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.service.plugins.validators.authorization;
import com.thoughtworks.go.config.PluginRoleConfig;
import com.thoughtworks.go.config.exceptions.RecordNotFoundException;
import com.thoughtworks.go.domain.config.ConfigurationProperty;
import com.thoughtworks.go.plugin.access.authorization.AuthorizationExtension;
import com.thoughtworks.go.plugin.api.response.validation.ValidationError;
import com.thoughtworks.go.plugin.api.response.validation.ValidationResult;
public class RoleConfigurationValidator {
private final AuthorizationExtension authorizationExtension;
public RoleConfigurationValidator(AuthorizationExtension authorizationExtension) {
this.authorizationExtension = authorizationExtension;
}
public void validate(PluginRoleConfig role, String pluginId) {
try {
ValidationResult result = authorizationExtension.validateRoleConfiguration(pluginId, role.getConfigurationAsMap(true));
if (!result.isSuccessful()) {
for (ValidationError error : result.getErrors()) {
ConfigurationProperty property = role.getProperty(error.getKey());
if (property == null) {
role.addNewConfiguration(error.getKey(), false);
property = role.getProperty(error.getKey());
}
property.addError(error.getKey(), error.getMessage());
}
}
} catch (RecordNotFoundException e) {
role.addError("pluginRole", String.format("Unable to validate `pluginRole` configuration, missing plugin: %s", pluginId));
}
}
}
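// Usage sketch (hypothetical wiring; the extension instance and the PluginRoleConfig normally
// come from GoCD's plugin and config infrastructure, and the plugin id here is illustrative):
//
//     RoleConfigurationValidator validator = new RoleConfigurationValidator(authorizationExtension);
//     validator.validate(pluginRoleConfig, "cd.go.authorization.ldap");
//
// Each ValidationError returned by the plugin is attached to the matching
// ConfigurationProperty, which validate(...) first creates if the role does not define it.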
|
/* org.agiso.castor.core.ICellularNhood (23-12-2018)
*
* ICellularNhood.java
*
* Copyright 2018 agiso.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.agiso.castor.core;
/**
* Cellular automaton neighborhood interface.
*
* @author Karol Kopacz
* @since 1.0
*/
public interface ICellularNhood {
public int[][] getCoordinates();
}
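/**
 * A minimal illustrative implementation, not part of the original file, assuming the
 * returned coordinates are relative row/column offsets of neighboring cells: the classic
 * radius-1 Moore neighborhood (the eight cells surrounding a given cell).
 */
class MooreNhoodSketch implements ICellularNhood {
    @Override
    public int[][] getCoordinates() {
        // Each pair is {rowOffset, columnOffset}; the center cell {0, 0} is excluded.
        return new int[][] {
            {-1, -1}, {-1, 0}, {-1, 1},
            { 0, -1},          { 0, 1},
            { 1, -1}, { 1, 0}, { 1, 1}
        };
    }
}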
|
package com.genesis.origin.provider;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
@SpringBootApplication
@EnableAutoConfiguration(exclude={DataSourceAutoConfiguration.class})
@EnableSwagger2
@EnableDiscoveryClient
public class OriginProviderApplication {
public static void main(String[] args) {
SpringApplication.run(OriginProviderApplication.class, args);
}
}
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.worklink.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/worklink-2018-09-25/AssociateWebsiteCertificateAuthority"
* target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AssociateWebsiteCertificateAuthorityRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ARN of the fleet.
* </p>
*/
private String fleetArn;
/**
* <p>
* The root certificate of the CA.
* </p>
*/
private String certificate;
/**
* <p>
* The certificate name to display.
* </p>
*/
private String displayName;
/**
* <p>
* The ARN of the fleet.
* </p>
*
* @param fleetArn
* The ARN of the fleet.
*/
public void setFleetArn(String fleetArn) {
this.fleetArn = fleetArn;
}
/**
* <p>
* The ARN of the fleet.
* </p>
*
* @return The ARN of the fleet.
*/
public String getFleetArn() {
return this.fleetArn;
}
/**
* <p>
* The ARN of the fleet.
* </p>
*
* @param fleetArn
* The ARN of the fleet.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AssociateWebsiteCertificateAuthorityRequest withFleetArn(String fleetArn) {
setFleetArn(fleetArn);
return this;
}
/**
* <p>
* The root certificate of the CA.
* </p>
*
* @param certificate
* The root certificate of the CA.
*/
public void setCertificate(String certificate) {
this.certificate = certificate;
}
/**
* <p>
* The root certificate of the CA.
* </p>
*
* @return The root certificate of the CA.
*/
public String getCertificate() {
return this.certificate;
}
/**
* <p>
* The root certificate of the CA.
* </p>
*
* @param certificate
* The root certificate of the CA.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AssociateWebsiteCertificateAuthorityRequest withCertificate(String certificate) {
setCertificate(certificate);
return this;
}
/**
* <p>
* The certificate name to display.
* </p>
*
* @param displayName
* The certificate name to display.
*/
public void setDisplayName(String displayName) {
this.displayName = displayName;
}
/**
* <p>
* The certificate name to display.
* </p>
*
* @return The certificate name to display.
*/
public String getDisplayName() {
return this.displayName;
}
/**
* <p>
* The certificate name to display.
* </p>
*
* @param displayName
* The certificate name to display.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public AssociateWebsiteCertificateAuthorityRequest withDisplayName(String displayName) {
setDisplayName(displayName);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getFleetArn() != null)
sb.append("FleetArn: ").append(getFleetArn()).append(",");
if (getCertificate() != null)
sb.append("Certificate: ").append(getCertificate()).append(",");
if (getDisplayName() != null)
sb.append("DisplayName: ").append(getDisplayName());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof AssociateWebsiteCertificateAuthorityRequest == false)
return false;
AssociateWebsiteCertificateAuthorityRequest other = (AssociateWebsiteCertificateAuthorityRequest) obj;
if (other.getFleetArn() == null ^ this.getFleetArn() == null)
return false;
if (other.getFleetArn() != null && other.getFleetArn().equals(this.getFleetArn()) == false)
return false;
if (other.getCertificate() == null ^ this.getCertificate() == null)
return false;
if (other.getCertificate() != null && other.getCertificate().equals(this.getCertificate()) == false)
return false;
if (other.getDisplayName() == null ^ this.getDisplayName() == null)
return false;
if (other.getDisplayName() != null && other.getDisplayName().equals(this.getDisplayName()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getFleetArn() == null) ? 0 : getFleetArn().hashCode());
hashCode = prime * hashCode + ((getCertificate() == null) ? 0 : getCertificate().hashCode());
hashCode = prime * hashCode + ((getDisplayName() == null) ? 0 : getDisplayName().hashCode());
return hashCode;
}
@Override
public AssociateWebsiteCertificateAuthorityRequest clone() {
return (AssociateWebsiteCertificateAuthorityRequest) super.clone();
}
}
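// Construction sketch using only the fluent setters defined above; the ARN and certificate
// values are illustrative placeholders, not real resources:
//
//     AssociateWebsiteCertificateAuthorityRequest request =
//             new AssociateWebsiteCertificateAuthorityRequest()
//                     .withFleetArn("arn:aws:worklink:us-east-1:123456789012:fleet/example")
//                     .withCertificate(pemEncodedRootCertificate)
//                     .withDisplayName("Corp Root CA");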
|
package annin.my.android.popularmovies2.data;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
/**
* Created by Maino96-10022 on 9/6/2017.
*/
public class MovieDbHelper //extends SQLiteOpenHelper
{
// // The name of the database
// private static final String DATABASE_NAME = "tasksDb.db";
//
// // If you change the database schema, you must increment the database version
// private static final int VERSION = 10;
//
// /**
// * Constructs a new instance of {@link MovieDbHelper}.
// *
// * @param context of the app
// */
// MovieDbHelper(Context context)
// {
// super(context, DATABASE_NAME, null, VERSION);
// }
//
// /**
// * Called when the movies database is created for the first time.
// */
// @Override
// public void onCreate(SQLiteDatabase db)
// {
// // Create movies table (careful to follow SQL formatting rules)
// final String CREATE_TABLE = "CREATE TABLE " + MovieContract.MovieEntry.TABLE_NAME + " (" +
// MovieContract.MovieEntry._ID + " INTEGER PRIMARY KEY , " +
// MovieContract.MovieEntry.COLUMN_MOVIES_ID + " TEXT NOT NULL , " +
// MovieContract.MovieEntry.COLUMN_MOVIES_TITLE + " TEXT NOT NULL, " +
// MovieContract.MovieEntry.COLUMN_MOVIES_OVERVIEW + " TEXT NOT NULL, " +
// MovieContract.MovieEntry.COLUMN_MOVIES_VOTE + " TEXT NOT NULL, " +
// MovieContract.MovieEntry.COLUMN_MOVIES_DATE + " TEXT NOT NULL, " +
// MovieContract.MovieEntry.COLUMN_MOVIES_POSTER_PATH + " TEXT NOT NULL, " +
//
// " UNIQUE (" + MovieContract.MovieEntry.COLUMN_MOVIES_TITLE + ") ON CONFLICT REPLACE);";
//
// db.execSQL(CREATE_TABLE);
// }
//
// /**
// * This method discards the old table of data and calls onCreate to recreate a new one.
// * This only occurs when the version number for this database (DATABASE_VERSION) is incremented.
// */
// @Override
// public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion)
// {
// db.execSQL("ALTER TABLE " + MovieContract.MovieEntry.TABLE_NAME);
// onCreate(db);
// }
}
|
package com.github.nikolaymakhonin.utils.serialization;
import com.github.nikolaymakhonin.logger.Log;
import com.github.nikolaymakhonin.utils.RefParam;
import com.github.nikolaymakhonin.utils.strings.CharsetUtils;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
public class StreamSerializerUtils {
private static final int currentVersion = 3;
// MAX_COMPRESSION_LEVEL
public static boolean SaveToByteArray(final IStreamSerializable obj, final RefParam<byte[]> buffer) {
return SaveToByteArray(obj, buffer, CompressMethod.None, 0, null, null, null);
}
public static boolean SaveToByteArray(final IStreamSerializable obj, final RefParam<byte[]> buffer,
final CompressMethod compressMethod, final int compressionLevel, final String cryptKey, final String cryptIV,
final Charset cryptEncoding) {
try {
final ByteArrayOutputStream memStream = new ByteArrayOutputStream();
if (SaveToStream(obj, memStream, compressMethod, compressionLevel, cryptKey, cryptIV, cryptEncoding)) {
buffer.value = memStream.toByteArray();
memStream.close();
return true;
}
buffer.value = null;
memStream.close();
return false;
} catch (final IOException e) {
Log.e("StreamSerializerUtils", "", e);
buffer.value = null;
return false;
}
}
// public static boolean SaveToFile(final IStreamSerializable obj, final String filePath, final CompressMethod
// compressMethod = CompressMethod.None, int compressionLevel = mika.utils.Serialization.MAX_COMPRESSION_LEVEL,
// String cryptKey = null, String cryptIV = null, Charset cryptEncoding = null)
// {
// using (final Stream fileStream = FileUtils.FileOpenWrite(filePath))
// {
// return SaveToStream(obj, fileStream, compressMethod, compressionLevel, cryptKey, cryptIV, cryptEncoding);
// }
// }
// CompressionUtils.MAX_COMPRESSION_LEVEL
    public static boolean SaveToStream(final IStreamSerializable obj, final OutputStream stream,
            final CompressMethod compressMethod, final int compressionLevel, final String cryptKey,
            final String cryptIV, final Charset cryptEncoding)
{
try
{
final BinaryWriter writer = new BinaryWriter(stream);
writer.write(currentVersion);
switch (currentVersion)
{
case 3:
writer.write(compressMethod.ordinal());
writer.write(compressionLevel);
final boolean encrypt = cryptKey != null && cryptIV != null;
writer.write(encrypt);
if (compressMethod == CompressMethod.None && !encrypt)
{
obj.Serialize(writer);
writer.flush();
}
else
{
writer.flush();
final ByteArrayOutputStream bufferOutputStream = new ByteArrayOutputStream();
final BinaryWriter writer2 = new BinaryWriter(bufferOutputStream);
obj.Serialize(writer2);
writer2.flush();
ByteArrayInputStream bufferInputStream = new ByteArrayInputStream(bufferOutputStream.toByteArray());
if (compressMethod != CompressMethod.None)
{
if (encrypt)
{
final ByteArrayOutputStream packStream = new ByteArrayOutputStream();
final long result = CompressionUtils.Pack(bufferInputStream, packStream, compressMethod, compressionLevel);
if (result < 0) {
return false;
}
bufferInputStream = new ByteArrayInputStream(packStream.toByteArray());
}
else
{
final long result = CompressionUtils.Pack(bufferInputStream, stream, compressMethod, compressionLevel);
if (result < 0) {
return false;
}
}
}
if (encrypt)
{
final boolean result = CryptUtils.Crypt(CryptMode.Encrypt, bufferInputStream, stream, cryptEncoding != null ? cryptEncoding : CharsetUtils.UTF8, cryptKey, cryptIV, false);
if (!result) {
return false;
}
}
}
break;
}
}
catch (final Exception exception)
{
Log.e("UnknownLogTag", "", exception);
return false;
}
return true;
}
public static boolean LoadFromByteArray(final IStreamSerializable obj, final byte[] buffer) {
return LoadFromByteArray(obj, buffer, null, null);
}
    /**
     * @param cryptKey maximum used length is 24 bytes
     * @param cryptIV maximum used length is 8 bytes
     */
public static boolean LoadFromByteArray(final IStreamSerializable obj, final byte[] buffer, final String cryptKey,
final String cryptIV) {
try {
if (buffer == null) {
Log.e("UnknownLogTag", "buffer == null");
return false;
}
final ByteArrayInputStream memStream = new ByteArrayInputStream(buffer);
final boolean result = loadFromStream(obj, memStream, cryptKey, cryptIV, null);
memStream.close();
return result;
} catch (final IOException e) {
Log.e("StreamSerializerUtils", "", e);
return false;
}
}
// public static boolean LoadFromFile(final IStreamSerializable obj, final String filePath)
// {
// return LoadFromFile(obj, filePath, null, null);
// }
//
// public static boolean LoadFromFile(final IStreamSerializable obj, final String filePath, final String cryptKey =
// null, String cryptIV = null)
// {
// using (final Stream fileStream = FileUtils.FileOpenRead(filePath))
// {
// return loadFromStream( obj, fileStream, null);
// }
// }
public static boolean LoadFromStream(final IStreamSerializable obj, final ByteArrayInputStream stream,
final String cryptKey, final String cryptIV, final Charset cryptEncoding) {
return loadFromStream(obj, stream, cryptKey, cryptIV, cryptEncoding);
}
public static boolean LoadFromStream(final IStreamSerializable obj, final ByteArrayInputStream stream) {
return loadFromStream(obj, stream, null, null, null);
}
    private static boolean loadFromStream(final IStreamSerializable obj, InputStream stream,
            final String cryptKey, final String cryptIV, final Charset cryptEncoding)
{
try
{
BinaryReader reader = new BinaryReader(stream);
final int version = reader.readInt();
CompressMethod compressMethod;
switch (version)
{
case 3:
compressMethod = CompressMethod.values()[reader.readInt()];
final int compressionLevel = reader.readInt();
final boolean encrypt = reader.readBoolean();
if (compressMethod != CompressMethod.None || encrypt)
{
if (encrypt)
{
final ByteArrayOutputStream decryptStream = new ByteArrayOutputStream();
final boolean result = CryptUtils.Crypt(CryptMode.Decrypt, stream, decryptStream, cryptEncoding != null ? cryptEncoding : CharsetUtils.UTF8, cryptKey, cryptIV, false);
if (!result) {
return false;
}
stream = new ByteArrayInputStream(decryptStream.toByteArray());
}
if (compressMethod != CompressMethod.None)
{
final ByteArrayOutputStream unPackStream = new ByteArrayOutputStream();
final long result = CompressionUtils.UnPack(stream, unPackStream, compressMethod, compressionLevel);
if (result < 0) {
return false;
}
stream = new ByteArrayInputStream(unPackStream.toByteArray());
}
reader = new BinaryReader(stream);
}
obj.DeSerialize(reader);
break;
default:
Log.e("UnknownLogTag", "DeSerialize unavailable version: " + version);
return false;
}
}
catch (final Exception exception)
{
Log.e("UnknownLogTag", "", exception);
return false;
}
return true;
}
}
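// Round-trip sketch. MyData stands for a hypothetical IStreamSerializable implementation,
// and RefParam is assumed to have a no-argument constructor; the two-argument overloads
// above default to CompressMethod.None with no encryption:
//
//     MyData original = new MyData();
//     RefParam<byte[]> buffer = new RefParam<>();
//     if (StreamSerializerUtils.SaveToByteArray(original, buffer)) {
//         MyData restored = new MyData();
//         boolean ok = StreamSerializerUtils.LoadFromByteArray(restored, buffer.value);
//     }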
|
package com.google.appinventor.components.runtime;
import android.content.ContentResolver;
import android.content.Context;
import android.database.Cursor;
import android.database.DatabaseUtils;
import android.net.Uri;
import android.provider.Contacts;
import android.text.TextUtils;
import android.text.util.Rfc822Token;
import android.view.View;
import android.widget.ResourceCursorAdapter;
import android.widget.TextView;
import com.google.appinventor.components.runtime.util.HoneycombMR1Util;
import com.google.appinventor.components.runtime.util.SdkLevel;
public class EmailAddressAdapter extends ResourceCursorAdapter {
private static final boolean DEBUG = false;
public static final int PRE_HONEYCOMB_DATA_INDEX = 2;
public static final int PRE_HONEYCOMB_NAME_INDEX = 1;
private static String SORT_ORDER = null;
private static final String TAG = "EmailAddressAdapter";
private ContentResolver contentResolver;
private Context context;
private static final String[] PRE_HONEYCOMB_PROJECTION = {"_id", "name", "data"};
private static final String[] POST_HONEYCOMB_PROJECTION = HoneycombMR1Util.getEmailAdapterProjection();
public EmailAddressAdapter(Context context) {
super(context, 17367050, null);
this.contentResolver = context.getContentResolver();
this.context = context;
if (SdkLevel.getLevel() >= 12) {
SORT_ORDER = HoneycombMR1Util.getTimesContacted() + " DESC, " + HoneycombMR1Util.getDisplayName();
} else {
SORT_ORDER = "times_contacted DESC, name";
}
}
@Override // android.widget.CursorAdapter
public final String convertToString(Cursor cursor) {
String name;
String address;
int POST_HONEYCOMB_NAME_INDEX = cursor.getColumnIndex(HoneycombMR1Util.getDisplayName());
int POST_HONEYCOMB_EMAIL_INDEX = cursor.getColumnIndex(HoneycombMR1Util.getEmailAddress());
if (SdkLevel.getLevel() >= 12) {
name = cursor.getString(POST_HONEYCOMB_NAME_INDEX);
address = cursor.getString(POST_HONEYCOMB_EMAIL_INDEX);
} else {
name = cursor.getString(1);
address = cursor.getString(2);
}
return new Rfc822Token(name, address, null).toString();
}
private final String makeDisplayString(Cursor cursor) {
String name;
String address;
int POST_HONEYCOMB_NAME_INDEX = cursor.getColumnIndex(HoneycombMR1Util.getDisplayName());
int POST_HONEYCOMB_EMAIL_INDEX = cursor.getColumnIndex(HoneycombMR1Util.getEmailAddress());
StringBuilder s = new StringBuilder();
boolean flag = DEBUG;
if (SdkLevel.getLevel() >= 12) {
name = cursor.getString(POST_HONEYCOMB_NAME_INDEX);
address = cursor.getString(POST_HONEYCOMB_EMAIL_INDEX);
} else {
name = cursor.getString(1);
address = cursor.getString(2);
}
if (!TextUtils.isEmpty(name)) {
s.append(name);
flag = true;
}
if (flag) {
s.append(" <");
}
s.append(address);
if (flag) {
s.append(">");
}
return s.toString();
}
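    // For example, a row with name "Jane Doe" and address "jane@example.com" renders as
    // "Jane Doe <jane@example.com>"; when the name column is empty, only the bare address is
    // returned.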
@Override // android.widget.CursorAdapter
public final void bindView(View view, Context context, Cursor cursor) {
((TextView) view).setText(makeDisplayString(cursor));
}
@Override // android.widget.CursorAdapter
public Cursor runQueryOnBackgroundThread(CharSequence constraint) {
Uri db = null;
StringBuilder s = new StringBuilder();
if (constraint != null) {
String filter = DatabaseUtils.sqlEscapeString(constraint.toString() + '%');
if (SdkLevel.getLevel() >= 12) {
db = HoneycombMR1Util.getDataContentUri();
s.append("(" + HoneycombMR1Util.getDataMimeType() + "='" + HoneycombMR1Util.getEmailType() + "')");
s.append(" AND ");
s.append("(display_name LIKE ");
s.append(filter);
s.append(")");
} else {
db = Contacts.ContactMethods.CONTENT_EMAIL_URI;
s.append("(name LIKE ");
s.append(filter);
s.append(") OR (display_name LIKE ");
s.append(filter);
s.append(")");
}
}
String where = s.toString();
return SdkLevel.getLevel() >= 12 ? this.contentResolver.query(db, POST_HONEYCOMB_PROJECTION, where, null, SORT_ORDER) : this.contentResolver.query(db, PRE_HONEYCOMB_PROJECTION, where, null, SORT_ORDER);
}
}
|
//
// WebConfig.java
// produk-api-service
//
// Created by Agung Pramono on 25/10/2016
// Copyright (c) 2016 Java Development. All rights reserved.
//
package com.agung.produk.config;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.ViewControllerRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurerAdapter;
/**
*
*/
@Configuration
public class WebConfig extends WebMvcConfigurerAdapter {
    @Override
    public void addViewControllers(ViewControllerRegistry registry) {
registry.addViewController("/produk/listproduk").setViewName("/produk/listproduk");
registry.addViewController("/halo/halo").setViewName("/halo/halo");
//registry.addViewController("/error").setViewName("/error");
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.samples.events;
import org.apache.logging.log4j.samples.dto.Constraint;
/**
* Member requested transfer.
*
* @author generated
*/
public interface Transfer extends org.apache.logging.log4j.samples.dto.AuditEvent {
/**
* Amount : Amount of transaction in dollars.
*
* @param amount Amount of transaction in dollars.
*/
@Constraint(required = true)
void setAmount(String amount);
/**
* Comment : Comment
*
* @param comment Comment
*/
void setComment(String comment);
/**
* Completion Status : Whether the event succeeded or failed - success/failure and optional reason.
*
* @param completionStatus Whether the event succeeded or failed - success/failure and optional reason.
*/
void setCompletionStatus(String completionStatus);
/**
* Confirmation Number : A date, time, and reference number.
*
* @param confNo A date, time, and reference number.
*/
void setConfNo(String confNo);
/**
* From Account : For transfer or other transaction, the account funds are taken from.
*
* @param fromAccount For transfer or other transaction, the account funds are taken from.
*/
@Constraint(required = true)
void setFromAccount(String fromAccount);
/**
* From Account Type : For transfer or other transaction, the type of the account funds are taken from.
*
* @param fromAccountType For transfer or other transaction, the type of the account funds are taken from.
*/
void setFromAccountType(String fromAccountType);
/**
* From Routing Number : nine digit bank code to ID the FI from which funds are taken
*
* @param fromRoutingNumber nine digit bank code to ID the FI from which funds are taken
*/
void setFromRoutingNumber(String fromRoutingNumber);
/**
* Member : Member or End User number at the Host
*
* @param member Member or End User number at the Host
*/
@Constraint(required = true)
void setMember(String member);
/**
* Memo : Descriptive text or memo for transaction
*
* @param memo Descriptive text or memo for transaction
*/
void setMemo(String memo);
/**
* Payment : Amount paid or transferred.
*
* @param payment Amount paid or transferred.
*/
void setPayment(String payment);
/**
* Reference : Unique reference number or identifier for transfers.
*
* @param reference Unique reference number or identifier for transfers.
*/
void setReference(String reference);
/**
* Result : Status of request event.
*
* @param result Status of request event.
*/
void setResult(String result);
/**
* Source : Source of the End User's request; or method user used to navigate (link, button)
*
* @param source Source of the End User's request; or method user used to navigate (link, button)
*/
void setSource(String source);
/**
* Status : Status of promotional email preference or online statement.
*
* @param status Status of promotional email preference or online statement.
*/
void setStatus(String status);
/**
* Time Stamp : Time Stamp of event.
*
* @param timeStamp Time Stamp of event.
*/
void setTimeStamp(long timeStamp);
/**
* To Account : Target account or account that will receive funds in a transfer.
*
* @param toAccount Target account or account that will receive funds in a transfer.
*/
@Constraint(required = true)
void setToAccount(String toAccount);
/**
* To Account Type : Target account type such as savings or checking.
*
* @param toAccountType Target account type such as savings or checking.
*/
void setToAccountType(String toAccountType);
/**
* To Routing Number : nine digit bank code to ID the FI to receive funds
*
* @param toRoutingNumber nine digit bank code to ID the FI to receive funds
*/
void setToRoutingNumber(String toRoutingNumber);
/**
* Transaction Type : Type of transfer, i.e., withdrawal, LOC Paydown, CD redemption, lucky transfer.
*
* @param transactionType Type of transfer, i.e., withdrawal, LOC Paydown, CD redemption, lucky transfer.
*/
void setTransactionType(String transactionType);
/**
* Type : Type of event, bill payment, balance, application, or attribute. For bill pay, type of payment (check, electronic)
*
* @param type Type of event, bill payment, balance, application, or attribute. For bill pay, type of payment (check, electronic)
*/
void setType(String type);
}
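// Usage sketch. The samples project materializes these interfaces through a dynamic event
// factory; the factory call below is illustrative only and not confirmed by this file, but
// the setters are the ones declared above (all @Constraint(required = true) fields set):
//
//     Transfer transfer = eventFactory.getEvent(Transfer.class);
//     transfer.setMember("271828");
//     transfer.setAmount("1500.00");
//     transfer.setFromAccount("12345678");
//     transfer.setToAccount("87654321");
//     transfer.setTimeStamp(System.currentTimeMillis());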
|
public class Owner_Address_For_Owner_Profile {
    String Area;
    String Sector;
    String RodeNumber;
    String HouseNumber;
    public Owner_Address_For_Owner_Profile() {
    }
    public Owner_Address_For_Owner_Profile(String Area, String Sector, String RodeNumber, String HouseNumber) {
        this.Area = Area;
        this.Sector = Sector;
        this.RodeNumber = RodeNumber;
        this.HouseNumber = HouseNumber;
    }
public String getArea() {
return Area;
}
public String getSector() {
return Sector;
}
public String getRodeNumber() {
return RodeNumber;
}
public String getHouseNumber() {
return HouseNumber;
}
}
|
package org.wikipedia.edit;
import androidx.annotation.Nullable;
import org.wikipedia.dataclient.mwapi.MwPostResponse;
public class Edit extends MwPostResponse {
@SuppressWarnings("unused,") @Nullable private Result edit;
@Nullable public Result edit() {
return edit;
}
public boolean hasEditResult() {
return edit != null;
}
public static class Result {
@SuppressWarnings("unused") @Nullable private String result;
@SuppressWarnings("unused") private long newrevid;
@SuppressWarnings("unused") @Nullable private Captcha captcha;
@SuppressWarnings("unused") @Nullable private String code;
@SuppressWarnings("unused") @Nullable private String info;
@SuppressWarnings("unused") @Nullable private String warning;
@SuppressWarnings("unused") @Nullable private String spamblacklist;
@Nullable public String status() {
return result;
}
public long newRevId() {
return newrevid;
}
public boolean editSucceeded() {
return "Success".equals(result);
}
@Nullable public String captchaId() {
return captcha == null ? null : captcha.id();
}
public boolean hasEditErrorCode() {
return code != null;
}
public boolean hasCaptchaResponse() {
return captcha != null;
}
@Nullable public String code() {
return code;
}
@Nullable public String info() {
return info;
}
@Nullable public String warning() {
return warning;
}
@Nullable public String spamblacklist() {
return spamblacklist;
}
public boolean hasSpamBlacklistResponse() {
return spamblacklist != null;
}
}
private static class Captcha {
@SuppressWarnings("unused") @Nullable private String id;
@Nullable public String id() {
return id;
}
}
}
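// Typical handling sketch for a deserialized edit API response ("response" below is a
// hypothetical Edit instance); only accessors defined above are used:
//
//     if (response.hasEditResult()) {
//         Edit.Result result = response.edit();
//         if (result.editSucceeded()) {
//             long revId = result.newRevId();
//         } else if (result.hasCaptchaResponse()) {
//             String captchaId = result.captchaId(); // re-prompt the user with the captcha
//         } else if (result.hasSpamBlacklistResponse()) {
//             String blockedUrl = result.spamblacklist();
//         } else if (result.hasEditErrorCode()) {
//             String errorCode = result.code();
//         }
//     }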
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.tree;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
import static com.google.common.base.MoreObjects.toStringHelper;
import static java.util.Objects.requireNonNull;
public class GroupBy
extends Node
{
private final boolean isDistinct;
private final List<GroupingElement> groupingElements;
public GroupBy(boolean isDistinct, List<GroupingElement> groupingElements)
{
this(Optional.empty(), isDistinct, groupingElements);
}
public GroupBy(NodeLocation location, boolean isDistinct, List<GroupingElement> groupingElements)
{
this(Optional.of(location), isDistinct, groupingElements);
}
private GroupBy(Optional<NodeLocation> location, boolean isDistinct, List<GroupingElement> groupingElements)
{
super(location);
this.isDistinct = isDistinct;
this.groupingElements = ImmutableList.copyOf(requireNonNull(groupingElements));
}
public boolean isDistinct()
{
return isDistinct;
}
public List<GroupingElement> getGroupingElements()
{
return groupingElements;
}
@Override
protected <R, C> R accept(AstVisitor<R, C> visitor, C context)
{
return visitor.visitGroupBy(this, context);
}
@Override
public boolean equals(Object o)
{
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
GroupBy groupBy = (GroupBy) o;
return isDistinct == groupBy.isDistinct &&
Objects.equals(groupingElements, groupBy.groupingElements);
}
@Override
public int hashCode()
{
return Objects.hash(isDistinct, groupingElements);
}
@Override
public String toString()
{
return toStringHelper(this)
.add("isDistinct", isDistinct)
.add("groupingElements", groupingElements)
.toString();
}
}
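// For example, the clause "GROUP BY a, b" parses to a GroupBy with isDistinct == false and
// one GroupingElement per item, while "GROUP BY DISTINCT ..." (the SQL:2011 set quantifier)
// yields isDistinct == true over the same grouping elements.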
|
/*===========================================================================
* Licensed Materials - Property of IBM
* "Restricted Materials of IBM"
*
* IBM SDK, Java(tm) Technology Edition, v8
* (C) Copyright IBM Corp. 2012, 2013. All Rights Reserved
*
* US Government Users Restricted Rights - Use, duplication or disclosure
* restricted by GSA ADP Schedule Contract with IBM Corp.
*===========================================================================
*/
/*
* Copyright (c) 2012, 2013, Oracle and/or its affiliates. All rights reserved.
* ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */
/*
* Copyright (c) 2012, Stephen Colebourne & Michael Nascimento Santos
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of JSR-310 nor the names of its contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package java.time.chrono;
import static java.time.chrono.MinguoChronology.YEARS_DIFFERENCE;
import static java.time.temporal.ChronoField.DAY_OF_MONTH;
import static java.time.temporal.ChronoField.MONTH_OF_YEAR;
import static java.time.temporal.ChronoField.YEAR;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.time.Clock;
import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.Period;
import java.time.ZoneId;
import java.time.temporal.ChronoField;
import java.time.temporal.TemporalAccessor;
import java.time.temporal.TemporalAdjuster;
import java.time.temporal.TemporalAmount;
import java.time.temporal.TemporalField;
import java.time.temporal.TemporalQuery;
import java.time.temporal.TemporalUnit;
import java.time.temporal.UnsupportedTemporalTypeException;
import java.time.temporal.ValueRange;
import java.util.Objects;
/**
* A date in the Minguo calendar system.
* <p>
* This date operates using the {@linkplain MinguoChronology Minguo calendar}.
* This calendar system is primarily used in the Republic of China, often known as Taiwan.
* Dates are aligned such that {@code 0001-01-01 (Minguo)} is {@code 1912-01-01 (ISO)}.
*
* <p>
* This is a <a href="{@docRoot}/java/lang/doc-files/ValueBased.html">value-based</a>
* class; use of identity-sensitive operations (including reference equality
* ({@code ==}), identity hash code, or synchronization) on instances of
* {@code MinguoDate} may have unpredictable results and should be avoided.
* The {@code equals} method should be used for comparisons.
*
* @implSpec
* This class is immutable and thread-safe.
*
* @since 1.8
*/
public final class MinguoDate
extends ChronoLocalDateImpl<MinguoDate>
implements ChronoLocalDate, Serializable {
/**
* Serialization version.
*/
private static final long serialVersionUID = 1300372329181994526L;
/**
* The underlying date.
*/
private final transient LocalDate isoDate;
//-----------------------------------------------------------------------
/**
* Obtains the current {@code MinguoDate} from the system clock in the default time-zone.
* <p>
* This will query the {@link Clock#systemDefaultZone() system clock} in the default
* time-zone to obtain the current date.
* <p>
* Using this method will prevent the ability to use an alternate clock for testing
* because the clock is hard-coded.
*
* @return the current date using the system clock and default time-zone, not null
*/
public static MinguoDate now() {
return now(Clock.systemDefaultZone());
}
/**
* Obtains the current {@code MinguoDate} from the system clock in the specified time-zone.
* <p>
* This will query the {@link Clock#system(ZoneId) system clock} to obtain the current date.
* Specifying the time-zone avoids dependence on the default time-zone.
* <p>
* Using this method will prevent the ability to use an alternate clock for testing
* because the clock is hard-coded.
*
* @param zone the zone ID to use, not null
* @return the current date using the system clock, not null
*/
public static MinguoDate now(ZoneId zone) {
return now(Clock.system(zone));
}
/**
* Obtains the current {@code MinguoDate} from the specified clock.
* <p>
* This will query the specified clock to obtain the current date - today.
* Using this method allows the use of an alternate clock for testing.
* The alternate clock may be introduced using {@linkplain Clock dependency injection}.
*
* @param clock the clock to use, not null
* @return the current date, not null
* @throws DateTimeException if the current date cannot be obtained
*/
public static MinguoDate now(Clock clock) {
return new MinguoDate(LocalDate.now(clock));
}
/**
* Obtains a {@code MinguoDate} representing a date in the Minguo calendar
* system from the proleptic-year, month-of-year and day-of-month fields.
* <p>
* This returns a {@code MinguoDate} with the specified fields.
* The day must be valid for the year and month, otherwise an exception will be thrown.
*
* @param prolepticYear the Minguo proleptic-year
* @param month the Minguo month-of-year, from 1 to 12
* @param dayOfMonth the Minguo day-of-month, from 1 to 31
* @return the date in Minguo calendar system, not null
* @throws DateTimeException if the value of any field is out of range,
* or if the day-of-month is invalid for the month-year
*/
public static MinguoDate of(int prolepticYear, int month, int dayOfMonth) {
return new MinguoDate(LocalDate.of(prolepticYear + YEARS_DIFFERENCE, month, dayOfMonth));
}
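// Editor's illustrative sketch (not part of the original JDK source):
// demonstrates the fixed offset between Minguo and ISO proleptic years,
// i.e. Minguo year 113 corresponds to ISO year 113 + 1911 = 2024.
private static void ofExample() {
MinguoDate md = MinguoDate.of(113, 1, 1); // Minguo 0113-01-01
LocalDate iso = LocalDate.from(md); // ISO 2024-01-01
assert iso.getYear() == 113 + YEARS_DIFFERENCE;
}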
/**
* Obtains a {@code MinguoDate} from a temporal object.
* <p>
* This obtains a date in the Minguo calendar system based on the specified temporal.
* A {@code TemporalAccessor} represents an arbitrary set of date and time information,
* which this factory converts to an instance of {@code MinguoDate}.
* <p>
* The conversion typically uses the {@link ChronoField#EPOCH_DAY EPOCH_DAY}
* field, which is standardized across calendar systems.
* <p>
* This method matches the signature of the functional interface {@link TemporalQuery}
* allowing it to be used as a query via method reference, {@code MinguoDate::from}.
*
* @param temporal the temporal object to convert, not null
* @return the date in Minguo calendar system, not null
* @throws DateTimeException if unable to convert to a {@code MinguoDate}
*/
public static MinguoDate from(TemporalAccessor temporal) {
return MinguoChronology.INSTANCE.date(temporal);
}
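// Editor's illustrative sketch (not part of the original JDK source):
// because from(TemporalAccessor) matches TemporalQuery, it can be used
// as a query via a method reference, as the javadoc above notes.
private static MinguoDate fromExample() {
return LocalDate.of(2024, 1, 1).query(MinguoDate::from); // Minguo 0113-01-01
}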
//-----------------------------------------------------------------------
/**
* Creates an instance from an ISO date.
*
* @param isoDate the standard local date, validated not null
*/
MinguoDate(LocalDate isoDate) {
Objects.requireNonNull(isoDate, "isoDate");
this.isoDate = isoDate;
}
//-----------------------------------------------------------------------
/**
* Gets the chronology of this date, which is the Minguo calendar system.
* <p>
* The {@code Chronology} represents the calendar system in use.
* The era and other fields in {@link ChronoField} are defined by the chronology.
*
* @return the Minguo chronology, not null
*/
@Override
public MinguoChronology getChronology() {
return MinguoChronology.INSTANCE;
}
/**
* Gets the era applicable at this date.
* <p>
* The Minguo calendar system has two eras, 'ROC' and 'BEFORE_ROC',
* defined by {@link MinguoEra}.
*
* @return the era applicable at this date, not null
*/
@Override
public MinguoEra getEra() {
return (getProlepticYear() >= 1 ? MinguoEra.ROC : MinguoEra.BEFORE_ROC);
}
/**
* Returns the length of the month represented by this date.
* <p>
* This returns the length of the month in days.
* Month lengths match those of the ISO calendar system.
*
* @return the length of the month in days
*/
@Override
public int lengthOfMonth() {
return isoDate.lengthOfMonth();
}
//-----------------------------------------------------------------------
@Override
public ValueRange range(TemporalField field) {
if (field instanceof ChronoField) {
if (isSupported(field)) {
ChronoField f = (ChronoField) field;
switch (f) {
case DAY_OF_MONTH:
case DAY_OF_YEAR:
case ALIGNED_WEEK_OF_MONTH:
return isoDate.range(field);
case YEAR_OF_ERA: {
ValueRange range = YEAR.range();
long max = (getProlepticYear() <= 0 ? -range.getMinimum() + 1 + YEARS_DIFFERENCE : range.getMaximum() - YEARS_DIFFERENCE);
return ValueRange.of(1, max);
}
}
return getChronology().range(f);
}
throw new UnsupportedTemporalTypeException("Unsupported field: " + field);
}
return field.rangeRefinedBy(this);
}
@Override
public long getLong(TemporalField field) {
if (field instanceof ChronoField) {
switch ((ChronoField) field) {
case PROLEPTIC_MONTH:
return getProlepticMonth();
case YEAR_OF_ERA: {
int prolepticYear = getProlepticYear();
return (prolepticYear >= 1 ? prolepticYear : 1 - prolepticYear);
}
case YEAR:
return getProlepticYear();
case ERA:
return (getProlepticYear() >= 1 ? 1 : 0);
}
return isoDate.getLong(field);
}
return field.getFrom(this);
}
private long getProlepticMonth() {
return getProlepticYear() * 12L + isoDate.getMonthValue() - 1;
}
private int getProlepticYear() {
return isoDate.getYear() - YEARS_DIFFERENCE;
}
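// Editor's worked example (not part of the original JDK source): for
// ISO 2024-03-15, getProlepticYear() = 2024 - 1911 = 113 and
// getProlepticMonth() = 113 * 12 + 3 - 1 = 1358.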
//-----------------------------------------------------------------------
@Override
public MinguoDate with(TemporalField field, long newValue) {
if (field instanceof ChronoField) {
ChronoField f = (ChronoField) field;
if (getLong(f) == newValue) {
return this;
}
switch (f) {
case PROLEPTIC_MONTH:
getChronology().range(f).checkValidValue(newValue, f);
return plusMonths(newValue - getProlepticMonth());
case YEAR_OF_ERA:
case YEAR:
case ERA: {
int nvalue = getChronology().range(f).checkValidIntValue(newValue, f);
switch (f) {
case YEAR_OF_ERA:
return with(isoDate.withYear(getProlepticYear() >= 1 ? nvalue + YEARS_DIFFERENCE : (1 - nvalue) + YEARS_DIFFERENCE));
case YEAR:
return with(isoDate.withYear(nvalue + YEARS_DIFFERENCE));
case ERA:
return with(isoDate.withYear((1 - getProlepticYear()) + YEARS_DIFFERENCE));
}
}
}
return with(isoDate.with(field, newValue));
}
return super.with(field, newValue);
}
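// Editor's illustrative sketch (not part of the original JDK source):
// YEAR_OF_ERA keeps the current era, while ERA reflects the proleptic
// year around year 1, e.g. for a date in Minguo year 5:
// date.with(ChronoField.YEAR_OF_ERA, 10) -> Minguo year 10 (ROC era)
// date.with(ChronoField.ERA, 0) -> Minguo year -4 (BEFORE_ROC)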
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
public MinguoDate with(TemporalAdjuster adjuster) {
return super.with(adjuster);
}
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
public MinguoDate plus(TemporalAmount amount) {
return super.plus(amount);
}
/**
* {@inheritDoc}
* @throws DateTimeException {@inheritDoc}
* @throws ArithmeticException {@inheritDoc}
*/
@Override
public MinguoDate minus(TemporalAmount amount) {
return super.minus(amount);
}
//-----------------------------------------------------------------------
@Override
MinguoDate plusYears(long years) {
return with(isoDate.plusYears(years));
}
@Override
MinguoDate plusMonths(long months) {
return with(isoDate.plusMonths(months));
}
@Override
MinguoDate plusWeeks(long weeksToAdd) {
return super.plusWeeks(weeksToAdd);
}
@Override
MinguoDate plusDays(long days) {
return with(isoDate.plusDays(days));
}
@Override
public MinguoDate plus(long amountToAdd, TemporalUnit unit) {
return super.plus(amountToAdd, unit);
}
@Override
public MinguoDate minus(long amountToAdd, TemporalUnit unit) {
return super.minus(amountToAdd, unit);
}
@Override
MinguoDate minusYears(long yearsToSubtract) {
return super.minusYears(yearsToSubtract);
}
@Override
MinguoDate minusMonths(long monthsToSubtract) {
return super.minusMonths(monthsToSubtract);
}
@Override
MinguoDate minusWeeks(long weeksToSubtract) {
return super.minusWeeks(weeksToSubtract);
}
@Override
MinguoDate minusDays(long daysToSubtract) {
return super.minusDays(daysToSubtract);
}
private MinguoDate with(LocalDate newDate) {
return (newDate.equals(isoDate) ? this : new MinguoDate(newDate));
}
@Override // for javadoc and covariant return type
@SuppressWarnings("unchecked")
public final ChronoLocalDateTime<MinguoDate> atTime(LocalTime localTime) {
return (ChronoLocalDateTime<MinguoDate>)super.atTime(localTime);
}
@Override
public ChronoPeriod until(ChronoLocalDate endDate) {
Period period = isoDate.until(endDate);
return getChronology().period(period.getYears(), period.getMonths(), period.getDays());
}
@Override // override for performance
public long toEpochDay() {
return isoDate.toEpochDay();
}
//-------------------------------------------------------------------------
/**
* Compares this date to another date, including the chronology.
* <p>
* Compares this {@code MinguoDate} with another ensuring that the date is the same.
* <p>
* Only objects of type {@code MinguoDate} are compared, other types return false.
* To compare the dates of two {@code TemporalAccessor} instances, including dates
* in two different chronologies, use {@link ChronoField#EPOCH_DAY} as a comparator.
*
* @param obj the object to check, null returns false
* @return true if this is equal to the other date
*/
@Override // override for performance
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj instanceof MinguoDate) {
MinguoDate otherDate = (MinguoDate) obj;
return this.isoDate.equals(otherDate.isoDate);
}
return false;
}
/**
* A hash code for this date.
*
* @return a suitable hash code based only on the Chronology and the date
*/
@Override // override for performance
public int hashCode() {
return getChronology().getId().hashCode() ^ isoDate.hashCode();
}
//-----------------------------------------------------------------------
/**
* Defend against malicious streams.
*
* @param s the stream to read
* @throws InvalidObjectException always
*/
private void readObject(ObjectInputStream s) throws InvalidObjectException {
throw new InvalidObjectException("Deserialization via serialization delegate");
}
/**
* Writes the object using a
* <a href="../../../serialized-form.html#java.time.chrono.Ser">dedicated serialized form</a>.
* @serialData
* <pre>
* out.writeByte(8); // identifies a MinguoDate
* out.writeInt(get(YEAR));
* out.writeByte(get(MONTH_OF_YEAR));
* out.writeByte(get(DAY_OF_MONTH));
* </pre>
*
* @return the instance of {@code Ser}, not null
*/
private Object writeReplace() {
return new Ser(Ser.MINGUO_DATE_TYPE, this);
}
void writeExternal(DataOutput out) throws IOException {
// MinguoChronology is implicit in the MINGUO_DATE_TYPE
out.writeInt(get(YEAR));
out.writeByte(get(MONTH_OF_YEAR));
out.writeByte(get(DAY_OF_MONTH));
}
static MinguoDate readExternal(DataInput in) throws IOException {
int year = in.readInt();
int month = in.readByte();
int dayOfMonth = in.readByte();
return MinguoChronology.INSTANCE.date(year, month, dayOfMonth);
}
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import static org.apache.commons.lang.StringUtils.join;
import java.io.IOException;
import java.net.InetAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import java.util.regex.Pattern;
import javax.jdo.JDODataStoreException;
import javax.jdo.JDOHelper;
import javax.jdo.JDOObjectNotFoundException;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import javax.jdo.Query;
import javax.jdo.Transaction;
import javax.jdo.datastore.DataStoreCache;
import javax.jdo.identity.IntIdentity;
import org.antlr.runtime.CommonTokenStream;
import org.antlr.runtime.RecognitionException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.common.ObjectPair;
import org.apache.hadoop.hive.common.classification.InterfaceAudience;
import org.apache.hadoop.hive.common.classification.InterfaceStability;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.metastore.api.AggrStats;
import org.apache.hadoop.hive.metastore.api.ColumnStatistics;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsDesc;
import org.apache.hadoop.hive.metastore.api.ColumnStatisticsObj;
import org.apache.hadoop.hive.metastore.api.CurrentNotificationEventId;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.metastore.api.Function;
import org.apache.hadoop.hive.metastore.api.FunctionType;
import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
import org.apache.hadoop.hive.metastore.api.HiveObjectType;
import org.apache.hadoop.hive.metastore.api.Index;
import org.apache.hadoop.hive.metastore.api.InvalidInputException;
import org.apache.hadoop.hive.metastore.api.InvalidObjectException;
import org.apache.hadoop.hive.metastore.api.InvalidPartitionException;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.NotificationEvent;
import org.apache.hadoop.hive.metastore.api.NotificationEventRequest;
import org.apache.hadoop.hive.metastore.api.NotificationEventResponse;
import org.apache.hadoop.hive.metastore.api.Order;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.PartitionEventType;
import org.apache.hadoop.hive.metastore.api.PrincipalPrivilegeSet;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
import org.apache.hadoop.hive.metastore.api.ResourceType;
import org.apache.hadoop.hive.metastore.api.ResourceUri;
import org.apache.hadoop.hive.metastore.api.Role;
import org.apache.hadoop.hive.metastore.api.SerDeInfo;
import org.apache.hadoop.hive.metastore.api.SkewedInfo;
import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.metastore.api.Type;
import org.apache.hadoop.hive.metastore.api.UnknownDBException;
import org.apache.hadoop.hive.metastore.api.UnknownPartitionException;
import org.apache.hadoop.hive.metastore.api.UnknownTableException;
import org.apache.hadoop.hive.metastore.model.MColumnDescriptor;
import org.apache.hadoop.hive.metastore.model.MDBPrivilege;
import org.apache.hadoop.hive.metastore.model.MDatabase;
import org.apache.hadoop.hive.metastore.model.MDelegationToken;
import org.apache.hadoop.hive.metastore.model.MFieldSchema;
import org.apache.hadoop.hive.metastore.model.MFunction;
import org.apache.hadoop.hive.metastore.model.MGlobalPrivilege;
import org.apache.hadoop.hive.metastore.model.MIndex;
import org.apache.hadoop.hive.metastore.model.MMasterKey;
import org.apache.hadoop.hive.metastore.model.MNotificationLog;
import org.apache.hadoop.hive.metastore.model.MNotificationNextId;
import org.apache.hadoop.hive.metastore.model.MOrder;
import org.apache.hadoop.hive.metastore.model.MPartition;
import org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege;
import org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics;
import org.apache.hadoop.hive.metastore.model.MPartitionEvent;
import org.apache.hadoop.hive.metastore.model.MPartitionPrivilege;
import org.apache.hadoop.hive.metastore.model.MResourceUri;
import org.apache.hadoop.hive.metastore.model.MRole;
import org.apache.hadoop.hive.metastore.model.MRoleMap;
import org.apache.hadoop.hive.metastore.model.MSerDeInfo;
import org.apache.hadoop.hive.metastore.model.MStorageDescriptor;
import org.apache.hadoop.hive.metastore.model.MStringList;
import org.apache.hadoop.hive.metastore.model.MTable;
import org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege;
import org.apache.hadoop.hive.metastore.model.MTableColumnStatistics;
import org.apache.hadoop.hive.metastore.model.MTablePrivilege;
import org.apache.hadoop.hive.metastore.model.MType;
import org.apache.hadoop.hive.metastore.model.MVersionTable;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.ANTLRNoCaseStringStream;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.FilterBuilder;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.LeafNode;
import org.apache.hadoop.hive.metastore.parser.ExpressionTree.Operator;
import org.apache.hadoop.hive.metastore.parser.FilterLexer;
import org.apache.hadoop.hive.metastore.parser.FilterParser;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.metastore.partition.spec.PartitionSpecProxy;
import org.apache.hadoop.util.StringUtils;
import org.apache.hive.common.util.HiveStringUtils;
import org.apache.thrift.TException;
import org.datanucleus.store.rdbms.exceptions.MissingTableException;
import com.google.common.collect.Lists;
/**
* This class is the interface between the application logic and the database
* store that contains the objects. Refrain from putting any logic in model.M*
* objects or in this file, as the former could be auto-generated and this
* class would then need to become an interface that can read both from a
* database and a filestore.
*/
public class ObjectStore implements RawStore, Configurable {
private static Properties prop = null;
private static PersistenceManagerFactory pmf = null;
private static Lock pmfPropLock = new ReentrantLock();
/**
* Verify the schema only once per JVM since the db connection info is static
*/
private final static AtomicBoolean isSchemaVerified = new AtomicBoolean(false);
private static final Log LOG = LogFactory.getLog(ObjectStore.class.getName());
private static enum TXN_STATUS {
NO_STATE, OPEN, COMMITED, ROLLBACK
}
private static final Map<String, Class> PINCLASSMAP;
private static final String HOSTNAME;
private static final String USER;
static {
Map<String, Class> map = new HashMap<String, Class>();
map.put("table", MTable.class);
map.put("storagedescriptor", MStorageDescriptor.class);
map.put("serdeinfo", MSerDeInfo.class);
map.put("partition", MPartition.class);
map.put("database", MDatabase.class);
map.put("type", MType.class);
map.put("fieldschema", MFieldSchema.class);
map.put("order", MOrder.class);
PINCLASSMAP = Collections.unmodifiableMap(map);
String hostname = "UNKNOWN";
try {
InetAddress clientAddr = InetAddress.getLocalHost();
hostname = clientAddr.getHostAddress();
} catch (IOException e) {
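// hostname resolution failed; fall back to the "UNKNOWN" default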
}
HOSTNAME = hostname;
String user = System.getenv("USER");
if (user == null) {
USER = "UNKNOWN";
} else {
USER = user;
}
}
private boolean isInitialized = false;
private PersistenceManager pm = null;
private MetaStoreDirectSql directSql = null;
private PartitionExpressionProxy expressionProxy = null;
private Configuration hiveConf;
int openTrasactionCalls = 0;
private Transaction currentTransaction = null;
private TXN_STATUS transactionStatus = TXN_STATUS.NO_STATE;
private Pattern partitionValidationPattern;
public ObjectStore() {
}
@Override
public Configuration getConf() {
return hiveConf;
}
/**
* Called whenever this object is instantiated using ReflectionUtils, and also
* on connection retries. In cases of connection retries, conf will usually
* contain modified values.
*/
@Override
@SuppressWarnings("nls")
public void setConf(Configuration conf) {
// Although an instance of ObjectStore is accessed by one thread, there may
// be many threads with ObjectStore instances. So the static variables
// pmf and prop need to be protected with locks.
pmfPropLock.lock();
try {
isInitialized = false;
hiveConf = conf;
Properties propsFromConf = getDataSourceProps(conf);
boolean propsChanged = !propsFromConf.equals(prop);
if (propsChanged) {
pmf = null;
prop = null;
}
assert(!isActiveTransaction());
shutdown();
// Always re-create pm, as we don't know whether it was created by the
// most recent instance of the pmf
pm = null;
directSql = null;
expressionProxy = null;
openTrasactionCalls = 0;
currentTransaction = null;
transactionStatus = TXN_STATUS.NO_STATE;
initialize(propsFromConf);
String partitionValidationRegex =
hiveConf.get(HiveConf.ConfVars.METASTORE_PARTITION_NAME_WHITELIST_PATTERN.name());
if (partitionValidationRegex != null && !partitionValidationRegex.isEmpty()) {
partitionValidationPattern = Pattern.compile(partitionValidationRegex);
} else {
partitionValidationPattern = null;
}
if (!isInitialized) {
throw new RuntimeException(
"Unable to create persistence manager. Check dss.log for details");
} else {
LOG.info("Initialized ObjectStore");
}
} finally {
pmfPropLock.unlock();
}
}
private ClassLoader classLoader;
{
classLoader = Thread.currentThread().getContextClassLoader();
if (classLoader == null) {
classLoader = ObjectStore.class.getClassLoader();
}
}
@SuppressWarnings("nls")
private void initialize(Properties dsProps) {
LOG.info("ObjectStore, initialize called");
prop = dsProps;
pm = getPersistenceManager();
isInitialized = pm != null;
if (isInitialized) {
expressionProxy = createExpressionProxy(hiveConf);
directSql = new MetaStoreDirectSql(pm, hiveConf);
}
LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm +
" created in the thread with id: " + Thread.currentThread().getId());
}
/**
* Creates the proxy used to evaluate expressions. This is here to prevent circular
* dependency - ql -> metastore client <-> metastore server -> ql. If server and
* client are split, this can be removed.
* @param conf Configuration.
* @return The partition expression proxy.
*/
private static PartitionExpressionProxy createExpressionProxy(Configuration conf) {
String className = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_EXPRESSION_PROXY_CLASS);
try {
@SuppressWarnings("unchecked")
Class<? extends PartitionExpressionProxy> clazz =
(Class<? extends PartitionExpressionProxy>)MetaStoreUtils.getClass(className);
return MetaStoreUtils.newInstance(
clazz, new Class<?>[0], new Object[0]);
} catch (MetaException e) {
LOG.error("Error loading PartitionExpressionProxy", e);
throw new RuntimeException("Error loading PartitionExpressionProxy: " + e.getMessage());
}
}
/**
* Properties specified in hive-default.xml override the properties specified
* in jpox.properties.
*/
@SuppressWarnings("nls")
private static Properties getDataSourceProps(Configuration conf) {
Properties prop = new Properties();
Iterator<Map.Entry<String, String>> iter = conf.iterator();
while (iter.hasNext()) {
Map.Entry<String, String> e = iter.next();
if (e.getKey().contains("datanucleus") || e.getKey().contains("jdo")) {
Object prevVal = prop.setProperty(e.getKey(), conf.get(e.getKey()));
if (LOG.isDebugEnabled()
&& !e.getKey().equals(HiveConf.ConfVars.METASTOREPWD.varname)) {
LOG.debug("Overriding " + e.getKey() + " value " + prevVal
+ " from jpox.properties with " + e.getValue());
}
}
}
// Password may no longer be in the conf, use getPassword()
try {
String passwd =
ShimLoader.getHadoopShims().getPassword(conf, HiveConf.ConfVars.METASTOREPWD.varname);
if (passwd != null && !passwd.isEmpty()) {
prop.setProperty(HiveConf.ConfVars.METASTOREPWD.varname, passwd);
}
} catch (IOException err) {
throw new RuntimeException("Error getting metastore password: " + err.getMessage(), err);
}
if (LOG.isDebugEnabled()) {
for (Entry<Object, Object> e : prop.entrySet()) {
if (!e.getKey().equals(HiveConf.ConfVars.METASTOREPWD.varname)) {
LOG.debug(e.getKey() + " = " + e.getValue());
}
}
}
return prop;
}
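// Editor's illustrative sketch (not part of the original Hive source):
// only configuration keys containing "datanucleus" or "jdo" are copied
// into the returned Properties, so e.g.
// conf.set("javax.jdo.option.ConnectionURL", "jdbc:derby:;databaseName=metastore_db");
// surfaces as a JDO property, while unrelated Hadoop keys are ignored.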
private static synchronized PersistenceManagerFactory getPMF() {
if (pmf == null) {
pmf = JDOHelper.getPersistenceManagerFactory(prop);
DataStoreCache dsc = pmf.getDataStoreCache();
if (dsc != null) {
HiveConf conf = new HiveConf(ObjectStore.class);
String objTypes = HiveConf.getVar(conf, HiveConf.ConfVars.METASTORE_CACHE_PINOBJTYPES);
LOG.info("Setting MetaStore object pin classes with hive.metastore.cache.pinobjtypes=\"" + objTypes + "\"");
if (objTypes != null && objTypes.length() > 0) {
objTypes = objTypes.toLowerCase();
String[] typeTokens = objTypes.split(",");
for (String type : typeTokens) {
type = type.trim();
if (PINCLASSMAP.containsKey(type)) {
dsc.pinAll(true, PINCLASSMAP.get(type));
}
else {
LOG.warn(type + " is not one of the pinnable object types: " + org.apache.commons.lang.StringUtils.join(PINCLASSMAP.keySet(), " "));
}
}
}
} else {
LOG.warn("PersistenceManagerFactory returned null DataStoreCache object. Unable to initialize object pin types defined by hive.metastore.cache.pinobjtypes");
}
}
return pmf;
}
@InterfaceAudience.LimitedPrivate({"HCATALOG"})
@InterfaceStability.Evolving
public PersistenceManager getPersistenceManager() {
return getPMF().getPersistenceManager();
}
@Override
public void shutdown() {
if (pm != null) {
LOG.debug("RawStore: " + this + ", with PersistenceManager: " + pm +
" will be shutdown");
pm.close();
}
}
/**
* Opens a new transaction, or reuses the one already created. Every call of
* this method must have a corresponding commit or rollback call.
*
* @return an active transaction
*/
@Override
public boolean openTransaction() {
openTrasactionCalls++;
if (openTrasactionCalls == 1) {
currentTransaction = pm.currentTransaction();
currentTransaction.begin();
transactionStatus = TXN_STATUS.OPEN;
} else {
// openTransactionCalls > 1 means this is an interior transaction
// We should already have a transaction created that is active.
if ((currentTransaction == null) || (!currentTransaction.isActive())){
throw new RuntimeException("openTransaction called in an interior"
+ " transaction scope, but currentTransaction is not active.");
}
}
boolean result = currentTransaction.isActive();
debugLog("Open transaction: count = " + openTrasactionCalls + ", isActive = " + result);
return result;
}
/**
* If this is the commit matching the first open call, an actual commit is
* performed.
*
* @return true if the commit succeeded; false if the transaction had
* already been marked for rollback
*/
@Override
@SuppressWarnings("nls")
public boolean commitTransaction() {
if (TXN_STATUS.ROLLBACK == transactionStatus) {
debugLog("Commit transaction: rollback");
return false;
}
if (openTrasactionCalls <= 0) {
RuntimeException e = new RuntimeException("commitTransaction was called but openTransactionCalls = "
+ openTrasactionCalls + ". This probably indicates that there are unbalanced " +
"calls to openTransaction/commitTransaction");
LOG.error(e);
throw e;
}
if (!currentTransaction.isActive()) {
RuntimeException e = new RuntimeException("commitTransaction was called but the current "
+ "transaction is not active. This probably indicates that there are unbalanced " +
"calls to openTransaction/commitTransaction");
LOG.error(e);
throw e;
}
openTrasactionCalls--;
debugLog("Commit transaction: count = " + openTrasactionCalls + ", isactive "+ currentTransaction.isActive());
if ((openTrasactionCalls == 0) && currentTransaction.isActive()) {
transactionStatus = TXN_STATUS.COMMITED;
currentTransaction.commit();
}
return true;
}
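// Editor's illustrative sketch (not part of the original Hive source):
// shows the open/commit pairing contract described above. Nested opens
// only increment a counter; only the outermost commit really commits.
private void transactionPairingExample() {
boolean ok = false;
try {
openTransaction(); // outer open begins the JDO transaction
openTransaction(); // inner open just increments the counter
commitTransaction(); // inner commit only decrements the counter
ok = commitTransaction(); // outer commit performs the real commit
} finally {
if (!ok) {
rollbackTransaction(); // unbalanced or failed calls roll back
}
}
}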
/**
* @return true if there is an active transaction. If the current transaction
* is either committed or rolled back it returns false
*/
public boolean isActiveTransaction() {
if (currentTransaction == null) {
return false;
}
return currentTransaction.isActive();
}
/**
* Rolls back the current transaction if it is active
*/
@Override
public void rollbackTransaction() {
if (openTrasactionCalls < 1) {
debugLog("rolling back transaction: no open transactions: " + openTrasactionCalls);
return;
}
debugLog("Rollback transaction, isActive: " + currentTransaction.isActive());
try {
if (currentTransaction.isActive()
&& transactionStatus != TXN_STATUS.ROLLBACK) {
currentTransaction.rollback();
}
} finally {
openTrasactionCalls = 0;
transactionStatus = TXN_STATUS.ROLLBACK;
// Remove all detached objects from the cache. Since the transaction is
// being rolled back, they are no longer relevant, and this prevents them
// from reattaching in future transactions.
pm.evictAll();
}
}
@Override
public void createDatabase(Database db) throws InvalidObjectException, MetaException {
boolean commited = false;
MDatabase mdb = new MDatabase();
mdb.setName(db.getName().toLowerCase());
mdb.setLocationUri(db.getLocationUri());
mdb.setDescription(db.getDescription());
mdb.setParameters(db.getParameters());
mdb.setOwnerName(db.getOwnerName());
PrincipalType ownerType = db.getOwnerType();
mdb.setOwnerType((null == ownerType ? PrincipalType.USER.name() : ownerType.name()));
try {
openTransaction();
pm.makePersistent(mdb);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
@SuppressWarnings("nls")
private MDatabase getMDatabase(String name) throws NoSuchObjectException {
MDatabase mdb = null;
boolean commited = false;
try {
openTransaction();
name = HiveStringUtils.normalizeIdentifier(name);
Query query = pm.newQuery(MDatabase.class, "name == dbname");
query.declareParameters("java.lang.String dbname");
query.setUnique(true);
mdb = (MDatabase) query.execute(name);
pm.retrieve(mdb);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
if (mdb == null) {
throw new NoSuchObjectException("There is no database named " + name);
}
return mdb;
}
@Override
public Database getDatabase(String name) throws NoSuchObjectException {
MetaException ex = null;
Database db = null;
try {
db = getDatabaseInternal(name);
} catch (MetaException e) {
// Signature restriction to NSOE, and NSOE being a flat exception prevents us from
// setting the cause of the NSOE as the MetaException. We should not lose the info
// we got here, but it's very likely that the MetaException is irrelevant and is
// actually an NSOE message, so we should log it and throw an NSOE with the msg.
ex = e;
}
if (db == null) {
LOG.warn("Failed to get database " + name +", returning NoSuchObjectException", ex);
throw new NoSuchObjectException(name + (ex == null ? "" : (": " + ex.getMessage())));
}
return db;
}
public Database getDatabaseInternal(String name) throws MetaException, NoSuchObjectException {
return new GetDbHelper(name, null, true, true) {
@Override
protected Database getSqlResult(GetHelper<Database> ctx) throws MetaException {
return directSql.getDatabase(dbName);
}
@Override
protected Database getJdoResult(GetHelper<Database> ctx) throws MetaException, NoSuchObjectException {
return getJDODatabase(dbName);
}
}.run(false);
}
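// Editor's note (not part of the original Hive source): GetDbHelper.run
// attempts the direct-SQL path (getSqlResult) first and falls back to the
// JDO path (getJdoResult), so both must yield equivalent Database objects
// for the same database name.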
public Database getJDODatabase(String name) throws NoSuchObjectException {
MDatabase mdb = null;
boolean commited = false;
try {
openTransaction();
mdb = getMDatabase(name);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
Database db = new Database();
db.setName(mdb.getName());
db.setDescription(mdb.getDescription());
db.setLocationUri(mdb.getLocationUri());
db.setParameters(convertMap(mdb.getParameters()));
db.setOwnerName(mdb.getOwnerName());
String type = mdb.getOwnerType();
db.setOwnerType((null == type || type.trim().isEmpty()) ? null : PrincipalType.valueOf(type));
return db;
}
/**
* Alter the database object in metastore. Currently only the parameters
* of the database or the owner can be changed.
* @param dbName the database name
* @param db the Hive Database object
* @throws MetaException
* @throws NoSuchObjectException
*/
@Override
public boolean alterDatabase(String dbName, Database db)
throws MetaException, NoSuchObjectException {
MDatabase mdb = null;
boolean committed = false;
try {
mdb = getMDatabase(dbName);
mdb.setParameters(db.getParameters());
mdb.setOwnerName(db.getOwnerName());
if (db.getOwnerType() != null) {
mdb.setOwnerType(db.getOwnerType().name());
}
openTransaction();
pm.makePersistent(mdb);
committed = commitTransaction();
} finally {
if (!committed) {
rollbackTransaction();
return false;
}
}
return true;
}
@Override
public boolean dropDatabase(String dbname) throws NoSuchObjectException, MetaException {
boolean success = false;
LOG.info("Dropping database " + dbname + " along with all tables");
dbname = HiveStringUtils.normalizeIdentifier(dbname);
try {
openTransaction();
// then drop the database
MDatabase db = getMDatabase(dbname);
pm.retrieve(db);
if (db != null) {
List<MDBPrivilege> dbGrants = this.listDatabaseGrants(dbname);
if (dbGrants != null && dbGrants.size() > 0) {
pm.deletePersistentAll(dbGrants);
}
pm.deletePersistent(db);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
@Override
public List<String> getDatabases(String pattern) throws MetaException {
boolean commited = false;
List<String> databases = null;
try {
openTransaction();
// Take the pattern and split it on the | to get all the composing
// patterns
String[] subpatterns = pattern.trim().split("\\|");
String query = "select name from org.apache.hadoop.hive.metastore.model.MDatabase where (";
boolean first = true;
for (String subpattern : subpatterns) {
subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
if (!first) {
query = query + " || ";
}
query = query + " name.matches(\"" + subpattern + "\")";
first = false;
}
query = query + ")";
Query q = pm.newQuery(query);
q.setResult("name");
q.setOrdering("name ascending");
Collection names = (Collection) q.execute();
databases = new ArrayList<String>();
for (Iterator i = names.iterator(); i.hasNext();) {
databases.add((String) i.next());
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return databases;
}
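// Editor's illustrative sketch (not part of the original Hive source):
// the pattern grammar above is a '|'-separated list of globs where '*'
// is a wildcard, matched case-insensitively.
private List<String> databasePatternExample() throws MetaException {
// returns "default" plus any database whose name starts with "prod"
return getDatabases("default|prod*");
}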
@Override
public List<String> getAllDatabases() throws MetaException {
return getDatabases(".*");
}
private MType getMType(Type type) {
List<MFieldSchema> fields = new ArrayList<MFieldSchema>();
if (type.getFields() != null) {
for (FieldSchema field : type.getFields()) {
fields.add(new MFieldSchema(field.getName(), field.getType(), field
.getComment()));
}
}
return new MType(type.getName(), type.getType1(), type.getType2(), fields);
}
private Type getType(MType mtype) {
List<FieldSchema> fields = new ArrayList<FieldSchema>();
if (mtype.getFields() != null) {
for (MFieldSchema field : mtype.getFields()) {
fields.add(new FieldSchema(field.getName(), field.getType(), field
.getComment()));
}
}
Type ret = new Type();
ret.setName(mtype.getName());
ret.setType1(mtype.getType1());
ret.setType2(mtype.getType2());
ret.setFields(fields);
return ret;
}
@Override
public boolean createType(Type type) {
boolean success = false;
MType mtype = getMType(type);
boolean commited = false;
try {
openTransaction();
pm.makePersistent(mtype);
commited = commitTransaction();
success = true;
} finally {
if (!commited) {
rollbackTransaction();
}
}
return success;
}
@Override
public Type getType(String typeName) {
Type type = null;
boolean commited = false;
try {
openTransaction();
Query query = pm.newQuery(MType.class, "name == typeName");
query.declareParameters("java.lang.String typeName");
query.setUnique(true);
MType mtype = (MType) query.execute(typeName.trim());
pm.retrieve(mtype);
if (mtype != null) {
type = getType(mtype);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return type;
}
@Override
public boolean dropType(String typeName) {
boolean success = false;
try {
openTransaction();
Query query = pm.newQuery(MType.class, "name == typeName");
query.declareParameters("java.lang.String typeName");
query.setUnique(true);
MType type = (MType) query.execute(typeName.trim());
pm.retrieve(type);
if (type != null) {
pm.deletePersistent(type);
}
success = commitTransaction();
} catch (JDOObjectNotFoundException e) {
success = commitTransaction();
LOG.debug("type not found " + typeName, e);
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
@Override
public void createTable(Table tbl) throws InvalidObjectException, MetaException {
boolean commited = false;
try {
openTransaction();
MTable mtbl = convertToMTable(tbl);
pm.makePersistent(mtbl);
PrincipalPrivilegeSet principalPrivs = tbl.getPrivileges();
List<Object> toPersistPrivObjs = new ArrayList<Object>();
if (principalPrivs != null) {
int now = (int)(System.currentTimeMillis()/1000);
Map<String, List<PrivilegeGrantInfo>> userPrivs = principalPrivs.getUserPrivileges();
putPersistentPrivObjects(mtbl, toPersistPrivObjs, now, userPrivs, PrincipalType.USER);
Map<String, List<PrivilegeGrantInfo>> groupPrivs = principalPrivs.getGroupPrivileges();
putPersistentPrivObjects(mtbl, toPersistPrivObjs, now, groupPrivs, PrincipalType.GROUP);
Map<String, List<PrivilegeGrantInfo>> rolePrivs = principalPrivs.getRolePrivileges();
putPersistentPrivObjects(mtbl, toPersistPrivObjs, now, rolePrivs, PrincipalType.ROLE);
}
pm.makePersistentAll(toPersistPrivObjs);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
/**
* Convert PrivilegeGrantInfo from privMap to MTablePrivilege, and add all of
* them to the toPersistPrivObjs. These privilege objects will be persisted as
* part of createTable.
*
* @param mtbl
* @param toPersistPrivObjs
* @param now
* @param privMap
* @param type
*/
private void putPersistentPrivObjects(MTable mtbl, List<Object> toPersistPrivObjs,
int now, Map<String, List<PrivilegeGrantInfo>> privMap, PrincipalType type) {
if (privMap != null) {
for (Map.Entry<String, List<PrivilegeGrantInfo>> entry : privMap
.entrySet()) {
String principalName = entry.getKey();
List<PrivilegeGrantInfo> privs = entry.getValue();
for (int i = 0; i < privs.size(); i++) {
PrivilegeGrantInfo priv = privs.get(i);
if (priv == null) {
continue;
}
MTablePrivilege mTblSec = new MTablePrivilege(
principalName, type.toString(), mtbl, priv.getPrivilege(),
now, priv.getGrantor(), priv.getGrantorType().toString(), priv
.isGrantOption());
toPersistPrivObjs.add(mTblSec);
}
}
}
}
@Override
public boolean dropTable(String dbName, String tableName) throws MetaException,
NoSuchObjectException, InvalidObjectException, InvalidInputException {
boolean success = false;
try {
openTransaction();
MTable tbl = getMTable(dbName, tableName);
pm.retrieve(tbl);
if (tbl != null) {
// first remove all the grants
List<MTablePrivilege> tabGrants = listAllTableGrants(dbName, tableName);
if (tabGrants != null && tabGrants.size() > 0) {
pm.deletePersistentAll(tabGrants);
}
List<MTableColumnPrivilege> tblColGrants = listTableAllColumnGrants(dbName,
tableName);
if (tblColGrants != null && tblColGrants.size() > 0) {
pm.deletePersistentAll(tblColGrants);
}
List<MPartitionPrivilege> partGrants = this.listTableAllPartitionGrants(dbName, tableName);
if (partGrants != null && partGrants.size() > 0) {
pm.deletePersistentAll(partGrants);
}
List<MPartitionColumnPrivilege> partColGrants = listTableAllPartitionColumnGrants(dbName,
tableName);
if (partColGrants != null && partColGrants.size() > 0) {
pm.deletePersistentAll(partColGrants);
}
// delete column statistics if present
try {
deleteTableColumnStatistics(dbName, tableName, null);
} catch (NoSuchObjectException e) {
LOG.info("Found no table level column statistics associated with db " + dbName +
" table " + tableName + " record to delete");
}
preDropStorageDescriptor(tbl.getSd());
// then remove the table
pm.deletePersistentAll(tbl);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
@Override
public Table getTable(String dbName, String tableName) throws MetaException {
boolean commited = false;
Table tbl = null;
try {
openTransaction();
tbl = convertToTable(getMTable(dbName, tableName));
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return tbl;
}
@Override
public List<String> getTables(String dbName, String pattern)
throws MetaException {
boolean commited = false;
List<String> tbls = null;
try {
openTransaction();
dbName = HiveStringUtils.normalizeIdentifier(dbName);
// Take the pattern and split it on the | to get all the composing
// patterns
String[] subpatterns = pattern.trim().split("\\|");
String query =
"select tableName from org.apache.hadoop.hive.metastore.model.MTable "
+ "where database.name == dbName && (";
boolean first = true;
for (String subpattern : subpatterns) {
subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
if (!first) {
query = query + " || ";
}
query = query + " tableName.matches(\"" + subpattern + "\")";
first = false;
}
query = query + ")";
Query q = pm.newQuery(query);
q.declareParameters("java.lang.String dbName");
q.setResult("tableName");
q.setOrdering("tableName ascending");
Collection names = (Collection) q.execute(dbName);
tbls = new ArrayList<String>();
for (Iterator i = names.iterator(); i.hasNext();) {
tbls.add((String) i.next());
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return tbls;
}
@Override
public List<String> getAllTables(String dbName) throws MetaException {
return getTables(dbName, ".*");
}
private MTable getMTable(String db, String table) {
MTable mtbl = null;
boolean commited = false;
try {
openTransaction();
db = HiveStringUtils.normalizeIdentifier(db);
table = HiveStringUtils.normalizeIdentifier(table);
Query query = pm.newQuery(MTable.class, "tableName == table && database.name == db");
query.declareParameters("java.lang.String table, java.lang.String db");
query.setUnique(true);
mtbl = (MTable) query.execute(table, db);
pm.retrieve(mtbl);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return mtbl;
}
@Override
public List<Table> getTableObjectsByName(String db, List<String> tbl_names)
throws MetaException, UnknownDBException {
List<Table> tables = new ArrayList<Table>();
boolean committed = false;
try {
openTransaction();
db = HiveStringUtils.normalizeIdentifier(db);
Query dbExistsQuery = pm.newQuery(MDatabase.class, "name == db");
dbExistsQuery.declareParameters("java.lang.String db");
dbExistsQuery.setUnique(true);
dbExistsQuery.setResult("name");
String dbNameIfExists = (String) dbExistsQuery.execute(db);
if (dbNameIfExists == null || dbNameIfExists.isEmpty()) {
throw new UnknownDBException("Could not find database " + db);
}
List<String> lowered_tbl_names = new ArrayList<String>();
for (String t : tbl_names) {
lowered_tbl_names.add(HiveStringUtils.normalizeIdentifier(t));
}
Query query = pm.newQuery(MTable.class);
query.setFilter("database.name == db && tbl_names.contains(tableName)");
query.declareParameters("java.lang.String db, java.util.Collection tbl_names");
Collection mtables = (Collection) query.execute(db, lowered_tbl_names);
for (Iterator iter = mtables.iterator(); iter.hasNext();) {
tables.add(convertToTable((MTable) iter.next()));
}
committed = commitTransaction();
} finally {
if (!committed) {
rollbackTransaction();
}
}
return tables;
}
/** Makes shallow copy of a list to avoid DataNucleus mucking with our objects. */
private <T> List<T> convertList(List<T> dnList) {
return (dnList == null) ? null : Lists.newArrayList(dnList);
}
/** Makes shallow copy of a map to avoid DataNucleus mucking with our objects. */
private Map<String, String> convertMap(Map<String, String> dnMap) {
return MetaStoreUtils.trimMapNulls(dnMap,
HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_ORM_RETRIEVE_MAPNULLS_AS_EMPTY_STRINGS));
}
private Table convertToTable(MTable mtbl) throws MetaException {
if (mtbl == null) {
return null;
}
String tableType = mtbl.getTableType();
if (tableType == null) {
// for backwards compatibility with old metastore persistence
if (mtbl.getViewOriginalText() != null) {
tableType = TableType.VIRTUAL_VIEW.toString();
} else if ("TRUE".equals(mtbl.getParameters().get("EXTERNAL"))) {
tableType = TableType.EXTERNAL_TABLE.toString();
} else {
tableType = TableType.MANAGED_TABLE.toString();
}
}
return new Table(mtbl.getTableName(), mtbl.getDatabase().getName(), mtbl
.getOwner(), mtbl.getCreateTime(), mtbl.getLastAccessTime(), mtbl
.getRetention(), convertToStorageDescriptor(mtbl.getSd()),
convertToFieldSchemas(mtbl.getPartitionKeys()), convertMap(mtbl.getParameters()),
mtbl.getViewOriginalText(), mtbl.getViewExpandedText(), tableType);
}
private MTable convertToMTable(Table tbl) throws InvalidObjectException,
MetaException {
if (tbl == null) {
return null;
}
MDatabase mdb = null;
try {
mdb = getMDatabase(tbl.getDbName());
} catch (NoSuchObjectException e) {
LOG.error(StringUtils.stringifyException(e));
throw new InvalidObjectException("Database " + tbl.getDbName()
+ " doesn't exist.");
}
// If the table has property EXTERNAL set, update table type
// accordingly
String tableType = tbl.getTableType();
boolean isExternal = "TRUE".equals(tbl.getParameters().get("EXTERNAL"));
if (TableType.MANAGED_TABLE.toString().equals(tableType)) {
if (isExternal) {
tableType = TableType.EXTERNAL_TABLE.toString();
}
}
if (TableType.EXTERNAL_TABLE.toString().equals(tableType)) {
if (!isExternal) {
tableType = TableType.MANAGED_TABLE.toString();
}
}
// A new table is always created with a new column descriptor
return new MTable(HiveStringUtils.normalizeIdentifier(tbl.getTableName()), mdb,
convertToMStorageDescriptor(tbl.getSd()), tbl.getOwner(), tbl
.getCreateTime(), tbl.getLastAccessTime(), tbl.getRetention(),
convertToMFieldSchemas(tbl.getPartitionKeys()), tbl.getParameters(),
tbl.getViewOriginalText(), tbl.getViewExpandedText(),
tableType);
}
private List<MFieldSchema> convertToMFieldSchemas(List<FieldSchema> keys) {
List<MFieldSchema> mkeys = null;
if (keys != null) {
mkeys = new ArrayList<MFieldSchema>(keys.size());
for (FieldSchema part : keys) {
mkeys.add(new MFieldSchema(HiveStringUtils.normalizeIdentifier(part.getName()),
part.getType(), part.getComment()));
}
}
return mkeys;
}
private List<FieldSchema> convertToFieldSchemas(List<MFieldSchema> mkeys) {
List<FieldSchema> keys = null;
if (mkeys != null) {
keys = new ArrayList<FieldSchema>(mkeys.size());
for (MFieldSchema part : mkeys) {
keys.add(new FieldSchema(part.getName(), part.getType(), part
.getComment()));
}
}
return keys;
}
private List<MOrder> convertToMOrders(List<Order> keys) {
List<MOrder> mkeys = null;
if (keys != null) {
mkeys = new ArrayList<MOrder>(keys.size());
for (Order part : keys) {
mkeys.add(new MOrder(HiveStringUtils.normalizeIdentifier(part.getCol()), part.getOrder()));
}
}
return mkeys;
}
private List<Order> convertToOrders(List<MOrder> mkeys) {
List<Order> keys = null;
if (mkeys != null) {
keys = new ArrayList<Order>(mkeys.size());
for (MOrder part : mkeys) {
keys.add(new Order(part.getCol(), part.getOrder()));
}
}
return keys;
}
private SerDeInfo convertToSerDeInfo(MSerDeInfo ms) throws MetaException {
if (ms == null) {
throw new MetaException("Invalid SerDeInfo object");
}
return new SerDeInfo(ms.getName(), ms.getSerializationLib(), convertMap(ms.getParameters()));
}
private MSerDeInfo convertToMSerDeInfo(SerDeInfo ms) throws MetaException {
if (ms == null) {
throw new MetaException("Invalid SerDeInfo object");
}
return new MSerDeInfo(ms.getName(), ms.getSerializationLib(), ms
.getParameters());
}
/**
* Given a list of model field schemas, create a new model column descriptor.
* @param cols the columns the column descriptor contains
* @return a new column descriptor db-backed object
*/
private MColumnDescriptor createNewMColumnDescriptor(List<MFieldSchema> cols) {
if (cols == null) {
return null;
}
return new MColumnDescriptor(cols);
}
// MSD and SD should be the same objects. Not sure how to make them the same right now.
// MSerDeInfo & SerDeInfo should be the same as well.
private StorageDescriptor convertToStorageDescriptor(MStorageDescriptor msd,
boolean noFS)
throws MetaException {
if (msd == null) {
return null;
}
List<MFieldSchema> mFieldSchemas = msd.getCD() == null ? null : msd.getCD().getCols();
StorageDescriptor sd = new StorageDescriptor(noFS ? null : convertToFieldSchemas(mFieldSchemas),
msd.getLocation(), msd.getInputFormat(), msd.getOutputFormat(), msd
.isCompressed(), msd.getNumBuckets(), convertToSerDeInfo(msd
.getSerDeInfo()), convertList(msd.getBucketCols()), convertToOrders(msd
.getSortCols()), convertMap(msd.getParameters()));
SkewedInfo skewedInfo = new SkewedInfo(convertList(msd.getSkewedColNames()),
convertToSkewedValues(msd.getSkewedColValues()),
covertToSkewedMap(msd.getSkewedColValueLocationMaps()));
sd.setSkewedInfo(skewedInfo);
sd.setStoredAsSubDirectories(msd.isStoredAsSubDirectories());
return sd;
}
private StorageDescriptor convertToStorageDescriptor(MStorageDescriptor msd)
throws MetaException {
return convertToStorageDescriptor(msd, false);
}
/**
* Converts a list of MStringList to a list of lists of strings.
*
* @param mLists the db-backed string lists
* @return the equivalent lists of strings, or null if mLists is null
*/
private List<List<String>> convertToSkewedValues(List<MStringList> mLists) {
List<List<String>> lists = null;
if (mLists != null) {
lists = new ArrayList<List<String>>(mLists.size());
for (MStringList element : mLists) {
lists.add(new ArrayList<String>(element.getInternalList()));
}
}
return lists;
}
private List<MStringList> convertToMStringLists(List<List<String>> mLists) {
List<MStringList> lists = null ;
if (null != mLists) {
lists = new ArrayList<MStringList>();
for (List<String> mList : mLists) {
lists.add(new MStringList(mList));
}
}
return lists;
}
/**
* Converts a map keyed by MStringList to a map keyed by lists of strings.
* @param mMap the db-backed skewed-value location map
* @return the equivalent map, or null if mMap is null
*/
private Map<List<String>, String> covertToSkewedMap(Map<MStringList, String> mMap) {
Map<List<String>, String> map = null;
if (mMap != null) {
map = new HashMap<List<String>, String>(mMap.size());
Set<MStringList> keys = mMap.keySet();
for (MStringList key : keys) {
map.put(new ArrayList<String>(key.getInternalList()), mMap.get(key));
}
}
return map;
}
/**
* Converts a map keyed by lists of strings to a map keyed by MStringList.
* @param mMap the skewed-value location map
* @return the equivalent db-backed map, or null if mMap is null
*/
private Map<MStringList, String> covertToMapMStringList(Map<List<String>, String> mMap) {
Map<MStringList, String> map = null;
if (mMap != null) {
map = new HashMap<MStringList, String>(mMap.size());
Set<List<String>> keys = mMap.keySet();
for (List<String> key : keys) {
map.put(new MStringList(key), mMap.get(key));
}
}
return map;
}
/**
* Converts a storage descriptor to a db-backed storage descriptor. Creates a
* new db-backed column descriptor object for this SD.
* @param sd the storage descriptor to wrap in a db-backed object
* @return the storage descriptor db-backed object
* @throws MetaException
*/
private MStorageDescriptor convertToMStorageDescriptor(StorageDescriptor sd)
throws MetaException {
if (sd == null) {
return null;
}
MColumnDescriptor mcd = createNewMColumnDescriptor(convertToMFieldSchemas(sd.getCols()));
return convertToMStorageDescriptor(sd, mcd);
}
/**
* Converts a storage descriptor to a db-backed storage descriptor. It points the
* storage descriptor's column descriptor to the one passed as an argument,
* so it does not create a new mcolumn descriptor object.
* @param sd the storage descriptor to wrap in a db-backed object
* @param mcd the db-backed column descriptor
* @return the db-backed storage descriptor object
* @throws MetaException
*/
private MStorageDescriptor convertToMStorageDescriptor(StorageDescriptor sd,
MColumnDescriptor mcd) throws MetaException {
if (sd == null) {
return null;
}
return new MStorageDescriptor(mcd, sd
.getLocation(), sd.getInputFormat(), sd.getOutputFormat(), sd
.isCompressed(), sd.getNumBuckets(), convertToMSerDeInfo(sd
.getSerdeInfo()), sd.getBucketCols(),
convertToMOrders(sd.getSortCols()), sd.getParameters(),
(null == sd.getSkewedInfo()) ? null
: sd.getSkewedInfo().getSkewedColNames(),
convertToMStringLists((null == sd.getSkewedInfo()) ? null : sd.getSkewedInfo()
.getSkewedColValues()),
covertToMapMStringList((null == sd.getSkewedInfo()) ? null : sd.getSkewedInfo()
.getSkewedColValueLocationMaps()), sd.isStoredAsSubDirectories());
}
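// Editor's note (not part of the original Hive source): the two overloads
// above differ only in column-descriptor ownership:
// convertToMStorageDescriptor(sd) allocates a fresh MColumnDescriptor,
// while convertToMStorageDescriptor(sd, mcd) reuses the caller's descriptor
// so that several storage descriptors can share one column descriptor.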
@Override
public boolean addPartitions(String dbName, String tblName, List<Partition> parts)
throws InvalidObjectException, MetaException {
boolean success = false;
openTransaction();
try {
List<MTablePrivilege> tabGrants = null;
List<MTableColumnPrivilege> tabColumnGrants = null;
MTable table = this.getMTable(dbName, tblName);
if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
tabGrants = this.listAllTableGrants(dbName, tblName);
tabColumnGrants = this.listTableAllColumnGrants(dbName, tblName);
}
List<Object> toPersist = new ArrayList<Object>();
for (Partition part : parts) {
if (!part.getTableName().equals(tblName) || !part.getDbName().equals(dbName)) {
throw new MetaException("Partition does not belong to target table "
+ dbName + "." + tblName + ": " + part);
}
MPartition mpart = convertToMPart(part, true);
toPersist.add(mpart);
int now = (int)(System.currentTimeMillis()/1000);
if (tabGrants != null) {
for (MTablePrivilege tab: tabGrants) {
toPersist.add(new MPartitionPrivilege(tab.getPrincipalName(),
tab.getPrincipalType(), mpart, tab.getPrivilege(), now,
tab.getGrantor(), tab.getGrantorType(), tab.getGrantOption()));
}
}
if (tabColumnGrants != null) {
for (MTableColumnPrivilege col : tabColumnGrants) {
toPersist.add(new MPartitionColumnPrivilege(col.getPrincipalName(),
col.getPrincipalType(), mpart, col.getColumnName(), col.getPrivilege(),
now, col.getGrantor(), col.getGrantorType(), col.getGrantOption()));
}
}
}
if (toPersist.size() > 0) {
pm.makePersistentAll(toPersist);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
private boolean isValidPartition(
Partition part, boolean ifNotExists) throws MetaException {
MetaStoreUtils.validatePartitionNameCharacters(part.getValues(),
partitionValidationPattern);
boolean doesExist = doesPartitionExist(
part.getDbName(), part.getTableName(), part.getValues());
if (doesExist && !ifNotExists) {
throw new MetaException("Partition already exists: " + part);
}
return !doesExist;
}
@Override
public boolean addPartitions(String dbName, String tblName,
PartitionSpecProxy partitionSpec, boolean ifNotExists)
throws InvalidObjectException, MetaException {
boolean success = false;
openTransaction();
try {
List<MTablePrivilege> tabGrants = null;
List<MTableColumnPrivilege> tabColumnGrants = null;
MTable table = this.getMTable(dbName, tblName);
if (table == null) {
throw new InvalidObjectException("Unable to add partitions because "
+ "database or table " + dbName + "." + tblName + " does not exist");
}
if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
tabGrants = this.listAllTableGrants(dbName, tblName);
tabColumnGrants = this.listTableAllColumnGrants(dbName, tblName);
}
if (!partitionSpec.getTableName().equals(tblName) || !partitionSpec.getDbName().equals(dbName)) {
throw new MetaException("Partition does not belong to target table "
+ dbName + "." + tblName + ": " + partitionSpec);
}
PartitionSpecProxy.PartitionIterator iterator = partitionSpec.getPartitionIterator();
int now = (int)(System.currentTimeMillis()/1000);
while (iterator.hasNext()) {
Partition part = iterator.next();
if (isValidPartition(part, ifNotExists)) {
MPartition mpart = convertToMPart(part, true);
pm.makePersistent(mpart);
if (tabGrants != null) {
for (MTablePrivilege tab : tabGrants) {
pm.makePersistent(new MPartitionPrivilege(tab.getPrincipalName(),
tab.getPrincipalType(), mpart, tab.getPrivilege(), now,
tab.getGrantor(), tab.getGrantorType(), tab.getGrantOption()));
}
}
if (tabColumnGrants != null) {
for (MTableColumnPrivilege col : tabColumnGrants) {
pm.makePersistent(new MPartitionColumnPrivilege(col.getPrincipalName(),
col.getPrincipalType(), mpart, col.getColumnName(), col.getPrivilege(),
now, col.getGrantor(), col.getGrantorType(), col.getGrantOption()));
}
}
}
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
@Override
public boolean addPartition(Partition part) throws InvalidObjectException,
MetaException {
boolean success = false;
boolean commited = false;
try {
MTable table = this.getMTable(part.getDbName(), part.getTableName());
if (table == null) {
throw new InvalidObjectException("Unable to add partition because "
+ "table or database " + part.getDbName() + "." + part.getTableName() + " does not exist");
}
List<MTablePrivilege> tabGrants = null;
List<MTableColumnPrivilege> tabColumnGrants = null;
if ("TRUE".equalsIgnoreCase(table.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
tabGrants = this.listAllTableGrants(part
.getDbName(), part.getTableName());
tabColumnGrants = this.listTableAllColumnGrants(
part.getDbName(), part.getTableName());
}
openTransaction();
MPartition mpart = convertToMPart(part, true);
pm.makePersistent(mpart);
int now = (int)(System.currentTimeMillis()/1000);
List<Object> toPersist = new ArrayList<Object>();
if (tabGrants != null) {
for (MTablePrivilege tab: tabGrants) {
MPartitionPrivilege partGrant = new MPartitionPrivilege(tab
.getPrincipalName(), tab.getPrincipalType(),
mpart, tab.getPrivilege(), now, tab.getGrantor(), tab
.getGrantorType(), tab.getGrantOption());
toPersist.add(partGrant);
}
}
if (tabColumnGrants != null) {
for (MTableColumnPrivilege col : tabColumnGrants) {
MPartitionColumnPrivilege partColumn = new MPartitionColumnPrivilege(col
.getPrincipalName(), col.getPrincipalType(), mpart, col
.getColumnName(), col.getPrivilege(), now, col.getGrantor(), col
.getGrantorType(), col.getGrantOption());
toPersist.add(partColumn);
}
}
if (toPersist.size() > 0) {
pm.makePersistentAll(toPersist);
}
commited = commitTransaction();
success = true;
} finally {
if (!commited) {
rollbackTransaction();
}
}
return success;
}
@Override
public Partition getPartition(String dbName, String tableName,
List<String> part_vals) throws NoSuchObjectException, MetaException {
openTransaction();
Partition part = convertToPart(getMPartition(dbName, tableName, part_vals));
commitTransaction();
if(part == null) {
throw new NoSuchObjectException("partition values="
+ part_vals.toString());
}
part.setValues(part_vals);
return part;
}
private MPartition getMPartition(String dbName, String tableName,
List<String> part_vals) throws MetaException {
MPartition mpart = null;
boolean commited = false;
try {
openTransaction();
dbName = HiveStringUtils.normalizeIdentifier(dbName);
tableName = HiveStringUtils.normalizeIdentifier(tableName);
MTable mtbl = getMTable(dbName, tableName);
if (mtbl == null) {
commited = commitTransaction();
return null;
}
// TODO: change the query to use part_vals instead of the name, which is
// redundant; callers of this often get part_vals out of the name for no reason...
String name = Warehouse.makePartName(convertToFieldSchemas(mtbl
.getPartitionKeys()), part_vals);
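// E.g. (hypothetical values): partition keys (ds, hr) with part_vals
// ["2008-04-08", "12"] produce the name "ds=2008-04-08/hr=12".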
Query query = pm.newQuery(MPartition.class,
"table.tableName == t1 && table.database.name == t2 && partitionName == t3");
query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
query.setUnique(true);
mpart = (MPartition) query.execute(tableName, dbName, name);
pm.retrieve(mpart);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return mpart;
}
/**
* Convert a Partition object into an MPartition, which is an object backed by the db
* If the Partition's set of columns is the same as the parent table's AND useTableCD
* is true, then this partition's storage descriptor's column descriptor will point
* to the same one as the table's storage descriptor.
* @param part the partition to convert
* @param useTableCD whether to try to use the parent table's column descriptor.
* @return the model partition object
* @throws InvalidObjectException
* @throws MetaException
*/
private MPartition convertToMPart(Partition part, boolean useTableCD)
throws InvalidObjectException, MetaException {
if (part == null) {
return null;
}
MTable mt = getMTable(part.getDbName(), part.getTableName());
if (mt == null) {
throw new InvalidObjectException(
"Partition doesn't have a valid table or database name");
}
// If this partition's set of columns is the same as the parent table's,
// use the parent table's, so we do not create a duplicate column descriptor,
// thereby saving space
MStorageDescriptor msd;
if (useTableCD &&
mt.getSd() != null && mt.getSd().getCD() != null &&
mt.getSd().getCD().getCols() != null &&
part.getSd() != null &&
convertToFieldSchemas(mt.getSd().getCD().getCols()).
equals(part.getSd().getCols())) {
msd = convertToMStorageDescriptor(part.getSd(), mt.getSd().getCD());
} else {
msd = convertToMStorageDescriptor(part.getSd());
}
return new MPartition(Warehouse.makePartName(convertToFieldSchemas(mt
.getPartitionKeys()), part.getValues()), mt, part.getValues(), part
.getCreateTime(), part.getLastAccessTime(),
msd, part.getParameters());
}
private Partition convertToPart(MPartition mpart) throws MetaException {
if (mpart == null) {
return null;
}
return new Partition(convertList(mpart.getValues()), mpart.getTable().getDatabase()
.getName(), mpart.getTable().getTableName(), mpart.getCreateTime(),
mpart.getLastAccessTime(), convertToStorageDescriptor(mpart.getSd()),
convertMap(mpart.getParameters()));
}
private Partition convertToPart(String dbName, String tblName, MPartition mpart)
throws MetaException {
if (mpart == null) {
return null;
}
return new Partition(convertList(mpart.getValues()), dbName, tblName,
mpart.getCreateTime(), mpart.getLastAccessTime(),
convertToStorageDescriptor(mpart.getSd(), false), convertMap(mpart.getParameters()));
}
@Override
public boolean dropPartition(String dbName, String tableName,
List<String> part_vals) throws MetaException, NoSuchObjectException, InvalidObjectException,
InvalidInputException {
boolean success = false;
try {
openTransaction();
MPartition part = getMPartition(dbName, tableName, part_vals);
dropPartitionCommon(part);
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
@Override
public void dropPartitions(String dbName, String tblName, List<String> partNames)
throws MetaException, NoSuchObjectException {
if (partNames.isEmpty()) return;
boolean success = false;
openTransaction();
try {
// Delete all things.
dropPartitionGrantsNoTxn(dbName, tblName, partNames);
dropPartitionAllColumnGrantsNoTxn(dbName, tblName, partNames);
dropPartitionColumnStatisticsNoTxn(dbName, tblName, partNames);
// CDs are reused; go through partition SDs, detach all CDs from SDs, then remove unused CDs.
for (MColumnDescriptor mcd : detachCdsFromSdsNoTxn(dbName, tblName, partNames)) {
removeUnusedColumnDescriptor(mcd);
}
dropPartitionsNoTxn(dbName, tblName, partNames);
if (!(success = commitTransaction())) {
throw new MetaException("Failed to drop partitions"); // Should not happen?
}
} finally {
if (!success) {
rollbackTransaction();
}
}
}
/**
* Drop an MPartition and cascade deletes (e.g., delete partition privilege grants,
* drop the storage descriptor cleanly, etc.)
* @param part - the MPartition to drop
* @return whether the transaction committed successfully
* @throws InvalidInputException
* @throws InvalidObjectException
* @throws MetaException
* @throws NoSuchObjectException
*/
private boolean dropPartitionCommon(MPartition part) throws NoSuchObjectException, MetaException,
InvalidObjectException, InvalidInputException {
boolean success = false;
try {
openTransaction();
if (part != null) {
List<MFieldSchema> schemas = part.getTable().getPartitionKeys();
List<String> colNames = new ArrayList<String>();
for (MFieldSchema col: schemas) {
colNames.add(col.getName());
}
String partName = FileUtils.makePartName(colNames, part.getValues());
List<MPartitionPrivilege> partGrants = listPartitionGrants(
part.getTable().getDatabase().getName(),
part.getTable().getTableName(),
Lists.newArrayList(partName));
if (partGrants != null && partGrants.size() > 0) {
pm.deletePersistentAll(partGrants);
}
List<MPartitionColumnPrivilege> partColumnGrants = listPartitionAllColumnGrants(
part.getTable().getDatabase().getName(),
part.getTable().getTableName(),
Lists.newArrayList(partName));
if (partColumnGrants != null && partColumnGrants.size() > 0) {
pm.deletePersistentAll(partColumnGrants);
}
String dbName = part.getTable().getDatabase().getName();
String tableName = part.getTable().getTableName();
// delete partition level column stats if it exists
try {
deletePartitionColumnStatistics(dbName, tableName, partName, part.getValues(), null);
} catch (NoSuchObjectException e) {
LOG.info("No column statistics records found to delete");
}
preDropStorageDescriptor(part.getSd());
pm.deletePersistent(part);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
@Override
public List<Partition> getPartitions(
String dbName, String tableName, int maxParts) throws MetaException, NoSuchObjectException {
return getPartitionsInternal(dbName, tableName, maxParts, true, true);
}
protected List<Partition> getPartitionsInternal(
String dbName, String tblName, final int maxParts, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
@Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
Integer max = (maxParts < 0) ? null : maxParts;
return directSql.getPartitions(dbName, tblName, max);
}
@Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return convertToParts(listMPartitions(dbName, tblName, maxParts));
}
}.run(false);
}
@Override
public List<Partition> getPartitionsWithAuth(String dbName, String tblName,
short max, String userName, List<String> groupNames)
throws MetaException, NoSuchObjectException, InvalidObjectException {
boolean success = false;
try {
openTransaction();
List<MPartition> mparts = listMPartitions(dbName, tblName, max);
List<Partition> parts = new ArrayList<Partition>(mparts == null ? 0 : mparts.size());
if (mparts != null && mparts.size() > 0) {
for (MPartition mpart : mparts) {
MTable mtbl = mpart.getTable();
Partition part = convertToPart(mpart);
parts.add(part);
if ("TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
.getPartitionKeys()), part.getValues());
PrincipalPrivilegeSet partAuth = this.getPartitionPrivilegeSet(dbName,
tblName, partName, userName, groupNames);
part.setPrivileges(partAuth);
}
}
}
success = commitTransaction();
return parts;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public Partition getPartitionWithAuth(String dbName, String tblName,
List<String> partVals, String user_name, List<String> group_names)
throws NoSuchObjectException, MetaException, InvalidObjectException {
boolean success = false;
try {
openTransaction();
MPartition mpart = getMPartition(dbName, tblName, partVals);
if (mpart == null) {
commitTransaction();
throw new NoSuchObjectException("partition values="
+ partVals.toString());
}
MTable mtbl = mpart.getTable();
Partition part = convertToPart(mpart);
if ("TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
.getPartitionKeys()), partVals);
PrincipalPrivilegeSet partAuth = this.getPartitionPrivilegeSet(dbName,
tblName, partName, user_name, group_names);
part.setPrivileges(partAuth);
}
success = commitTransaction();
return part;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private List<Partition> convertToParts(List<MPartition> mparts) throws MetaException {
return convertToParts(mparts, null);
}
private List<Partition> convertToParts(List<MPartition> src, List<Partition> dest)
throws MetaException {
if (src == null) {
return dest;
}
if (dest == null) {
dest = new ArrayList<Partition>(src.size());
}
for (MPartition mp : src) {
dest.add(convertToPart(mp));
}
return dest;
}
private List<Partition> convertToParts(String dbName, String tblName, List<MPartition> mparts)
throws MetaException {
List<Partition> parts = new ArrayList<Partition>(mparts.size());
for (MPartition mp : mparts) {
parts.add(convertToPart(dbName, tblName, mp));
}
return parts;
}
// TODO:pc implement max
@Override
public List<String> listPartitionNames(String dbName, String tableName,
short max) throws MetaException {
List<String> pns = null;
boolean success = false;
try {
openTransaction();
LOG.debug("Executing getPartitionNames");
pns = getPartitionNamesNoTxn(dbName, tableName, max);
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return pns;
}
private List<String> getPartitionNamesNoTxn(String dbName, String tableName, short max) {
List<String> pns = new ArrayList<String>();
dbName = HiveStringUtils.normalizeIdentifier(dbName);
tableName = HiveStringUtils.normalizeIdentifier(tableName);
Query q = pm.newQuery(
"select partitionName from org.apache.hadoop.hive.metastore.model.MPartition "
+ "where table.database.name == t1 && table.tableName == t2 "
+ "order by partitionName asc");
q.declareParameters("java.lang.String t1, java.lang.String t2");
q.setResult("partitionName");
if(max > 0) {
q.setRange(0, max);
}
Collection names = (Collection) q.execute(dbName, tableName);
for (Iterator i = names.iterator(); i.hasNext();) {
pns.add((String) i.next());
}
return pns;
}
/**
* Retrieves a Collection of partition-related results from the database that match
* the partial specification given for a specific table.
* @param dbName the name of the database
* @param tableName the name of the table
* @param part_vals the partial specification values
* @param max_parts the maximum number of partitions to return
* @param resultsCol the metadata column of the data to return, e.g. partitionName, etc.
* if resultsCol is empty or null, a collection of MPartition objects is returned
* @return A Collection of partition-related items from the db that match the partial spec
* for a table. The type of each item in the collection corresponds to the column
* you want results for. E.g., if resultsCol is partitionName, the Collection
* has types of String, and if resultsCol is null, the types are MPartition.
* @throws MetaException
* @throws NoSuchObjectException
*/
private Collection getPartitionPsQueryResults(String dbName, String tableName,
List<String> part_vals, short max_parts, String resultsCol)
throws MetaException, NoSuchObjectException {
dbName = HiveStringUtils.normalizeIdentifier(dbName);
tableName = HiveStringUtils.normalizeIdentifier(tableName);
Table table = getTable(dbName, tableName);
if (table == null) {
throw new NoSuchObjectException(dbName + "." + tableName + " table not found");
}
List<FieldSchema> partCols = table.getPartitionKeys();
int numPartKeys = partCols.size();
if (part_vals.size() > numPartKeys) {
throw new MetaException("Incorrect number of partition values");
}
partCols = partCols.subList(0, part_vals.size());
//Construct a pattern of the form: partKey=partVal/partKey2=partVal2/...
// where partVal is either the escaped partition value given as input,
// or a regex of the form ".*"
//This works because the "=" and "/" separating key names and partition key/values
// are not escaped.
String partNameMatcher = Warehouse.makePartName(partCols, part_vals, ".*");
//add ".*" to the regex to match anything else afterwards the partial spec.
if (part_vals.size() < numPartKeys) {
partNameMatcher += ".*";
}
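// Example (hypothetical values): for partition keys (ds, hr) and
// part_vals = ["2008-04-08"], partNameMatcher is "ds=2008-04-08" plus the
// trailing ".*", so names like "ds=2008-04-08/hr=12" match; an empty
// string given for a key is replaced by the ".*" regex instead.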
Query q = pm.newQuery(MPartition.class);
StringBuilder queryFilter = new StringBuilder("table.database.name == dbName");
queryFilter.append(" && table.tableName == tableName");
queryFilter.append(" && partitionName.matches(partialRegex)");
q.setFilter(queryFilter.toString());
q.declareParameters("java.lang.String dbName, " +
"java.lang.String tableName, java.lang.String partialRegex");
if( max_parts >= 0 ) {
//User specified a row limit, set it on the Query
q.setRange(0, max_parts);
}
if (resultsCol != null && !resultsCol.isEmpty()) {
q.setResult(resultsCol);
}
return (Collection) q.execute(dbName, tableName, partNameMatcher);
}
@Override
public List<Partition> listPartitionsPsWithAuth(String db_name, String tbl_name,
List<String> part_vals, short max_parts, String userName, List<String> groupNames)
throws MetaException, InvalidObjectException, NoSuchObjectException {
List<Partition> partitions = new ArrayList<Partition>();
boolean success = false;
try {
openTransaction();
LOG.debug("executing listPartitionNamesPsWithAuth");
Collection parts = getPartitionPsQueryResults(db_name, tbl_name,
part_vals, max_parts, null);
MTable mtbl = getMTable(db_name, tbl_name);
for (Object o : parts) {
Partition part = convertToPart((MPartition) o);
//set auth privileges
if (null != userName && null != groupNames &&
"TRUE".equalsIgnoreCase(mtbl.getParameters().get("PARTITION_LEVEL_PRIVILEGE"))) {
String partName = Warehouse.makePartName(this.convertToFieldSchemas(mtbl
.getPartitionKeys()), part.getValues());
PrincipalPrivilegeSet partAuth = getPartitionPrivilegeSet(db_name,
tbl_name, partName, userName, groupNames);
part.setPrivileges(partAuth);
}
partitions.add(part);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return partitions;
}
@Override
public List<String> listPartitionNamesPs(String dbName, String tableName,
List<String> part_vals, short max_parts) throws MetaException, NoSuchObjectException {
List<String> partitionNames = new ArrayList<String>();
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPartitionNamesPs");
Collection names = getPartitionPsQueryResults(dbName, tableName,
part_vals, max_parts, "partitionName");
for (Object o : names) {
partitionNames.add((String) o);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return partitionNames;
}
// TODO:pc implement max
private List<MPartition> listMPartitions(String dbName, String tableName,
int max) {
boolean success = false;
List<MPartition> mparts = null;
try {
openTransaction();
LOG.debug("Executing listMPartitions");
dbName = HiveStringUtils.normalizeIdentifier(dbName);
tableName = HiveStringUtils.normalizeIdentifier(tableName);
Query query = pm.newQuery(MPartition.class,
"table.tableName == t1 && table.database.name == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
query.setOrdering("partitionName ascending");
if(max > 0) {
query.setRange(0, max);
}
mparts = (List<MPartition>) query.execute(tableName, dbName);
LOG.debug("Done executing query for listMPartitions");
pm.retrieveAll(mparts);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listMPartitions " + mparts);
} finally {
if (!success) {
rollbackTransaction();
}
}
return mparts;
}
@Override
public List<Partition> getPartitionsByNames(String dbName, String tblName,
List<String> partNames) throws MetaException, NoSuchObjectException {
return getPartitionsByNamesInternal(dbName, tblName, partNames, true, true);
}
protected List<Partition> getPartitionsByNamesInternal(String dbName, String tblName,
final List<String> partNames, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
@Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
return directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames);
}
@Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return getPartitionsViaOrmFilter(dbName, tblName, partNames);
}
}.run(false);
}
@Override
public boolean getPartitionsByExpr(String dbName, String tblName, byte[] expr,
String defaultPartitionName, short maxParts, List<Partition> result) throws TException {
return getPartitionsByExprInternal(
dbName, tblName, expr, defaultPartitionName, maxParts, result, true, true);
}
protected boolean getPartitionsByExprInternal(String dbName, String tblName, final byte[] expr,
final String defaultPartitionName, final short maxParts, List<Partition> result,
boolean allowSql, boolean allowJdo) throws TException {
assert result != null;
// We will try pushdown first, so make the filter. This will also validate the expression,
// if serialization fails we will throw incompatible metastore error to the client.
String filter = null;
try {
filter = expressionProxy.convertExprToFilter(expr);
} catch (MetaException ex) {
throw new IMetaStoreClient.IncompatibleMetastoreException(ex.getMessage());
}
// Make a tree out of the filter.
// TODO: this is all pretty ugly. The only reason we need all these transformations
// is to maintain support for simple filters for HCat users that query metastore.
// If forcing everyone to use thick client is out of the question, maybe we could
// parse the filter into standard hive expressions and not all this separate tree
// Filter.g stuff. That way this method and ...ByFilter would just be merged.
final ExpressionTree exprTree = makeExpressionTree(filter);
final AtomicBoolean hasUnknownPartitions = new AtomicBoolean(false);
result.addAll(new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
@Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
// If we have some sort of expression tree, try SQL filter pushdown.
List<Partition> result = null;
if (exprTree != null) {
result = directSql.getPartitionsViaSqlFilter(ctx.getTable(), exprTree, null);
}
if (result == null) {
// We couldn't do SQL filter pushdown. Get names via normal means.
List<String> partNames = new LinkedList<String>();
hasUnknownPartitions.set(getPartitionNamesPrunedByExprNoTxn(
ctx.getTable(), expr, defaultPartitionName, maxParts, partNames));
result = directSql.getPartitionsViaSqlFilter(dbName, tblName, partNames);
}
return result;
}
@Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
// If we have some sort of expression tree, try JDOQL filter pushdown.
List<Partition> result = null;
if (exprTree != null) {
result = getPartitionsViaOrmFilter(ctx.getTable(), exprTree, maxParts, false);
}
if (result == null) {
// We couldn't do JDOQL filter pushdown. Get names via normal means.
List<String> partNames = new ArrayList<String>();
hasUnknownPartitions.set(getPartitionNamesPrunedByExprNoTxn(
ctx.getTable(), expr, defaultPartitionName, maxParts, partNames));
result = getPartitionsViaOrmFilter(dbName, tblName, partNames);
}
return result;
}
}.run(true));
return hasUnknownPartitions.get();
}
private class LikeChecker extends ExpressionTree.TreeVisitor {
private boolean hasLike;
public boolean hasLike() {
return hasLike;
}
@Override
protected boolean shouldStop() {
return hasLike;
}
@Override
protected void visit(LeafNode node) throws MetaException {
hasLike = hasLike || (node.operator == Operator.LIKE);
}
}
/**
* Makes expression tree out of expr.
* @param filter Filter.
* @return Expression tree. Null if there was an error.
*/
private ExpressionTree makeExpressionTree(String filter) throws MetaException {
// TODO: ExprNodeDesc is an expression tree, we could just use that and be rid of Filter.g.
if (filter == null || filter.isEmpty()) {
return ExpressionTree.EMPTY_TREE;
}
LOG.debug("Filter specified is " + filter);
ExpressionTree tree = null;
try {
tree = getFilterParser(filter).tree;
} catch (MetaException ex) {
LOG.info("Unable to make the expression tree from expression string ["
+ filter + "]" + ex.getMessage()); // Don't log the stack, this is normal.
}
if (tree == null) {
return null;
}
// We suspect that LIKE pushdown into JDO is invalid; see HIVE-5134. Check for like here.
LikeChecker lc = new LikeChecker();
tree.accept(lc);
return lc.hasLike() ? null : tree;
}
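// Example (hypothetical filter): "ds = \"2008-04-08\"" parses into a
// one-leaf tree that can be pushed down; any filter containing LIKE makes
// this method return null so that pushdown is skipped (see HIVE-5134).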
/**
* Gets the partition names from a table, pruned using an expression.
* @param table Table.
* @param expr Expression.
* @param defaultPartName Default partition name from job config, if any.
* @param maxParts Maximum number of partition names to return.
* @param result The resulting names.
* @return Whether the result contains any unknown partitions.
*/
private boolean getPartitionNamesPrunedByExprNoTxn(Table table, byte[] expr,
String defaultPartName, short maxParts, List<String> result) throws MetaException {
result.addAll(getPartitionNamesNoTxn(
table.getDbName(), table.getTableName(), maxParts));
List<String> columnNames = new ArrayList<String>();
List<PrimitiveTypeInfo> typeInfos = new ArrayList<PrimitiveTypeInfo>();
for (FieldSchema fs : table.getPartitionKeys()) {
columnNames.add(fs.getName());
typeInfos.add(TypeInfoFactory.getPrimitiveTypeInfo(fs.getType()));
}
if (defaultPartName == null || defaultPartName.isEmpty()) {
defaultPartName = HiveConf.getVar(getConf(), HiveConf.ConfVars.DEFAULTPARTITIONNAME);
}
return expressionProxy.filterPartitionsByExpr(
columnNames, typeInfos, expr, defaultPartName, result);
}
/**
* Gets partition names from the table via ORM (JDOQL) filter pushdown.
* @param table The table.
* @param tree The expression tree from which JDOQL filter will be made.
* @param maxParts Maximum number of partitions to return.
* @param isValidatedFilter Whether the filter was pre-validated for JDOQL pushdown by a client
* (old hive client or non-hive one); if it was and we fail to create a filter, we will throw.
* @return Resulting partitions. Can be null if isValidatedFilter is false, and
* there was error deriving the JDO filter.
*/
private List<Partition> getPartitionsViaOrmFilter(Table table, ExpressionTree tree,
short maxParts, boolean isValidatedFilter) throws MetaException {
Map<String, Object> params = new HashMap<String, Object>();
String jdoFilter = makeQueryFilterString(
table.getDbName(), table, tree, params, isValidatedFilter);
if (jdoFilter == null) {
assert !isValidatedFilter;
return null;
}
Query query = pm.newQuery(MPartition.class, jdoFilter);
if (maxParts >= 0) {
// User specified a row limit, set it on the Query
query.setRange(0, maxParts);
}
String parameterDeclaration = makeParameterDeclarationStringObj(params);
query.declareParameters(parameterDeclaration);
query.setOrdering("partitionName ascending");
@SuppressWarnings("unchecked")
List<MPartition> mparts = (List<MPartition>) query.executeWithMap(params);
LOG.debug("Done executing query for getPartitionsViaOrmFilter");
pm.retrieveAll(mparts); // TODO: why is this inconsistent with what we get by names?
LOG.debug("Done retrieving all objects for getPartitionsViaOrmFilter");
List<Partition> results = convertToParts(mparts);
query.closeAll();
return results;
}
private static class Out<T> {
public T val;
}
/**
* Gets partition names from the table via ORM (JDOQL) name filter.
* @param dbName Database name.
* @param tblName Table name.
* @param partNames Partition names to get the objects for.
* @return Resulting partitions.
*/
private List<Partition> getPartitionsViaOrmFilter(
String dbName, String tblName, List<String> partNames) throws MetaException {
if (partNames.isEmpty()) {
return new ArrayList<Partition>();
}
Out<Query> query = new Out<Query>();
List<MPartition> mparts = null;
try {
mparts = getMPartitionsViaOrmFilter(dbName, tblName, partNames, query);
return convertToParts(dbName, tblName, mparts);
} finally {
if (query.val != null) {
query.val.closeAll();
}
}
}
private void dropPartitionsNoTxn(String dbName, String tblName, List<String> partNames) {
ObjectPair<Query, Map<String, String>> queryWithParams =
getPartQueryWithParams(dbName, tblName, partNames);
Query query = queryWithParams.getFirst();
query.setClass(MPartition.class);
long deleted = query.deletePersistentAll(queryWithParams.getSecond());
LOG.debug("Deleted " + deleted + " partition from store");
query.closeAll();
}
/**
* Detaches column descriptors from storage descriptors; returns the set of unique CDs
* thus detached. This is done before dropping partitions because CDs are reused between
* SDs; so, we remove the links to delete SDs and then check the returned CDs to see if
* they are referenced by other SDs.
*/
private HashSet<MColumnDescriptor> detachCdsFromSdsNoTxn(
String dbName, String tblName, List<String> partNames) {
ObjectPair<Query, Map<String, String>> queryWithParams =
getPartQueryWithParams(dbName, tblName, partNames);
Query query = queryWithParams.getFirst();
query.setClass(MPartition.class);
query.setResult("sd");
@SuppressWarnings("unchecked")
List<MStorageDescriptor> sds = (List<MStorageDescriptor>)query.executeWithMap(
queryWithParams.getSecond());
HashSet<MColumnDescriptor> candidateCds = new HashSet<MColumnDescriptor>();
for (MStorageDescriptor sd : sds) {
if (sd != null && sd.getCD() != null) {
candidateCds.add(sd.getCD());
sd.setCD(null);
}
}
return candidateCds;
}
private List<MPartition> getMPartitionsViaOrmFilter(String dbName,
String tblName, List<String> partNames, Out<Query> out) {
ObjectPair<Query, Map<String, String>> queryWithParams =
getPartQueryWithParams(dbName, tblName, partNames);
Query query = out.val = queryWithParams.getFirst();
query.setResultClass(MPartition.class);
query.setClass(MPartition.class);
query.setOrdering("partitionName ascending");
@SuppressWarnings("unchecked")
List<MPartition> result = (List<MPartition>)query.executeWithMap(queryWithParams.getSecond());
return result;
}
private ObjectPair<Query, Map<String, String>> getPartQueryWithParams(
String dbName, String tblName, List<String> partNames) {
StringBuilder sb = new StringBuilder(
"table.tableName == t1 && table.database.name == t2 && (");
int n = 0;
Map<String, String> params = new HashMap<String, String>();
for (Iterator<String> itr = partNames.iterator(); itr.hasNext();) {
String pn = "p" + n;
n++;
String part = itr.next();
params.put(pn, part);
sb.append("partitionName == ").append(pn);
sb.append(" || ");
}
sb.setLength(sb.length() - 4); // remove the last " || "
sb.append(')');
Query query = pm.newQuery();
query.setFilter(sb.toString());
LOG.debug(" JDOQL filter is " + sb.toString());
params.put("t1", HiveStringUtils.normalizeIdentifier(tblName));
params.put("t2", HiveStringUtils.normalizeIdentifier(dbName));
query.declareParameters(makeParameterDeclarationString(params));
return new ObjectPair<Query, Map<String,String>>(query, params);
}
@Override
public List<Partition> getPartitionsByFilter(String dbName, String tblName,
String filter, short maxParts) throws MetaException, NoSuchObjectException {
return getPartitionsByFilterInternal(dbName, tblName, filter, maxParts, true, true);
}
/** Helper class for retrieving results with transaction handling, direct SQL fallback, and perf logging. */
private abstract class GetHelper<T> {
private final boolean isInTxn, doTrace, allowJdo;
private boolean doUseDirectSql;
private long start;
private Table table;
protected final String dbName, tblName;
private boolean success = false;
protected T results = null;
public GetHelper(String dbName, String tblName, boolean allowSql, boolean allowJdo)
throws MetaException {
assert allowSql || allowJdo;
this.allowJdo = allowJdo;
this.dbName = HiveStringUtils.normalizeIdentifier(dbName);
if (tblName != null) {
this.tblName = HiveStringUtils.normalizeIdentifier(tblName);
} else {
// tblName can be null in cases of Helper being used at a higher
// abstraction level, such as with databases
this.tblName = null;
this.table = null;
}
this.doTrace = LOG.isDebugEnabled();
this.isInTxn = isActiveTransaction();
// SQL usage inside a larger transaction (e.g. droptable) may not be desirable because
// some databases (e.g. Postgres) abort the entire transaction when any query fails, so
// the fallback from failed SQL to JDO is not possible.
boolean isConfigEnabled = HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL)
&& (HiveConf.getBoolVar(getConf(), ConfVars.METASTORE_TRY_DIRECT_SQL_DDL) || !isInTxn);
if (!allowJdo && isConfigEnabled && !directSql.isCompatibleDatastore()) {
throw new MetaException("SQL is not operational"); // test path; SQL is enabled and broken.
}
this.doUseDirectSql = allowSql && isConfigEnabled && directSql.isCompatibleDatastore();
}
protected abstract String describeResult();
protected abstract T getSqlResult(GetHelper<T> ctx) throws MetaException;
protected abstract T getJdoResult(
GetHelper<T> ctx) throws MetaException, NoSuchObjectException;
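// run() attempts the direct-SQL path first when it is enabled; on failure
// it rolls back (unless inside an enclosing transaction), reopens, and
// retries via JDO. If JDO is disallowed, the direct-SQL error is rethrown.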
public T run(boolean initTable) throws MetaException, NoSuchObjectException {
try {
start(initTable);
if (doUseDirectSql) {
try {
setResult(getSqlResult(this));
} catch (Exception ex) {
handleDirectSqlError(ex);
}
}
if (!doUseDirectSql) {
setResult(getJdoResult(this));
}
return commit();
} catch (NoSuchObjectException ex) {
throw ex;
} catch (MetaException ex) {
throw ex;
} catch (Exception ex) {
LOG.error("", ex);
throw new MetaException(ex.getMessage());
} finally {
close();
}
}
private void start(boolean initTable) throws MetaException, NoSuchObjectException {
start = doTrace ? System.nanoTime() : 0;
openTransaction();
if (initTable && (tblName != null)) {
table = ensureGetTable(dbName, tblName);
}
}
private boolean setResult(T results) {
this.results = results;
return this.results != null;
}
private void handleDirectSqlError(Exception ex) throws MetaException, NoSuchObjectException {
LOG.warn("Direct SQL failed" + (allowJdo ? ", falling back to ORM" : ""), ex);
if (!allowJdo) {
if (ex instanceof MetaException) {
throw (MetaException)ex;
}
throw new MetaException(ex.getMessage());
}
if (!isInTxn) {
rollbackTransaction();
start = doTrace ? System.nanoTime() : 0;
openTransaction();
if (table != null) {
table = ensureGetTable(dbName, tblName);
}
} else {
start = doTrace ? System.nanoTime() : 0;
}
doUseDirectSql = false;
}
public void disableDirectSql() {
this.doUseDirectSql = false;
}
private T commit() {
success = commitTransaction();
if (doTrace) {
LOG.debug(describeResult() + " retrieved using " + (doUseDirectSql ? "SQL" : "ORM")
+ " in " + ((System.nanoTime() - start) / 1000000.0) + "ms");
}
return results;
}
private void close() {
if (!success) {
rollbackTransaction();
}
}
public Table getTable() {
return table;
}
}
private abstract class GetListHelper<T> extends GetHelper<List<T>> {
public GetListHelper(
String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException {
super(dbName, tblName, allowSql, allowJdo);
}
@Override
protected String describeResult() {
return results.size() + " entries";
}
}
private abstract class GetDbHelper extends GetHelper<Database> {
/**
* GetHelper for returning db info using directSql/JDO.
* Since this is a db-level call, tblName is ignored, and null is passed irrespective of what is passed in.
* @param dbName The Database Name
* @param tblName Placeholder param to match signature, always ignored.
* @param allowSql Whether or not we allow DirectSQL to perform this query.
* @param allowJdo Whether or not we allow ORM to perform this query.
* @throws MetaException
*/
public GetDbHelper(
String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException {
super(dbName, null, allowSql, allowJdo);
}
@Override
protected String describeResult() {
return "db details for db " + dbName;
}
}
private abstract class GetStatHelper extends GetHelper<ColumnStatistics> {
public GetStatHelper(
String dbName, String tblName, boolean allowSql, boolean allowJdo) throws MetaException {
super(dbName, tblName, allowSql, allowJdo);
}
@Override
protected String describeResult() {
return "statistics for " + (results == null ? 0 : results.getStatsObjSize()) + " columns";
}
}
protected List<Partition> getPartitionsByFilterInternal(String dbName, String tblName,
String filter, final short maxParts, boolean allowSql, boolean allowJdo)
throws MetaException, NoSuchObjectException {
final ExpressionTree tree = (filter != null && !filter.isEmpty())
? getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
return new GetListHelper<Partition>(dbName, tblName, allowSql, allowJdo) {
@Override
protected List<Partition> getSqlResult(GetHelper<List<Partition>> ctx) throws MetaException {
List<Partition> parts = directSql.getPartitionsViaSqlFilter(
ctx.getTable(), tree, (maxParts < 0) ? null : (int)maxParts);
if (parts == null) {
// Cannot push down SQL filter. The message has been logged internally.
// This is not an error so don't roll back, just go to JDO.
ctx.disableDirectSql();
}
return parts;
}
@Override
protected List<Partition> getJdoResult(
GetHelper<List<Partition>> ctx) throws MetaException, NoSuchObjectException {
return getPartitionsViaOrmFilter(ctx.getTable(), tree, maxParts, true);
}
}.run(true);
}
/**
* Gets the table object for a given table, throws if anything goes wrong.
* @param dbName Database name.
* @param tblName Table name.
* @return Table object.
*/
private MTable ensureGetMTable(
String dbName, String tblName) throws NoSuchObjectException, MetaException {
MTable mtable = getMTable(dbName, tblName);
if (mtable == null) {
throw new NoSuchObjectException("Specified database/table does not exist : "
+ dbName + "." + tblName);
}
return mtable;
}
private Table ensureGetTable(
String dbName, String tblName) throws NoSuchObjectException, MetaException {
return convertToTable(ensureGetMTable(dbName, tblName));
}
private FilterParser getFilterParser(String filter) throws MetaException {
FilterLexer lexer = new FilterLexer(new ANTLRNoCaseStringStream(filter));
CommonTokenStream tokens = new CommonTokenStream(lexer);
FilterParser parser = new FilterParser(tokens);
try {
parser.filter();
} catch(RecognitionException re) {
throw new MetaException("Error parsing partition filter; lexer error: "
+ lexer.errorMsg + "; exception " + re);
}
if (lexer.errorMsg != null) {
throw new MetaException("Error parsing partition filter : " + lexer.errorMsg);
}
return parser;
}
/**
* Makes a JDO query filter string for tables or partitions.
* @param dbName Database name.
* @param mtable Table. If null, the query returned is over tables in a database.
* If not null, the query returned is over partitions in a table.
* @param filter The filter from which JDOQL filter will be made.
* @param params Parameters for the filter. Some parameters may be added here.
* @return Resulting filter.
*/
private String makeQueryFilterString(String dbName, MTable mtable, String filter,
Map<String, Object> params) throws MetaException {
ExpressionTree tree = (filter != null && !filter.isEmpty())
? getFilterParser(filter).tree : ExpressionTree.EMPTY_TREE;
return makeQueryFilterString(dbName, convertToTable(mtable), tree, params, true);
}
/**
* Makes a JDO query filter string for tables or partitions.
* @param dbName Database name.
* @param table Table. If null, the query returned is over tables in a database.
* If not null, the query returned is over partitions in a table.
* @param tree The expression tree from which JDOQL filter will be made.
* @param params Parameters for the filter. Some parameters may be added here.
* @param isValidatedFilter Whether the filter was pre-validated for JDOQL pushdown
* by the client; if it was and we fail to create a filter, we will throw.
* @return Resulting filter. Can be null if isValidatedFilter is false, and there was error.
*/
private String makeQueryFilterString(String dbName, Table table, ExpressionTree tree,
Map<String, Object> params, boolean isValidatedFilter) throws MetaException {
assert tree != null;
FilterBuilder queryBuilder = new FilterBuilder(isValidatedFilter);
if (table != null) {
queryBuilder.append("table.tableName == t1 && table.database.name == t2");
params.put("t1", table.getTableName());
params.put("t2", table.getDbName());
} else {
queryBuilder.append("database.name == dbName");
params.put("dbName", dbName);
}
tree.generateJDOFilterFragment(getConf(), table, params, queryBuilder);
if (queryBuilder.hasError()) {
assert !isValidatedFilter;
LOG.info("JDO filter pushdown cannot be used: " + queryBuilder.getErrorMessage());
return null;
}
String jdoFilter = queryBuilder.getFilter();
LOG.debug("jdoFilter = " + jdoFilter);
return jdoFilter;
}
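// Example (hypothetical): with a non-null table and an empty expression
// tree, the resulting filter is just
// table.tableName == t1 && table.database.name == t2
// with params {t1=<table>, t2=<db>}; a non-empty tree appends fragments.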
private String makeParameterDeclarationString(Map<String, String> params) {
//Create the parameter declaration string
StringBuilder paramDecl = new StringBuilder();
for (String key : params.keySet()) {
paramDecl.append(", java.lang.String " + key);
}
return paramDecl.toString();
}
private String makeParameterDeclarationStringObj(Map<String, Object> params) {
//Create the parameter declaration string
StringBuilder paramDecl = new StringBuilder();
for (Entry<String, Object> entry : params.entrySet()) {
paramDecl.append(", ");
paramDecl.append(entry.getValue().getClass().getName());
paramDecl.append(" ");
paramDecl.append(entry.getKey());
}
return paramDecl.toString();
}
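// Example (hypothetical values): for params {p0="x", p1=Integer.valueOf(5)}
// the Obj variant above returns ", java.lang.String p0, java.lang.Integer p1"
// (note the leading ", ", which callers pass to declareParameters as-is).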
@Override
public List<String> listTableNamesByFilter(String dbName, String filter, short maxTables)
throws MetaException {
boolean success = false;
List<String> tableNames = new ArrayList<String>();
try {
openTransaction();
LOG.debug("Executing listTableNamesByFilter");
dbName = HiveStringUtils.normalizeIdentifier(dbName);
Map<String, Object> params = new HashMap<String, Object>();
String queryFilterString = makeQueryFilterString(dbName, null, filter, params);
Query query = pm.newQuery(MTable.class);
query.declareImports("import java.lang.String");
query.setResult("tableName");
query.setResultClass(java.lang.String.class);
if (maxTables >= 0) {
query.setRange(0, maxTables);
}
LOG.debug("filter specified is " + filter + "," + " JDOQL filter is " + queryFilterString);
for (Entry<String, Object> entry : params.entrySet()) {
LOG.debug("key: " + entry.getKey() + " value: " + entry.getValue() +
" class: " + entry.getValue().getClass().getName());
}
String parameterDeclaration = makeParameterDeclarationStringObj(params);
query.declareParameters(parameterDeclaration);
query.setFilter(queryFilterString);
Collection names = (Collection) query.executeWithMap(params);
//have to emulate "distinct", otherwise tables with the same name may be returned
Set<String> tableNamesSet = new HashSet<String>();
for (Iterator i = names.iterator(); i.hasNext();) {
tableNamesSet.add((String) i.next());
}
tableNames = new ArrayList<String>(tableNamesSet);
LOG.debug("Done executing query for listTableNamesByFilter");
success = commitTransaction();
LOG.debug("Done retrieving all objects for listTableNamesByFilter");
} finally {
if (!success) {
rollbackTransaction();
}
}
return tableNames;
}
@Override
public List<String> listPartitionNamesByFilter(String dbName, String tableName,
String filter, short maxParts) throws MetaException {
boolean success = false;
List<String> partNames = new ArrayList<String>();
try {
openTransaction();
LOG.debug("Executing listMPartitionNamesByFilter");
dbName = HiveStringUtils.normalizeIdentifier(dbName);
tableName = HiveStringUtils.normalizeIdentifier(tableName);
MTable mtable = getMTable(dbName, tableName);
if( mtable == null ) {
// To be consistent with the behavior of listPartitionNames, if the
// table or db does not exist, we return an empty list
return partNames;
}
Map<String, Object> params = new HashMap<String, Object>();
String queryFilterString = makeQueryFilterString(dbName, mtable, filter, params);
Query query = pm.newQuery(
"select partitionName from org.apache.hadoop.hive.metastore.model.MPartition "
+ "where " + queryFilterString);
if( maxParts >= 0 ) {
//User specified a row limit, set it on the Query
query.setRange(0, maxParts);
}
LOG.debug("Filter specified is " + filter + "," +
" JDOQL filter is " + queryFilterString);
LOG.debug("Parms is " + params);
String parameterDeclaration = makeParameterDeclarationStringObj(params);
query.declareParameters(parameterDeclaration);
query.setOrdering("partitionName ascending");
query.setResult("partitionName");
Collection names = (Collection) query.executeWithMap(params);
partNames = new ArrayList<String>();
for (Iterator i = names.iterator(); i.hasNext();) {
partNames.add((String) i.next());
}
LOG.debug("Done executing query for listMPartitionNamesByFilter");
success = commitTransaction();
LOG.debug("Done retrieving all objects for listMPartitionNamesByFilter");
} finally {
if (!success) {
rollbackTransaction();
}
}
return partNames;
}
@Override
public void alterTable(String dbname, String name, Table newTable)
throws InvalidObjectException, MetaException {
boolean success = false;
try {
openTransaction();
name = HiveStringUtils.normalizeIdentifier(name);
dbname = HiveStringUtils.normalizeIdentifier(dbname);
MTable newt = convertToMTable(newTable);
if (newt == null) {
throw new InvalidObjectException("new table is invalid");
}
MTable oldt = getMTable(dbname, name);
if (oldt == null) {
throw new MetaException("table " + name + " doesn't exist");
}
// For now only alter name, owner, parameters, cols, bucketcols are allowed
oldt.setDatabase(newt.getDatabase());
oldt.setTableName(HiveStringUtils.normalizeIdentifier(newt.getTableName()));
oldt.setParameters(newt.getParameters());
oldt.setOwner(newt.getOwner());
// Fully copy over the contents of the new SD into the old SD,
// so we don't create an extra SD in the metastore db that has no references.
copyMSD(newt.getSd(), oldt.getSd());
oldt.setRetention(newt.getRetention());
oldt.setPartitionKeys(newt.getPartitionKeys());
oldt.setTableType(newt.getTableType());
oldt.setLastAccessTime(newt.getLastAccessTime());
oldt.setViewOriginalText(newt.getViewOriginalText());
oldt.setViewExpandedText(newt.getViewExpandedText());
// commit the changes
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public void alterIndex(String dbname, String baseTblName, String name, Index newIndex)
throws InvalidObjectException, MetaException {
boolean success = false;
try {
openTransaction();
name = HiveStringUtils.normalizeIdentifier(name);
baseTblName = HiveStringUtils.normalizeIdentifier(baseTblName);
dbname = HiveStringUtils.normalizeIdentifier(dbname);
MIndex newi = convertToMIndex(newIndex);
if (newi == null) {
throw new InvalidObjectException("new index is invalid");
}
MIndex oldi = getMIndex(dbname, baseTblName, name);
if (oldi == null) {
throw new MetaException("index " + name + " doesn't exist");
}
// For now only alter parameters are allowed
oldi.setParameters(newi.getParameters());
// commit the changes
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private void alterPartitionNoTxn(String dbname, String name, List<String> part_vals,
Partition newPart) throws InvalidObjectException, MetaException {
name = HiveStringUtils.normalizeIdentifier(name);
dbname = HiveStringUtils.normalizeIdentifier(dbname);
MPartition oldp = getMPartition(dbname, name, part_vals);
MPartition newp = convertToMPart(newPart, false);
if (oldp == null || newp == null) {
throw new InvalidObjectException("partition does not exist.");
}
oldp.setValues(newp.getValues());
oldp.setPartitionName(newp.getPartitionName());
oldp.setParameters(newPart.getParameters());
if (!TableType.VIRTUAL_VIEW.name().equals(oldp.getTable().getTableType())) {
copyMSD(newp.getSd(), oldp.getSd());
}
if (newp.getCreateTime() != oldp.getCreateTime()) {
oldp.setCreateTime(newp.getCreateTime());
}
if (newp.getLastAccessTime() != oldp.getLastAccessTime()) {
oldp.setLastAccessTime(newp.getLastAccessTime());
}
}
@Override
public void alterPartition(String dbname, String name, List<String> part_vals, Partition newPart)
throws InvalidObjectException, MetaException {
boolean success = false;
Exception e = null;
try {
openTransaction();
alterPartitionNoTxn(dbname, name, part_vals, newPart);
// commit the changes
success = commitTransaction();
} catch (Exception exception) {
e = exception;
} finally {
if (!success) {
rollbackTransaction();
MetaException metaException = new MetaException(
"The transaction for alter partition did not commit successfully.");
if (e != null) {
metaException.initCause(e);
}
throw metaException;
}
}
}
@Override
public void alterPartitions(String dbname, String name, List<List<String>> part_vals,
List<Partition> newParts) throws InvalidObjectException, MetaException {
boolean success = false;
Exception e = null;
try {
openTransaction();
Iterator<List<String>> part_val_itr = part_vals.iterator();
for (Partition tmpPart: newParts) {
List<String> tmpPartVals = part_val_itr.next();
alterPartitionNoTxn(dbname, name, tmpPartVals, tmpPart);
}
// commit the changes
success = commitTransaction();
} catch (Exception exception) {
e = exception;
} finally {
if (!success) {
rollbackTransaction();
MetaException metaException = new MetaException(
"The transaction for alter partitions did not commit successfully.");
if (e != null) {
metaException.initCause(e);
}
throw metaException;
}
}
}
private void copyMSD(MStorageDescriptor newSd, MStorageDescriptor oldSd) {
oldSd.setLocation(newSd.getLocation());
MColumnDescriptor oldCD = oldSd.getCD();
// If the columns of the old column descriptor != the columns of the new one,
// then change the old storage descriptor's column descriptor.
// Convert the MFieldSchema's to their thrift object counterparts, because we maintain
// datastore identity (i.e., identity of the model objects are managed by JDO,
// not the application).
if (!(oldSd != null && oldSd.getCD() != null &&
oldSd.getCD().getCols() != null &&
newSd != null && newSd.getCD() != null &&
newSd.getCD().getCols() != null &&
convertToFieldSchemas(newSd.getCD().getCols()).
equals(convertToFieldSchemas(oldSd.getCD().getCols()))
)) {
oldSd.setCD(newSd.getCD());
}
// If oldCD no longer has any references, delete it from the backend db
removeUnusedColumnDescriptor(oldCD);
oldSd.setBucketCols(newSd.getBucketCols());
oldSd.setCompressed(newSd.isCompressed());
oldSd.setInputFormat(newSd.getInputFormat());
oldSd.setOutputFormat(newSd.getOutputFormat());
oldSd.setNumBuckets(newSd.getNumBuckets());
oldSd.getSerDeInfo().setName(newSd.getSerDeInfo().getName());
oldSd.getSerDeInfo().setSerializationLib(
newSd.getSerDeInfo().getSerializationLib());
oldSd.getSerDeInfo().setParameters(newSd.getSerDeInfo().getParameters());
oldSd.setSkewedColNames(newSd.getSkewedColNames());
oldSd.setSkewedColValues(newSd.getSkewedColValues());
oldSd.setSkewedColValueLocationMaps(newSd.getSkewedColValueLocationMaps());
oldSd.setSortCols(newSd.getSortCols());
oldSd.setParameters(newSd.getParameters());
oldSd.setStoredAsSubDirectories(newSd.isStoredAsSubDirectories());
}
/**
* Checks if a column descriptor has any remaining references by storage descriptors
* in the db. If it does not, then delete the CD. If it does, then do nothing.
* @param oldCD the column descriptor to delete if it is no longer referenced anywhere
*/
private void removeUnusedColumnDescriptor(MColumnDescriptor oldCD) {
if (oldCD == null) {
return;
}
boolean success = false;
try {
openTransaction();
LOG.debug("execute removeUnusedColumnDescriptor");
List<MStorageDescriptor> referencedSDs = listStorageDescriptorsWithCD(oldCD, 1);
//if no other SD references this CD, we can throw it out.
if (referencedSDs != null && referencedSDs.isEmpty()) {
pm.retrieve(oldCD);
pm.deletePersistent(oldCD);
}
success = commitTransaction();
LOG.debug("Done with removeUnusedColumnDescriptor");
} finally {
if (!success) {
rollbackTransaction();
}
}
}
/**
* Called right before an action that would drop a storage descriptor.
* This function makes the SD's reference to a CD null, and then deletes the CD
* if it no longer is referenced in the table.
* @param msd the storage descriptor to drop
*/
private void preDropStorageDescriptor(MStorageDescriptor msd) {
if (msd == null || msd.getCD() == null) {
return;
}
MColumnDescriptor mcd = msd.getCD();
// Because there is a 1-N relationship between CDs and SDs,
// we must set the SD's CD to null first before dropping the storage descriptor
// to satisfy foreign key constraints.
msd.setCD(null);
removeUnusedColumnDescriptor(mcd);
}
/**
* Get a list of storage descriptors that reference a particular Column Descriptor
* @param oldCD the column descriptor to get storage descriptors for
* @param maxSDs the maximum number of SDs to return
* @return a list of storage descriptors
*/
private List<MStorageDescriptor> listStorageDescriptorsWithCD(MColumnDescriptor oldCD,
long maxSDs) {
boolean success = false;
List<MStorageDescriptor> sds = null;
try {
openTransaction();
LOG.debug("Executing listStorageDescriptorsWithCD");
Query query = pm.newQuery(MStorageDescriptor.class,
"this.cd == inCD");
query.declareParameters("MColumnDescriptor inCD");
if(maxSDs >= 0) {
//User specified a row limit, set it on the Query
query.setRange(0, maxSDs);
}
sds = (List<MStorageDescriptor>) query.execute(oldCD);
LOG.debug("Done executing query for listStorageDescriptorsWithCD");
pm.retrieveAll(sds);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listStorageDescriptorsWithCD");
} finally {
if (!success) {
rollbackTransaction();
}
}
return sds;
}
@Override
public boolean addIndex(Index index) throws InvalidObjectException,
MetaException {
boolean commited = false;
try {
openTransaction();
MIndex idx = convertToMIndex(index);
pm.makePersistent(idx);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return commited;
}
private MIndex convertToMIndex(Index index) throws InvalidObjectException,
MetaException {
StorageDescriptor sd = index.getSd();
if (sd == null) {
throw new InvalidObjectException("Storage descriptor is not defined for index.");
}
MStorageDescriptor msd = this.convertToMStorageDescriptor(sd);
MTable origTable = getMTable(index.getDbName(), index.getOrigTableName());
if (origTable == null) {
throw new InvalidObjectException(
"Original table does not exist for the given index.");
}
String[] qualified = MetaStoreUtils.getQualifiedName(index.getDbName(), index.getIndexTableName());
MTable indexTable = getMTable(qualified[0], qualified[1]);
if (indexTable == null) {
throw new InvalidObjectException(
"Underlying index table does not exist for the given index.");
}
return new MIndex(HiveStringUtils.normalizeIdentifier(index.getIndexName()), origTable, index.getCreateTime(),
index.getLastAccessTime(), index.getParameters(), indexTable, msd,
index.getIndexHandlerClass(), index.isDeferredRebuild());
}
@Override
public boolean dropIndex(String dbName, String origTableName, String indexName)
throws MetaException {
boolean success = false;
try {
openTransaction();
MIndex index = getMIndex(dbName, origTableName, indexName);
if (index != null) {
pm.deletePersistent(index);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
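/**
* Look up the JDO model object for an index. Returns null when the original
* table or the index itself does not exist.
*/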
private MIndex getMIndex(String dbName, String originalTblName, String indexName) throws MetaException {
MIndex midx = null;
boolean commited = false;
try {
openTransaction();
dbName = HiveStringUtils.normalizeIdentifier(dbName);
originalTblName = HiveStringUtils.normalizeIdentifier(originalTblName);
MTable mtbl = getMTable(dbName, originalTblName);
if (mtbl == null) {
commited = commitTransaction();
return null;
}
Query query = pm.newQuery(MIndex.class,
"origTable.tableName == t1 && origTable.database.name == t2 && indexName == t3");
query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
query.setUnique(true);
midx = (MIndex) query.execute(originalTblName, dbName,
HiveStringUtils.normalizeIdentifier(indexName));
pm.retrieve(midx);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return midx;
}
@Override
public Index getIndex(String dbName, String origTableName, String indexName)
throws MetaException {
boolean commited = false;
Index ret = null;
try {
openTransaction();
MIndex mIndex = this.getMIndex(dbName, origTableName, indexName);
ret = convertToIndex(mIndex);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return ret;
}
private Index convertToIndex(MIndex mIndex) throws MetaException {
if (mIndex == null) {
return null;
}
MTable origTable = mIndex.getOrigTable();
MTable indexTable = mIndex.getIndexTable();
String[] qualified = MetaStoreUtils.getQualifiedName(
origTable.getDatabase().getName(), indexTable.getTableName());
String indexTableName = qualified[0] + "." + qualified[1];
return new Index(
mIndex.getIndexName(),
mIndex.getIndexHandlerClass(),
origTable.getDatabase().getName(),
origTable.getTableName(),
mIndex.getCreateTime(),
mIndex.getLastAccessTime(),
indexTableName,
convertToStorageDescriptor(mIndex.getSd()),
mIndex.getParameters(),
mIndex.getDeferredRebuild());
}
@Override
public List<Index> getIndexes(String dbName, String origTableName, int max)
throws MetaException {
boolean success = false;
try {
openTransaction();
List<MIndex> mIndexList = listMIndexes(dbName, origTableName, max);
List<Index> indexes = new ArrayList<Index>(mIndexList.size());
for (MIndex midx : mIndexList) {
indexes.add(this.convertToIndex(midx));
}
success = commitTransaction();
return indexes;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private List<MIndex> listMIndexes(String dbName, String origTableName,
int max) {
boolean success = false;
List<MIndex> mindexes = null;
try {
openTransaction();
LOG.debug("Executing listMIndexes");
dbName = HiveStringUtils.normalizeIdentifier(dbName);
origTableName = HiveStringUtils.normalizeIdentifier(origTableName);
Query query = pm.newQuery(MIndex.class,
"origTable.tableName == t1 && origTable.database.name == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mindexes = (List<MIndex>) query.execute(origTableName, dbName);
LOG.debug("Done executing query for listMIndexes");
pm.retrieveAll(mindexes);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listMIndexes");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mindexes;
}
@Override
public List<String> listIndexNames(String dbName, String origTableName,
short max) throws MetaException {
List<String> pns = new ArrayList<String>();
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listIndexNames");
dbName = HiveStringUtils.normalizeIdentifier(dbName);
origTableName = HiveStringUtils.normalizeIdentifier(origTableName);
Query q = pm.newQuery(
"select indexName from org.apache.hadoop.hive.metastore.model.MIndex "
+ "where origTable.database.name == t1 && origTable.tableName == t2 "
+ "order by indexName asc");
q.declareParameters("java.lang.String t1, java.lang.String t2");
q.setResult("indexName");
Collection names = (Collection) q.execute(dbName, origTableName);
for (Iterator i = names.iterator(); i.hasNext();) {
pns.add((String) i.next());
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return pns;
}
@Override
public boolean addRole(String roleName, String ownerName)
throws InvalidObjectException, MetaException, NoSuchObjectException {
boolean success = false;
boolean commited = false;
try {
openTransaction();
MRole nameCheck = this.getMRole(roleName);
if (nameCheck != null) {
throw new InvalidObjectException("Role " + roleName + " already exists.");
}
int now = (int)(System.currentTimeMillis()/1000);
MRole mRole = new MRole(roleName, now,
ownerName);
pm.makePersistent(mRole);
commited = commitTransaction();
success = commited;
} finally {
if (!commited) {
rollbackTransaction();
}
}
return success;
}
@Override
public boolean grantRole(Role role, String userName,
PrincipalType principalType, String grantor, PrincipalType grantorType,
boolean grantOption) throws MetaException, NoSuchObjectException,InvalidObjectException {
boolean success = false;
boolean commited = false;
try {
openTransaction();
MRoleMap roleMap = null;
try {
roleMap = this.getMSecurityUserRoleMap(userName, principalType, role
.getRoleName());
} catch (Exception e) {
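// ignore: absence of a mapping simply means the role has not yet been granted to this principal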
}
if (roleMap != null) {
throw new InvalidObjectException("Principal " + userName
+ " already has the role " + role.getRoleName());
}
if (principalType == PrincipalType.ROLE) {
validateRole(userName);
}
MRole mRole = getMRole(role.getRoleName());
long now = System.currentTimeMillis()/1000;
MRoleMap roleMember = new MRoleMap(userName, principalType.toString(),
mRole, (int) now, grantor, grantorType.toString(), grantOption);
pm.makePersistent(roleMember);
commited = commitTransaction();
success = commited;
} finally {
if (!commited) {
rollbackTransaction();
}
}
return success;
}
/**
* Verify that a role with the given name exists; if not, throw an exception.
* @param roleName name of the role to check
* @throws NoSuchObjectException if no role with this name exists
*/
private void validateRole(String roleName) throws NoSuchObjectException {
// if grantee is a role, check if it exists
MRole granteeRole = getMRole(roleName);
if (granteeRole == null) {
throw new NoSuchObjectException("Role " + roleName + " does not exist");
}
}
@Override
public boolean revokeRole(Role role, String userName, PrincipalType principalType,
boolean grantOption) throws MetaException, NoSuchObjectException {
boolean success = false;
try {
openTransaction();
MRoleMap roleMember = getMSecurityUserRoleMap(userName, principalType,
role.getRoleName());
if (grantOption) {
// Revoke with grant option - only remove the grant option but keep the role.
if (roleMember.getGrantOption()) {
roleMember.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with role " + role.getRoleName());
}
} else {
// No grant option in revoke, remove the whole role.
pm.deletePersistent(roleMember);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
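/**
* Fetch the role membership row for the given principal and role, or null
* when the principal has not been granted that role.
*/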
private MRoleMap getMSecurityUserRoleMap(String userName,
PrincipalType principalType, String roleName) {
MRoleMap mRoleMember = null;
boolean commited = false;
try {
openTransaction();
Query query = pm.newQuery(MRoleMap.class, "principalName == t1 && principalType == t2 && role.roleName == t3");
query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
query.setUnique(true);
mRoleMember = (MRoleMap) query.executeWithArray(userName, principalType.toString(), roleName);
pm.retrieve(mRoleMember);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return mRoleMember;
}
@Override
public boolean removeRole(String roleName) throws MetaException,
NoSuchObjectException {
boolean success = false;
try {
openTransaction();
MRole mRol = getMRole(roleName);
pm.retrieve(mRol);
if (mRol != null) {
// first remove all the membership, the membership that this role has
// been granted
List<MRoleMap> roleMap = listRoleMembers(mRol.getRoleName());
if (roleMap.size() > 0) {
pm.deletePersistentAll(roleMap);
}
List<MRoleMap> roleMember = listMSecurityPrincipalMembershipRole(mRol
.getRoleName(), PrincipalType.ROLE);
if (roleMember.size() > 0) {
pm.deletePersistentAll(roleMember);
}
// then remove all the grants
List<MGlobalPrivilege> userGrants = listPrincipalGlobalGrants(
mRol.getRoleName(), PrincipalType.ROLE);
if (userGrants.size() > 0) {
pm.deletePersistentAll(userGrants);
}
List<MDBPrivilege> dbGrants = listPrincipalAllDBGrant(mRol
.getRoleName(), PrincipalType.ROLE);
if (dbGrants.size() > 0) {
pm.deletePersistentAll(dbGrants);
}
List<MTablePrivilege> tabPartGrants = listPrincipalAllTableGrants(
mRol.getRoleName(), PrincipalType.ROLE);
if (tabPartGrants.size() > 0) {
pm.deletePersistentAll(tabPartGrants);
}
List<MPartitionPrivilege> partGrants = listPrincipalAllPartitionGrants(
mRol.getRoleName(), PrincipalType.ROLE);
if (partGrants.size() > 0) {
pm.deletePersistentAll(partGrants);
}
List<MTableColumnPrivilege> tblColumnGrants = listPrincipalAllTableColumnGrants(
mRol.getRoleName(), PrincipalType.ROLE);
if (tblColumnGrants.size() > 0) {
pm.deletePersistentAll(tblColumnGrants);
}
List<MPartitionColumnPrivilege> partColumnGrants = listPrincipalAllPartitionColumnGrants(
mRol.getRoleName(), PrincipalType.ROLE);
if (partColumnGrants.size() > 0) {
pm.deletePersistentAll(partColumnGrants);
}
// finally remove the role
pm.deletePersistent(mRol);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
return success;
}
/**
* Get all the roles in the role hierarchy that this user and these groups belong to,
* including roles granted transitively through other roles.
* @param userName user to look up, may be null
* @param groupNames groups to look up, may be null
* @return the set of role names in the hierarchy
*/
private Set<String> listAllRolesInHierarchy(String userName,
List<String> groupNames) {
List<MRoleMap> ret = new ArrayList<MRoleMap>();
if (userName != null) {
ret.addAll(listRoles(userName, PrincipalType.USER));
}
if (groupNames != null) {
for (String groupName : groupNames) {
ret.addAll(listRoles(groupName, PrincipalType.GROUP));
}
}
// get the names of these roles and their ancestors
Set<String> roleNames = new HashSet<String>();
getAllRoleAncestors(roleNames, ret);
return roleNames;
}
/**
* Add the role names of parentRoles and, recursively, their parents to
* processedRoleNames. For example, if role A is granted to role B and B is
* granted to the principal, both A and B end up in processedRoleNames.
* @param processedRoleNames accumulator for role names seen so far
* @param parentRoles role memberships whose roles (and ancestors) should be added
*/
private void getAllRoleAncestors(Set<String> processedRoleNames, List<MRoleMap> parentRoles) {
for (MRoleMap parentRole : parentRoles) {
String parentRoleName = parentRole.getRole().getRoleName();
if (!processedRoleNames.contains(parentRoleName)) {
// unprocessed role: get its parents, add it to processed, and call this
// function recursively
List<MRoleMap> nextParentRoles = listRoles(parentRoleName, PrincipalType.ROLE);
processedRoleNames.add(parentRoleName);
getAllRoleAncestors(processedRoleNames, nextParentRoles);
}
}
}
@SuppressWarnings("unchecked")
@Override
public List<MRoleMap> listRoles(String principalName,
PrincipalType principalType) {
boolean success = false;
List<MRoleMap> mRoleMember = null;
try {
openTransaction();
LOG.debug("Executing listRoles");
Query query = pm.newQuery(MRoleMap.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
query.setUnique(false);
mRoleMember = (List<MRoleMap>) query.executeWithArray(
principalName, principalType.toString());
LOG.debug("Done executing query for listRoles");
pm.retrieveAll(mRoleMember);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listRoles");
} finally {
if (!success) {
rollbackTransaction();
}
}
if (principalType == PrincipalType.USER) {
// All users belong to public role implicitly, add that role
if (mRoleMember == null) {
mRoleMember = new ArrayList<MRoleMap>();
} else {
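// defensively copy the query result, which may be an unmodifiable JDO collection,
// before appending the implicit public role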
mRoleMember = new ArrayList<MRoleMap>(mRoleMember);
}
MRole publicRole = new MRole(HiveMetaStore.PUBLIC, 0, HiveMetaStore.PUBLIC);
mRoleMember.add(new MRoleMap(principalName, principalType.toString(), publicRole, 0,
null, null, false));
}
return mRoleMember;
}
@SuppressWarnings("unchecked")
private List<MRoleMap> listMSecurityPrincipalMembershipRole(final String roleName,
final PrincipalType principalType) {
boolean success = false;
List<MRoleMap> mRoleMembership = null;
try {
openTransaction();
LOG.debug("Executing listMSecurityPrincipalMembershipRole");
Query query = pm.newQuery(MRoleMap.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mRoleMembership = (List<MRoleMap>) query.execute(roleName, principalType.toString());
LOG.debug("Done executing query for listMSecurityPrincipalMembershipRole");
pm.retrieveAll(mRoleMembership);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listMSecurityPrincipalMembershipRole");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mRoleMembership;
}
@Override
public Role getRole(String roleName) throws NoSuchObjectException {
MRole mRole = this.getMRole(roleName);
if (mRole == null) {
throw new NoSuchObjectException(roleName + " role cannot be found.");
}
Role ret = new Role(mRole.getRoleName(), mRole.getCreateTime(), mRole
.getOwnerName());
return ret;
}
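/**
* Fetch the MRole with the given name, or null when no such role exists.
*/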
private MRole getMRole(String roleName) {
MRole mrole = null;
boolean commited = false;
try {
openTransaction();
Query query = pm.newQuery(MRole.class, "roleName == t1");
query.declareParameters("java.lang.String t1");
query.setUnique(true);
mrole = (MRole) query.execute(roleName);
pm.retrieve(mrole);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return mrole;
}
@Override
public List<String> listRoleNames() {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listAllRoleNames");
Query query = pm.newQuery("select roleName from org.apache.hadoop.hive.metastore.model.MRole");
query.setResult("roleName");
Collection names = (Collection) query.execute();
List<String> roleNames = new ArrayList<String>();
for (Iterator i = names.iterator(); i.hasNext();) {
roleNames.add((String) i.next());
}
success = commitTransaction();
return roleNames;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public PrincipalPrivilegeSet getUserPrivilegeSet(String userName,
List<String> groupNames) throws InvalidObjectException, MetaException {
boolean commited = false;
PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
try {
openTransaction();
if (userName != null) {
List<MGlobalPrivilege> user = this.listPrincipalGlobalGrants(userName, PrincipalType.USER);
if (user.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> userPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(user.size());
for (int i = 0; i < user.size(); i++) {
MGlobalPrivilege item = user.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
.getGrantorType()), item.getGrantOption()));
}
userPriv.put(userName, grantInfos);
ret.setUserPrivileges(userPriv);
}
}
if (groupNames != null && groupNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> groupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String groupName : groupNames) {
List<MGlobalPrivilege> group = this.listPrincipalGlobalGrants(groupName, PrincipalType.GROUP);
if (group.size() > 0) {
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(group.size());
for (int i = 0; i < group.size(); i++) {
MGlobalPrivilege item = group.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
.getGrantorType()), item.getGrantOption()));
}
groupPriv.put(groupName, grantInfos);
}
}
ret.setGroupPrivileges(groupPriv);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return ret;
}
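/**
* Get the privilege grant information for the given principal on the given database.
* @return the list of grants, or an empty list when the principal has none
*/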
public List<PrivilegeGrantInfo> getDBPrivilege(String dbName,
String principalName, PrincipalType principalType)
throws InvalidObjectException, MetaException {
dbName = HiveStringUtils.normalizeIdentifier(dbName);
if (principalName != null) {
List<MDBPrivilege> userNameDbPriv = this.listPrincipalDBGrants(
principalName, principalType, dbName);
if (userNameDbPriv != null && userNameDbPriv.size() > 0) {
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
userNameDbPriv.size());
for (int i = 0; i < userNameDbPriv.size(); i++) {
MDBPrivilege item = userNameDbPriv.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
.getGrantorType()), item.getGrantOption()));
}
return grantInfos;
}
}
return new ArrayList<PrivilegeGrantInfo>(0);
}
@Override
public PrincipalPrivilegeSet getDBPrivilegeSet(String dbName,
String userName, List<String> groupNames) throws InvalidObjectException,
MetaException {
boolean commited = false;
dbName = HiveStringUtils.normalizeIdentifier(dbName);
PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
try {
openTransaction();
if (userName != null) {
Map<String, List<PrivilegeGrantInfo>> dbUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
dbUserPriv.put(userName, getDBPrivilege(dbName, userName,
PrincipalType.USER));
ret.setUserPrivileges(dbUserPriv);
}
if (groupNames != null && groupNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> dbGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String groupName : groupNames) {
dbGroupPriv.put(groupName, getDBPrivilege(dbName, groupName,
PrincipalType.GROUP));
}
ret.setGroupPrivileges(dbGroupPriv);
}
Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> dbRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String roleName : roleNames) {
dbRolePriv.put(roleName, getDBPrivilege(dbName, roleName, PrincipalType.ROLE));
}
ret.setRolePrivileges(dbRolePriv);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return ret;
}
@Override
public PrincipalPrivilegeSet getPartitionPrivilegeSet(String dbName,
String tableName, String partition, String userName,
List<String> groupNames) throws InvalidObjectException, MetaException {
boolean commited = false;
PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
try {
openTransaction();
if (userName != null) {
Map<String, List<PrivilegeGrantInfo>> partUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
partUserPriv.put(userName, getPartitionPrivilege(dbName,
tableName, partition, userName, PrincipalType.USER));
ret.setUserPrivileges(partUserPriv);
}
if (groupNames != null && groupNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> partGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String groupName : groupNames) {
partGroupPriv.put(groupName, getPartitionPrivilege(dbName, tableName,
partition, groupName, PrincipalType.GROUP));
}
ret.setGroupPrivileges(partGroupPriv);
}
Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> partRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String roleName : roleNames) {
partRolePriv.put(roleName, getPartitionPrivilege(dbName, tableName,
partition, roleName, PrincipalType.ROLE));
}
ret.setRolePrivileges(partRolePriv);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return ret;
}
@Override
public PrincipalPrivilegeSet getTablePrivilegeSet(String dbName,
String tableName, String userName, List<String> groupNames)
throws InvalidObjectException, MetaException {
boolean commited = false;
PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
try {
openTransaction();
if (userName != null) {
Map<String, List<PrivilegeGrantInfo>> tableUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
tableUserPriv.put(userName, getTablePrivilege(dbName,
tableName, userName, PrincipalType.USER));
ret.setUserPrivileges(tableUserPriv);
}
if (groupNames != null && groupNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> tableGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String groupName : groupNames) {
tableGroupPriv.put(groupName, getTablePrivilege(dbName, tableName,
groupName, PrincipalType.GROUP));
}
ret.setGroupPrivileges(tableGroupPriv);
}
Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> tableRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String roleName : roleNames) {
tableRolePriv.put(roleName, getTablePrivilege(dbName, tableName,
roleName, PrincipalType.ROLE));
}
ret.setRolePrivileges(tableRolePriv);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return ret;
}
@Override
public PrincipalPrivilegeSet getColumnPrivilegeSet(String dbName,
String tableName, String partitionName, String columnName,
String userName, List<String> groupNames) throws InvalidObjectException,
MetaException {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
columnName = HiveStringUtils.normalizeIdentifier(columnName);
boolean commited = false;
PrincipalPrivilegeSet ret = new PrincipalPrivilegeSet();
try {
openTransaction();
if (userName != null) {
Map<String, List<PrivilegeGrantInfo>> columnUserPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
columnUserPriv.put(userName, getColumnPrivilege(dbName, tableName,
columnName, partitionName, userName, PrincipalType.USER));
ret.setUserPrivileges(columnUserPriv);
}
if (groupNames != null && groupNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> columnGroupPriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String groupName : groupNames) {
columnGroupPriv.put(groupName, getColumnPrivilege(dbName, tableName,
columnName, partitionName, groupName, PrincipalType.GROUP));
}
ret.setGroupPrivileges(columnGroupPriv);
}
Set<String> roleNames = listAllRolesInHierarchy(userName, groupNames);
if (roleNames != null && roleNames.size() > 0) {
Map<String, List<PrivilegeGrantInfo>> columnRolePriv = new HashMap<String, List<PrivilegeGrantInfo>>();
for (String roleName : roleNames) {
columnRolePriv.put(roleName, getColumnPrivilege(dbName, tableName,
columnName, partitionName, roleName, PrincipalType.ROLE));
}
ret.setRolePrivileges(columnRolePriv);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return ret;
}
private List<PrivilegeGrantInfo> getPartitionPrivilege(String dbName,
String tableName, String partName, String principalName,
PrincipalType principalType) {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
if (principalName != null) {
List<MPartitionPrivilege> userNameTabPartPriv = this
.listPrincipalPartitionGrants(principalName, principalType,
dbName, tableName, partName);
if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) {
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
userNameTabPartPriv.size());
for (int i = 0; i < userNameTabPartPriv.size(); i++) {
MPartitionPrivilege item = userNameTabPartPriv.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(),
getPrincipalTypeFromStr(item.getGrantorType()), item.getGrantOption()));
}
return grantInfos;
}
}
return new ArrayList<PrivilegeGrantInfo>(0);
}
private PrincipalType getPrincipalTypeFromStr(String str) {
return str == null ? null : PrincipalType.valueOf(str);
}
private List<PrivilegeGrantInfo> getTablePrivilege(String dbName,
String tableName, String principalName, PrincipalType principalType) {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
if (principalName != null) {
List<MTablePrivilege> userNameTabPartPriv = this
.listAllTableGrants(principalName, principalType,
dbName, tableName);
if (userNameTabPartPriv != null && userNameTabPartPriv.size() > 0) {
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
userNameTabPartPriv.size());
for (int i = 0; i < userNameTabPartPriv.size(); i++) {
MTablePrivilege item = userNameTabPartPriv.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
.getGrantorType()), item.getGrantOption()));
}
return grantInfos;
}
}
return new ArrayList<PrivilegeGrantInfo>(0);
}
private List<PrivilegeGrantInfo> getColumnPrivilege(String dbName,
String tableName, String columnName, String partitionName,
String principalName, PrincipalType principalType) {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
columnName = HiveStringUtils.normalizeIdentifier(columnName);
if (partitionName == null) {
List<MTableColumnPrivilege> userNameColumnPriv = this
.listPrincipalTableColumnGrants(principalName, principalType,
dbName, tableName, columnName);
if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) {
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
userNameColumnPriv.size());
for (int i = 0; i < userNameColumnPriv.size(); i++) {
MTableColumnPrivilege item = userNameColumnPriv.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
.getGrantorType()), item.getGrantOption()));
}
return grantInfos;
}
} else {
List<MPartitionColumnPrivilege> userNameColumnPriv = this
.listPrincipalPartitionColumnGrants(principalName,
principalType, dbName, tableName, partitionName, columnName);
if (userNameColumnPriv != null && userNameColumnPriv.size() > 0) {
List<PrivilegeGrantInfo> grantInfos = new ArrayList<PrivilegeGrantInfo>(
userNameColumnPriv.size());
for (int i = 0; i < userNameColumnPriv.size(); i++) {
MPartitionColumnPrivilege item = userNameColumnPriv.get(i);
grantInfos.add(new PrivilegeGrantInfo(item.getPrivilege(), item
.getCreateTime(), item.getGrantor(), getPrincipalTypeFromStr(item
.getGrantorType()), item.getGrantOption()));
}
return grantInfos;
}
}
return new ArrayList<PrivilegeGrantInfo>(0);
}
@Override
public boolean grantPrivileges(PrivilegeBag privileges) throws InvalidObjectException,
MetaException, NoSuchObjectException {
boolean committed = false;
int now = (int) (System.currentTimeMillis() / 1000);
try {
openTransaction();
List<Object> persistentObjs = new ArrayList<Object>();
List<HiveObjectPrivilege> privilegeList = privileges.getPrivileges();
if (privilegeList != null && privilegeList.size() > 0) {
Iterator<HiveObjectPrivilege> privIter = privilegeList.iterator();
Set<String> privSet = new HashSet<String>();
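// privSet is reused across iterations: it collects the privileges this grantor
// has already granted on the current object so duplicate grants can be rejected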
while (privIter.hasNext()) {
HiveObjectPrivilege privDef = privIter.next();
HiveObjectRef hiveObject = privDef.getHiveObject();
String privilegeStr = privDef.getGrantInfo().getPrivilege();
String[] privs = privilegeStr.split(",");
String userName = privDef.getPrincipalName();
PrincipalType principalType = privDef.getPrincipalType();
String grantor = privDef.getGrantInfo().getGrantor();
String grantorType = privDef.getGrantInfo().getGrantorType().toString();
boolean grantOption = privDef.getGrantInfo().isGrantOption();
privSet.clear();
if (principalType == PrincipalType.ROLE) {
validateRole(userName);
}
if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
List<MGlobalPrivilege> globalPrivs = this
.listPrincipalGlobalGrants(userName, principalType);
if (globalPrivs != null) {
for (MGlobalPrivilege priv : globalPrivs) {
if (priv.getGrantor() != null && priv.getGrantor().equalsIgnoreCase(grantor)) {
privSet.add(priv.getPrivilege());
}
}
}
for (String privilege : privs) {
if (privSet.contains(privilege)) {
throw new InvalidObjectException(privilege
+ " is already granted by " + grantor);
}
MGlobalPrivilege mGlobalPrivs = new MGlobalPrivilege(userName,
principalType.toString(), privilege, now, grantor, grantorType, grantOption);
persistentObjs.add(mGlobalPrivs);
}
} else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
MDatabase dbObj = getMDatabase(hiveObject.getDbName());
if (dbObj != null) {
List<MDBPrivilege> dbPrivs = this.listPrincipalDBGrants(
userName, principalType, hiveObject.getDbName());
if (dbPrivs != null) {
for (MDBPrivilege priv : dbPrivs) {
if (priv.getGrantor() != null && priv.getGrantor().equalsIgnoreCase(grantor)) {
privSet.add(priv.getPrivilege());
}
}
}
for (String privilege : privs) {
if (privSet.contains(privilege)) {
throw new InvalidObjectException(privilege
+ " is already granted on database "
+ hiveObject.getDbName() + " by " + grantor);
}
MDBPrivilege mDb = new MDBPrivilege(userName, principalType
.toString(), dbObj, privilege, now, grantor, grantorType, grantOption);
persistentObjs.add(mDb);
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
MTable tblObj = getMTable(hiveObject.getDbName(), hiveObject
.getObjectName());
if (tblObj != null) {
List<MTablePrivilege> tablePrivs = this
.listAllTableGrants(userName, principalType,
hiveObject.getDbName(), hiveObject.getObjectName());
if (tablePrivs != null) {
for (MTablePrivilege priv : tablePrivs) {
if (priv.getGrantor() != null
&& priv.getGrantor().equalsIgnoreCase(grantor)) {
privSet.add(priv.getPrivilege());
}
}
}
for (String privilege : privs) {
if (privSet.contains(privilege)) {
throw new InvalidObjectException(privilege
+ " is already granted on table ["
+ hiveObject.getDbName() + ","
+ hiveObject.getObjectName() + "] by " + grantor);
}
MTablePrivilege mTab = new MTablePrivilege(
userName, principalType.toString(), tblObj,
privilege, now, grantor, grantorType, grantOption);
persistentObjs.add(mTab);
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
MPartition partObj = this.getMPartition(hiveObject.getDbName(),
hiveObject.getObjectName(), hiveObject.getPartValues());
String partName = null;
if (partObj != null) {
partName = partObj.getPartitionName();
List<MPartitionPrivilege> partPrivs = this
.listPrincipalPartitionGrants(userName,
principalType, hiveObject.getDbName(), hiveObject
.getObjectName(), partObj.getPartitionName());
if (partPrivs != null) {
for (MPartitionPrivilege priv : partPrivs) {
if (priv.getGrantor() != null && priv.getGrantor().equalsIgnoreCase(grantor)) {
privSet.add(priv.getPrivilege());
}
}
}
for (String privilege : privs) {
if (privSet.contains(privilege)) {
throw new InvalidObjectException(privilege
+ " is already granted on partition ["
+ hiveObject.getDbName() + ","
+ hiveObject.getObjectName() + ","
+ partName + "] by " + grantor);
}
MPartitionPrivilege mTab = new MPartitionPrivilege(userName,
principalType.toString(), partObj, privilege, now, grantor,
grantorType, grantOption);
persistentObjs.add(mTab);
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
MTable tblObj = getMTable(hiveObject.getDbName(), hiveObject
.getObjectName());
if (tblObj != null) {
if (hiveObject.getPartValues() != null) {
MPartition partObj = null;
List<MPartitionColumnPrivilege> colPrivs = null;
partObj = this.getMPartition(hiveObject.getDbName(), hiveObject
.getObjectName(), hiveObject.getPartValues());
if (partObj == null) {
continue;
}
colPrivs = this.listPrincipalPartitionColumnGrants(
userName, principalType, hiveObject.getDbName(), hiveObject
.getObjectName(), partObj.getPartitionName(),
hiveObject.getColumnName());
if (colPrivs != null) {
for (MPartitionColumnPrivilege priv : colPrivs) {
if (priv.getGrantor() != null && priv.getGrantor().equalsIgnoreCase(grantor)) {
privSet.add(priv.getPrivilege());
}
}
}
for (String privilege : privs) {
if (privSet.contains(privilege)) {
throw new InvalidObjectException(privilege
+ " is already granted on column "
+ hiveObject.getColumnName() + " ["
+ hiveObject.getDbName() + ","
+ hiveObject.getObjectName() + ","
+ partObj.getPartitionName() + "] by " + grantor);
}
MPartitionColumnPrivilege mCol = new MPartitionColumnPrivilege(userName,
principalType.toString(), partObj, hiveObject
.getColumnName(), privilege, now, grantor, grantorType,
grantOption);
persistentObjs.add(mCol);
}
} else {
List<MTableColumnPrivilege> colPrivs = null;
colPrivs = this.listPrincipalTableColumnGrants(
userName, principalType, hiveObject.getDbName(), hiveObject
.getObjectName(), hiveObject.getColumnName());
if (colPrivs != null) {
for (MTableColumnPrivilege priv : colPrivs) {
if (priv.getGrantor() != null && priv.getGrantor().equalsIgnoreCase(grantor)) {
privSet.add(priv.getPrivilege());
}
}
}
for (String privilege : privs) {
if (privSet.contains(privilege)) {
throw new InvalidObjectException(privilege
+ " is already granted on column "
+ hiveObject.getColumnName() + " ["
+ hiveObject.getDbName() + ","
+ hiveObject.getObjectName() + "] by " + grantor);
}
MTableColumnPrivilege mCol = new MTableColumnPrivilege(userName,
principalType.toString(), tblObj, hiveObject
.getColumnName(), privilege, now, grantor, grantorType,
grantOption);
persistentObjs.add(mCol);
}
}
}
}
}
}
if (persistentObjs.size() > 0) {
pm.makePersistentAll(persistentObjs);
}
committed = commitTransaction();
} finally {
if (!committed) {
rollbackTransaction();
}
}
return committed;
}
@Override
public boolean revokePrivileges(PrivilegeBag privileges, boolean grantOption)
throws InvalidObjectException, MetaException, NoSuchObjectException {
boolean committed = false;
try {
openTransaction();
List<Object> persistentObjs = new ArrayList<Object>();
List<HiveObjectPrivilege> privilegeList = privileges.getPrivileges();
if (privilegeList != null && privilegeList.size() > 0) {
Iterator<HiveObjectPrivilege> privIter = privilegeList.iterator();
while (privIter.hasNext()) {
HiveObjectPrivilege privDef = privIter.next();
HiveObjectRef hiveObject = privDef.getHiveObject();
String privilegeStr = privDef.getGrantInfo().getPrivilege();
if (privilegeStr == null || privilegeStr.trim().equals("")) {
continue;
}
String[] privs = privilegeStr.split(",");
String userName = privDef.getPrincipalName();
PrincipalType principalType = privDef.getPrincipalType();
if (hiveObject.getObjectType() == HiveObjectType.GLOBAL) {
List<MGlobalPrivilege> mSecUser = this.listPrincipalGlobalGrants(
userName, principalType);
boolean found = false;
if (mSecUser != null) {
for (String privilege : privs) {
for (MGlobalPrivilege userGrant : mSecUser) {
String userGrantPrivs = userGrant.getPrivilege();
if (privilege.equals(userGrantPrivs)) {
found = true;
if (grantOption) {
if (userGrant.getGrantOption()) {
userGrant.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with privilege " + privilege);
}
}
persistentObjs.add(userGrant);
break;
}
}
if (!found) {
throw new InvalidObjectException(
"No user grant found for privilege " + privilege);
}
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.DATABASE) {
MDatabase dbObj = getMDatabase(hiveObject.getDbName());
if (dbObj != null) {
String db = hiveObject.getDbName();
boolean found = false;
List<MDBPrivilege> dbGrants = this.listPrincipalDBGrants(
userName, principalType, db);
for (String privilege : privs) {
for (MDBPrivilege dbGrant : dbGrants) {
String dbGrantPriv = dbGrant.getPrivilege();
if (privilege.equals(dbGrantPriv)) {
found = true;
if (grantOption) {
if (dbGrant.getGrantOption()) {
dbGrant.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with privilege " + privilege);
}
}
persistentObjs.add(dbGrant);
break;
}
}
if (!found) {
throw new InvalidObjectException(
"No database grant found for privilege " + privilege
+ " on database " + db);
}
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.TABLE) {
boolean found = false;
List<MTablePrivilege> tableGrants = this
.listAllTableGrants(userName, principalType,
hiveObject.getDbName(), hiveObject.getObjectName());
for (String privilege : privs) {
for (MTablePrivilege tabGrant : tableGrants) {
String tableGrantPriv = tabGrant.getPrivilege();
if (privilege.equalsIgnoreCase(tableGrantPriv)) {
found = true;
if (grantOption) {
if (tabGrant.getGrantOption()) {
tabGrant.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with privilege " + privilege);
}
}
persistentObjs.add(tabGrant);
break;
}
}
if (!found) {
throw new InvalidObjectException("No grant (" + privilege
+ ") found " + " on table " + hiveObject.getObjectName()
+ ", database is " + hiveObject.getDbName());
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.PARTITION) {
boolean found = false;
Table tabObj = this.getTable(hiveObject.getDbName(), hiveObject.getObjectName());
String partName = null;
if (hiveObject.getPartValues() != null) {
partName = Warehouse.makePartName(tabObj.getPartitionKeys(), hiveObject.getPartValues());
}
List<MPartitionPrivilege> partitionGrants = this
.listPrincipalPartitionGrants(userName, principalType,
hiveObject.getDbName(), hiveObject.getObjectName(), partName);
for (String privilege : privs) {
for (MPartitionPrivilege partGrant : partitionGrants) {
String partPriv = partGrant.getPrivilege();
if (partPriv.equalsIgnoreCase(privilege)) {
found = true;
if (grantOption) {
if (partGrant.getGrantOption()) {
partGrant.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with privilege " + privilege);
}
}
persistentObjs.add(partGrant);
break;
}
}
if (!found) {
throw new InvalidObjectException("No grant (" + privilege
+ ") found " + " on table " + tabObj.getTableName()
+ ", partition is " + partName + ", database is " + tabObj.getDbName());
}
}
} else if (hiveObject.getObjectType() == HiveObjectType.COLUMN) {
Table tabObj = this.getTable(hiveObject.getDbName(), hiveObject
.getObjectName());
String partName = null;
if (hiveObject.getPartValues() != null) {
partName = Warehouse.makePartName(tabObj.getPartitionKeys(),
hiveObject.getPartValues());
}
if (partName != null) {
List<MPartitionColumnPrivilege> mSecCol = listPrincipalPartitionColumnGrants(
userName, principalType, hiveObject.getDbName(), hiveObject
.getObjectName(), partName, hiveObject.getColumnName());
boolean found = false;
if (mSecCol != null) {
for (String privilege : privs) {
for (MPartitionColumnPrivilege col : mSecCol) {
String colPriv = col.getPrivilege();
if (colPriv.equalsIgnoreCase(privilege)) {
found = true;
if (grantOption) {
if (col.getGrantOption()) {
col.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with privilege " + privilege);
}
}
persistentObjs.add(col);
break;
}
}
if (!found) {
throw new InvalidObjectException("No grant (" + privilege
+ ") found " + " on table " + tabObj.getTableName()
+ ", partition is " + partName + ", column name = "
+ hiveObject.getColumnName() + ", database is "
+ tabObj.getDbName());
}
}
}
} else {
List<MTableColumnPrivilege> mSecCol = listPrincipalTableColumnGrants(
userName, principalType, hiveObject.getDbName(), hiveObject
.getObjectName(), hiveObject.getColumnName());
boolean found = false;
if (mSecCol != null) {
for (String privilege : privs) {
for (MTableColumnPrivilege col : mSecCol) {
String colPriv = col.getPrivilege();
if (colPriv.equalsIgnoreCase(privilege)) {
found = true;
if (grantOption) {
if (col.getGrantOption()) {
col.setGrantOption(false);
} else {
throw new MetaException("User " + userName
+ " does not have grant option with privilege " + privilege);
}
}
persistentObjs.add(col);
break;
}
}
if (!found) {
throw new InvalidObjectException("No grant (" + privilege
+ ") found " + " on table " + tabObj.getTableName()
+ ", column name = "
+ hiveObject.getColumnName() + ", database is "
+ tabObj.getDbName());
}
}
}
}
}
}
}
if (persistentObjs.size() > 0) {
if (grantOption) {
// If grant option specified, only update the privilege, don't remove it.
// Grant option has already been removed from the privileges in the section above
} else {
pm.deletePersistentAll(persistentObjs);
}
}
committed = commitTransaction();
} finally {
if (!committed) {
rollbackTransaction();
}
}
return committed;
}
@SuppressWarnings("unchecked")
@Override
public List<MRoleMap> listRoleMembers(String roleName) {
boolean success = false;
List<MRoleMap> mRoleMemberList = null;
try {
openTransaction();
LOG.debug("Executing listRoleMembers");
Query query = pm.newQuery(MRoleMap.class,
"role.roleName == t1");
query.declareParameters("java.lang.String t1");
query.setUnique(false);
mRoleMemberList = (List<MRoleMap>) query.execute(roleName);
LOG.debug("Done executing query for listRoleMembers");
pm.retrieveAll(mRoleMemberList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listRoleMembers");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mRoleMemberList;
}
@SuppressWarnings("unchecked")
@Override
public List<MGlobalPrivilege> listPrincipalGlobalGrants(String principalName, PrincipalType principalType) {
boolean commited = false;
List<MGlobalPrivilege> userNameDbPriv = null;
try {
openTransaction();
if (principalName != null) {
Query query = pm.newQuery(MGlobalPrivilege.class,
"principalName == t1 && principalType == t2 ");
query.declareParameters(
"java.lang.String t1, java.lang.String t2");
userNameDbPriv = (List<MGlobalPrivilege>) query
.executeWithArray(principalName, principalType.toString());
pm.retrieveAll(userNameDbPriv);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return userNameDbPriv;
}
@Override
public List<HiveObjectPrivilege> listGlobalGrantsAll() {
boolean commited = false;
try {
openTransaction();
Query query = pm.newQuery(MGlobalPrivilege.class);
List<MGlobalPrivilege> userNameDbPriv = (List<MGlobalPrivilege>) query.execute();
pm.retrieveAll(userNameDbPriv);
commited = commitTransaction();
return convertGlobal(userNameDbPriv);
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
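/**
* Convert MGlobalPrivilege model objects to thrift HiveObjectPrivilege instances.
*/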
private List<HiveObjectPrivilege> convertGlobal(List<MGlobalPrivilege> privs) {
List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
for (MGlobalPrivilege priv : privs) {
String pname = priv.getPrincipalName();
PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.GLOBAL, null, null, null, null);
PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
}
return result;
}
@SuppressWarnings("unchecked")
@Override
public List<MDBPrivilege> listPrincipalDBGrants(String principalName,
PrincipalType principalType, String dbName) {
boolean success = false;
List<MDBPrivilege> mSecurityDBList = null;
dbName = HiveStringUtils.normalizeIdentifier(dbName);
try {
openTransaction();
LOG.debug("Executing listPrincipalDBGrants");
Query query = pm.newQuery(MDBPrivilege.class,
"principalName == t1 && principalType == t2 && database.name == t3");
query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
mSecurityDBList = (List<MDBPrivilege>) query.executeWithArray(principalName, principalType.toString(), dbName);
LOG.debug("Done executing query for listPrincipalDBGrants");
pm.retrieveAll(mSecurityDBList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalDBGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityDBList;
}
@Override
public List<HiveObjectPrivilege> listPrincipalDBGrantsAll(
String principalName, PrincipalType principalType) {
return convertDB(listPrincipalAllDBGrant(principalName, principalType));
}
@Override
public List<HiveObjectPrivilege> listDBGrantsAll(String dbName) {
return convertDB(listDatabaseGrants(dbName));
}
private List<HiveObjectPrivilege> convertDB(List<MDBPrivilege> privs) {
List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
for (MDBPrivilege priv : privs) {
String pname = priv.getPrincipalName();
PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
String database = priv.getDatabase().getName();
HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.DATABASE, database,
null, null, null);
PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
}
return result;
}
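/**
* List the DB privilege grants for the given principal, or all DB grants in the
* metastore when both the principal name and type are null.
*/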
@SuppressWarnings("unchecked")
private List<MDBPrivilege> listPrincipalAllDBGrant(
String principalName, PrincipalType principalType) {
boolean success = false;
List<MDBPrivilege> mSecurityDBList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalAllDBGrant");
if (principalName != null && principalType != null) {
Query query = pm.newQuery(MDBPrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityDBList = (List<MDBPrivilege>) query.execute(principalName, principalType.toString());
} else {
Query query = pm.newQuery(MDBPrivilege.class);
mSecurityDBList = (List<MDBPrivilege>) query.execute();
}
LOG.debug("Done executing query for listPrincipalAllDBGrant");
pm.retrieveAll(mSecurityDBList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalAllDBGrant");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityDBList;
}
@SuppressWarnings("unchecked")
public List<MTablePrivilege> listAllTableGrants(String dbName,
String tableName) {
boolean success = false;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
List<MTablePrivilege> mSecurityTabList = null;
try {
openTransaction();
LOG.debug("Executing listAllTableGrants");
String queryStr = "table.tableName == t1 && table.database.name == t2";
Query query = pm.newQuery(MTablePrivilege.class, queryStr);
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabList = (List<MTablePrivilege>) query.executeWithArray(tableName, dbName);
LOG.debug("Done executing query for listAllTableGrants");
pm.retrieveAll(mSecurityTabList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listAllTableGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabList;
}
@SuppressWarnings("unchecked")
public List<MPartitionPrivilege> listTableAllPartitionGrants(String dbName,
String tableName) {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
boolean success = false;
List<MPartitionPrivilege> mSecurityTabPartList = null;
try {
openTransaction();
LOG.debug("Executing listTableAllPartitionGrants");
String queryStr = "partition.table.tableName == t1 && partition.table.database.name == t2";
Query query = pm.newQuery(MPartitionPrivilege.class, queryStr);
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MPartitionPrivilege>) query.executeWithArray(tableName, dbName);
LOG.debug("Done executing query for listTableAllPartitionGrants");
pm.retrieveAll(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listTableAllPartitionGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabPartList;
}
@SuppressWarnings("unchecked")
public List<MTableColumnPrivilege> listTableAllColumnGrants(String dbName,
String tableName) {
boolean success = false;
List<MTableColumnPrivilege> mTblColPrivilegeList = null;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
try {
openTransaction();
LOG.debug("Executing listTableAllColumnGrants");
String queryStr = "table.tableName == t1 && table.database.name == t2";
Query query = pm.newQuery(MTableColumnPrivilege.class, queryStr);
query.declareParameters("java.lang.String t1, java.lang.String t2");
mTblColPrivilegeList = (List<MTableColumnPrivilege>) query
.executeWithArray(tableName, dbName);
LOG.debug("Done executing query for listTableAllColumnGrants");
pm.retrieveAll(mTblColPrivilegeList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listTableAllColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mTblColPrivilegeList;
}
@SuppressWarnings("unchecked")
public List<MPartitionColumnPrivilege> listTableAllPartitionColumnGrants(String dbName,
String tableName) {
boolean success = false;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
List<MPartitionColumnPrivilege> mSecurityColList = null;
try {
openTransaction();
LOG.debug("Executing listTableAllPartitionColumnGrants");
String queryStr = "partition.table.tableName == t1 && partition.table.database.name == t2";
Query query = pm.newQuery(MPartitionColumnPrivilege.class, queryStr);
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityColList = (List<MPartitionColumnPrivilege>) query
.executeWithArray(tableName, dbName);
LOG.debug("Done executing query for listTableAllPartitionColumnGrants");
pm.retrieveAll(mSecurityColList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listTableAllPartitionColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityColList;
}
@SuppressWarnings("unchecked")
public List<MPartitionColumnPrivilege> listPartitionAllColumnGrants(String dbName,
String tableName, List<String> partNames) {
boolean success = false;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
List<MPartitionColumnPrivilege> mSecurityColList = null;
try {
openTransaction();
LOG.debug("Executing listPartitionAllColumnGrants");
mSecurityColList = queryByPartitionNames(
dbName, tableName, partNames, MPartitionColumnPrivilege.class,
"partition.table.tableName", "partition.table.database.name", "partition.partitionName");
LOG.debug("Done executing query for listPartitionAllColumnGrants");
pm.retrieveAll(mSecurityColList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPartitionAllColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityColList;
}
public void dropPartitionAllColumnGrantsNoTxn(
String dbName, String tableName, List<String> partNames) {
ObjectPair<Query, Object[]> queryWithParams = makeQueryByPartitionNames(
dbName, tableName, partNames, MPartitionColumnPrivilege.class,
"partition.table.tableName", "partition.table.database.name", "partition.partitionName");
queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond());
}
@SuppressWarnings("unchecked")
private List<MDBPrivilege> listDatabaseGrants(String dbName) {
dbName = HiveStringUtils.normalizeIdentifier(dbName);
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listDatabaseGrants");
Query query = pm.newQuery(MDBPrivilege.class,
"database.name == t1");
query.declareParameters("java.lang.String t1");
List<MDBPrivilege> mSecurityDBList = (List<MDBPrivilege>) query
.executeWithArray(dbName);
LOG.debug("Done executing query for listDatabaseGrants");
pm.retrieveAll(mSecurityDBList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listDatabaseGrants");
return mSecurityDBList;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@SuppressWarnings("unchecked")
private List<MPartitionPrivilege> listPartitionGrants(String dbName, String tableName,
List<String> partNames) {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
boolean success = false;
List<MPartitionPrivilege> mSecurityTabPartList = null;
try {
openTransaction();
LOG.debug("Executing listPartitionGrants");
mSecurityTabPartList = queryByPartitionNames(
dbName, tableName, partNames, MPartitionPrivilege.class, "partition.table.tableName",
"partition.table.database.name", "partition.partitionName");
LOG.debug("Done executing query for listPartitionGrants");
pm.retrieveAll(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPartitionGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabPartList;
}
private void dropPartitionGrantsNoTxn(String dbName, String tableName, List<String> partNames) {
ObjectPair<Query, Object[]> queryWithParams = makeQueryByPartitionNames(
dbName, tableName, partNames, MPartitionPrivilege.class, "partition.table.tableName",
"partition.table.database.name", "partition.partitionName");
queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond());
}
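/**
* Build and execute a query over the given model class, filtering by table,
* database and the given partition names. See makeQueryByPartitionNames for
* the shape of the generated filter.
*/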
@SuppressWarnings("unchecked")
private <T> List<T> queryByPartitionNames(String dbName, String tableName,
List<String> partNames, Class<T> clazz, String tbCol, String dbCol, String partCol) {
ObjectPair<Query, Object[]> queryAndParams = makeQueryByPartitionNames(
dbName, tableName, partNames, clazz, tbCol, dbCol, partCol);
return (List<T>)queryAndParams.getFirst().executeWithArray(queryAndParams.getSecond());
}
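/**
* Build a JDOQL query over the given model class that matches a table, a database
* and a set of partition names. Illustrative example: for two partition names the
* generated filter string is
*
*   tbCol == t1 && dbCol == t2 && (partCol == p0 || partCol == p1)
*
* where t1/t2 are bound to the normalized table and database names and p0/p1 to
* the partition names.
*/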
private ObjectPair<Query, Object[]> makeQueryByPartitionNames(
String dbName, String tableName, List<String> partNames, Class<?> clazz,
String tbCol, String dbCol, String partCol) {
String queryStr = tbCol + " == t1 && " + dbCol + " == t2";
String paramStr = "java.lang.String t1, java.lang.String t2";
Object[] params = new Object[2 + partNames.size()];
params[0] = HiveStringUtils.normalizeIdentifier(tableName);
params[1] = HiveStringUtils.normalizeIdentifier(dbName);
int index = 0;
for (String partName : partNames) {
params[index + 2] = partName;
queryStr += ((index == 0) ? " && (" : " || ") + partCol + " == p" + index;
paramStr += ", java.lang.String p" + index;
++index;
}
if (!partNames.isEmpty()) {
// close the partition-name disjunction opened on the first loop iteration
queryStr += ")";
}
Query query = pm.newQuery(clazz, queryStr);
query.declareParameters(paramStr);
return new ObjectPair<Query, Object[]>(query, params);
}
@Override
@SuppressWarnings("unchecked")
public List<MTablePrivilege> listAllTableGrants(
String principalName, PrincipalType principalType, String dbName,
String tableName) {
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
boolean success = false;
List<MTablePrivilege> mSecurityTabPartList = null;
try {
openTransaction();
LOG.debug("Executing listAllTableGrants");
Query query = pm.newQuery(
MTablePrivilege.class,
"principalName == t1 && principalType == t2 && table.tableName == t3 && table.database.name == t4");
query.declareParameters(
"java.lang.String t1, java.lang.String t2, java.lang.String t3, java.lang.String t4");
mSecurityTabPartList = (List<MTablePrivilege>) query
.executeWithArray(principalName, principalType.toString(), tableName, dbName);
LOG.debug("Done executing query for listAllTableGrants");
pm.retrieveAll(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listAllTableGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabPartList;
}
@SuppressWarnings("unchecked")
@Override
public List<MPartitionPrivilege> listPrincipalPartitionGrants(
String principalName, PrincipalType principalType, String dbName,
String tableName, String partName) {
boolean success = false;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
List<MPartitionPrivilege> mSecurityTabPartList = null;
try {
openTransaction();
LOG.debug("Executing listMSecurityPrincipalPartitionGrant");
Query query = pm.newQuery(
MPartitionPrivilege.class,
"principalName == t1 && principalType == t2 && partition.table.tableName == t3 " +
"&& partition.table.database.name == t4 && partition.partitionName == t5");
query.declareParameters(
"java.lang.String t1, java.lang.String t2, java.lang.String t3, java.lang.String t4, " +
"java.lang.String t5");
mSecurityTabPartList = (List<MPartitionPrivilege>) query
.executeWithArray(principalName, principalType.toString(), tableName, dbName, partName);
LOG.debug("Done executing query for listMSecurityPrincipalPartitionGrant");
pm.retrieveAll(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listMSecurityPrincipalPartitionGrant");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabPartList;
}
@SuppressWarnings("unchecked")
@Override
public List<MTableColumnPrivilege> listPrincipalTableColumnGrants(
String principalName, PrincipalType principalType, String dbName,
String tableName, String columnName) {
boolean success = false;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
columnName = HiveStringUtils.normalizeIdentifier(columnName);
List<MTableColumnPrivilege> mSecurityColList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalTableColumnGrants");
String queryStr = "principalName == t1 && principalType == t2 && " +
"table.tableName == t3 && table.database.name == t4 && columnName == t5 ";
Query query = pm.newQuery(MTableColumnPrivilege.class, queryStr);
query
.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3, " +
"java.lang.String t4, java.lang.String t5");
mSecurityColList = (List<MTableColumnPrivilege>) query.executeWithArray(
principalName, principalType.toString(), tableName, dbName, columnName);
LOG.debug("Done executing query for listPrincipalTableColumnGrants");
pm.retrieveAll(mSecurityColList);
success = commitTransaction();
      LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityColList;
}
@Override
@SuppressWarnings("unchecked")
public List<MPartitionColumnPrivilege> listPrincipalPartitionColumnGrants(
String principalName, PrincipalType principalType, String dbName,
String tableName, String partitionName, String columnName) {
boolean success = false;
tableName = HiveStringUtils.normalizeIdentifier(tableName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
columnName = HiveStringUtils.normalizeIdentifier(columnName);
List<MPartitionColumnPrivilege> mSecurityColList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalPartitionColumnGrants");
Query query = pm
.newQuery(
MPartitionColumnPrivilege.class,
"principalName == t1 && principalType == t2 && partition.table.tableName == t3 " +
"&& partition.table.database.name == t4 && partition.partitionName == t5 && columnName == t6");
query
.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3, " +
"java.lang.String t4, java.lang.String t5, java.lang.String t6");
mSecurityColList = (List<MPartitionColumnPrivilege>) query
.executeWithArray(principalName, principalType.toString(), tableName,
dbName, partitionName, columnName);
LOG.debug("Done executing query for listPrincipalPartitionColumnGrants");
pm.retrieveAll(mSecurityColList);
success = commitTransaction();
      LOG.debug("Done retrieving all objects for listPrincipalPartitionColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityColList;
}
@Override
public List<HiveObjectPrivilege> listPrincipalPartitionColumnGrantsAll(
String principalName, PrincipalType principalType) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPrincipalPartitionColumnGrantsAll");
List<MPartitionColumnPrivilege> mSecurityTabPartList;
if (principalName != null && principalType != null) {
Query query = pm.newQuery(MPartitionColumnPrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MPartitionColumnPrivilege>)
query.executeWithArray(principalName, principalType.toString());
} else {
Query query = pm.newQuery(MPartitionColumnPrivilege.class);
mSecurityTabPartList = (List<MPartitionColumnPrivilege>) query.execute();
}
LOG.debug("Done executing query for listPrincipalPartitionColumnGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertPartCols(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalPartitionColumnGrantsAll");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public List<HiveObjectPrivilege> listPartitionColumnGrantsAll(
String dbName, String tableName, String partitionName, String columnName) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPartitionColumnGrantsAll");
Query query = pm.newQuery(MPartitionColumnPrivilege.class,
"partition.table.tableName == t3 && partition.table.database.name == t4 && " +
"partition.partitionName == t5 && columnName == t6");
query.declareParameters(
"java.lang.String t3, java.lang.String t4, java.lang.String t5, java.lang.String t6");
List<MPartitionColumnPrivilege> mSecurityTabPartList = (List<MPartitionColumnPrivilege>)
query.executeWithArray(tableName, dbName, partitionName, columnName);
LOG.debug("Done executing query for listPartitionColumnGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertPartCols(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPartitionColumnGrantsAll");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
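  // Converts the JDO model privilege objects into Thrift HiveObjectPrivilege instances,
  // walking partition -> table -> database to assemble the fully qualified object reference.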
private List<HiveObjectPrivilege> convertPartCols(List<MPartitionColumnPrivilege> privs) {
List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
for (MPartitionColumnPrivilege priv : privs) {
String pname = priv.getPrincipalName();
PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
MPartition mpartition = priv.getPartition();
MTable mtable = mpartition.getTable();
MDatabase mdatabase = mtable.getDatabase();
HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN,
mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), priv.getColumnName());
PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
}
return result;
}
@SuppressWarnings("unchecked")
private List<MTablePrivilege> listPrincipalAllTableGrants(
String principalName, PrincipalType principalType) {
boolean success = false;
List<MTablePrivilege> mSecurityTabPartList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalAllTableGrants");
Query query = pm.newQuery(MTablePrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MTablePrivilege>) query.execute(
principalName, principalType.toString());
      LOG.debug("Done executing query for listPrincipalAllTableGrants");
pm.retrieveAll(mSecurityTabPartList);
success = commitTransaction();
      LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabPartList;
}
@Override
public List<HiveObjectPrivilege> listPrincipalTableGrantsAll(
String principalName, PrincipalType principalType) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPrincipalAllTableGrants");
List<MTablePrivilege> mSecurityTabPartList;
if (principalName != null && principalType != null) {
Query query = pm.newQuery(MTablePrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MTablePrivilege>) query.execute(
principalName, principalType.toString());
} else {
Query query = pm.newQuery(MTablePrivilege.class);
mSecurityTabPartList = (List<MTablePrivilege>) query.execute();
}
LOG.debug("Done executing query for listPrincipalAllTableGrants");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertTable(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public List<HiveObjectPrivilege> listTableGrantsAll(String dbName, String tableName) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listTableGrantsAll");
Query query = pm.newQuery(MTablePrivilege.class,
"table.tableName == t1 && table.database.name == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
List<MTablePrivilege> mSecurityTabPartList = (List<MTablePrivilege>)
query.executeWithArray(tableName, dbName);
LOG.debug("Done executing query for listTableGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertTable(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalAllTableGrants");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private List<HiveObjectPrivilege> convertTable(List<MTablePrivilege> privs) {
List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
for (MTablePrivilege priv : privs) {
String pname = priv.getPrincipalName();
PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
String table = priv.getTable().getTableName();
String database = priv.getTable().getDatabase().getName();
HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.TABLE, database, table,
null, null);
PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
}
return result;
}
@SuppressWarnings("unchecked")
private List<MPartitionPrivilege> listPrincipalAllPartitionGrants(
String principalName, PrincipalType principalType) {
boolean success = false;
List<MPartitionPrivilege> mSecurityTabPartList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalAllPartitionGrants");
Query query = pm.newQuery(MPartitionPrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MPartitionPrivilege>) query.execute(
principalName, principalType.toString());
      LOG.debug("Done executing query for listPrincipalAllPartitionGrants");
pm.retrieveAll(mSecurityTabPartList);
success = commitTransaction();
      LOG.debug("Done retrieving all objects for listPrincipalAllPartitionGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityTabPartList;
}
@Override
public List<HiveObjectPrivilege> listPrincipalPartitionGrantsAll(
String principalName, PrincipalType principalType) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPrincipalPartitionGrantsAll");
List<MPartitionPrivilege> mSecurityTabPartList;
if (principalName != null && principalType != null) {
Query query = pm.newQuery(MPartitionPrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MPartitionPrivilege>)
query.execute(principalName, principalType.toString());
} else {
Query query = pm.newQuery(MPartitionPrivilege.class);
mSecurityTabPartList = (List<MPartitionPrivilege>) query.execute();
}
LOG.debug("Done executing query for listPrincipalPartitionGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertPartition(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public List<HiveObjectPrivilege> listPartitionGrantsAll(
String dbName, String tableName, String partitionName) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPrincipalPartitionGrantsAll");
Query query = pm.newQuery(MPartitionPrivilege.class,
"partition.table.tableName == t3 && partition.table.database.name == t4 && " +
"partition.partitionName == t5");
query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5");
List<MPartitionPrivilege> mSecurityTabPartList = (List<MPartitionPrivilege>)
query.executeWithArray(tableName, dbName, partitionName);
LOG.debug("Done executing query for listPrincipalPartitionGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertPartition(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalPartitionGrantsAll");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private List<HiveObjectPrivilege> convertPartition(List<MPartitionPrivilege> privs) {
List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
for (MPartitionPrivilege priv : privs) {
String pname = priv.getPrincipalName();
PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
MPartition mpartition = priv.getPartition();
MTable mtable = mpartition.getTable();
MDatabase mdatabase = mtable.getDatabase();
HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.PARTITION,
mdatabase.getName(), mtable.getTableName(), mpartition.getValues(), null);
PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
}
return result;
}
@SuppressWarnings("unchecked")
private List<MTableColumnPrivilege> listPrincipalAllTableColumnGrants(
String principalName, PrincipalType principalType) {
boolean success = false;
List<MTableColumnPrivilege> mSecurityColumnList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalAllTableColumnGrants");
Query query = pm.newQuery(MTableColumnPrivilege.class,
"principalName == t1 && principalType == t2");
      query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityColumnList = (List<MTableColumnPrivilege>) query.execute(
principalName, principalType.toString());
LOG.debug("Done executing query for listPrincipalAllTableColumnGrants");
pm.retrieveAll(mSecurityColumnList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalAllTableColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityColumnList;
}
@Override
public List<HiveObjectPrivilege> listPrincipalTableColumnGrantsAll(
String principalName, PrincipalType principalType) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPrincipalTableColumnGrantsAll");
List<MTableColumnPrivilege> mSecurityTabPartList;
if (principalName != null && principalType != null) {
Query query = pm.newQuery(MTableColumnPrivilege.class,
"principalName == t1 && principalType == t2");
query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityTabPartList = (List<MTableColumnPrivilege>)
query.execute(principalName, principalType.toString());
} else {
Query query = pm.newQuery(MTableColumnPrivilege.class);
mSecurityTabPartList = (List<MTableColumnPrivilege>) query.execute();
}
LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertTableCols(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public List<HiveObjectPrivilege> listTableColumnGrantsAll(
String dbName, String tableName, String columnName) {
boolean success = false;
try {
openTransaction();
LOG.debug("Executing listPrincipalTableColumnGrantsAll");
Query query = pm.newQuery(MTableColumnPrivilege.class,
"table.tableName == t3 && table.database.name == t4 && columnName == t5");
query.declareParameters("java.lang.String t3, java.lang.String t4, java.lang.String t5");
List<MTableColumnPrivilege> mSecurityTabPartList = (List<MTableColumnPrivilege>)
query.executeWithArray(tableName, dbName, columnName);
LOG.debug("Done executing query for listPrincipalTableColumnGrantsAll");
pm.retrieveAll(mSecurityTabPartList);
List<HiveObjectPrivilege> result = convertTableCols(mSecurityTabPartList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalTableColumnGrantsAll");
return result;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private List<HiveObjectPrivilege> convertTableCols(List<MTableColumnPrivilege> privs) {
List<HiveObjectPrivilege> result = new ArrayList<HiveObjectPrivilege>();
for (MTableColumnPrivilege priv : privs) {
String pname = priv.getPrincipalName();
PrincipalType ptype = PrincipalType.valueOf(priv.getPrincipalType());
MTable mtable = priv.getTable();
MDatabase mdatabase = mtable.getDatabase();
HiveObjectRef objectRef = new HiveObjectRef(HiveObjectType.COLUMN,
mdatabase.getName(), mtable.getTableName(), null, priv.getColumnName());
PrivilegeGrantInfo grantor = new PrivilegeGrantInfo(priv.getPrivilege(), priv.getCreateTime(),
priv.getGrantor(), PrincipalType.valueOf(priv.getGrantorType()), priv.getGrantOption());
result.add(new HiveObjectPrivilege(objectRef, pname, ptype, grantor));
}
return result;
}
@SuppressWarnings("unchecked")
private List<MPartitionColumnPrivilege> listPrincipalAllPartitionColumnGrants(
String principalName, PrincipalType principalType) {
boolean success = false;
List<MPartitionColumnPrivilege> mSecurityColumnList = null;
try {
openTransaction();
LOG.debug("Executing listPrincipalAllTableColumnGrants");
Query query = pm.newQuery(MPartitionColumnPrivilege.class,
"principalName == t1 && principalType == t2");
      query.declareParameters("java.lang.String t1, java.lang.String t2");
mSecurityColumnList = (List<MPartitionColumnPrivilege>) query.execute(
principalName, principalType.toString());
LOG.debug("Done executing query for listPrincipalAllTableColumnGrants");
pm.retrieveAll(mSecurityColumnList);
success = commitTransaction();
LOG.debug("Done retrieving all objects for listPrincipalAllTableColumnGrants");
} finally {
if (!success) {
rollbackTransaction();
}
}
return mSecurityColumnList;
}
@Override
public boolean isPartitionMarkedForEvent(String dbName, String tblName,
Map<String, String> partName, PartitionEventType evtType) throws UnknownTableException,
MetaException, InvalidPartitionException, UnknownPartitionException {
Collection<MPartitionEvent> partEvents;
boolean success = false;
LOG.debug("Begin Executing isPartitionMarkedForEvent");
try{
openTransaction();
Query query = pm.newQuery(MPartitionEvent.class, "dbName == t1 && tblName == t2 && partName == t3 && eventType == t4");
query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3, int t4");
Table tbl = getTable(dbName, tblName); // Make sure dbName and tblName are valid.
if(null == tbl) {
throw new UnknownTableException("Table: "+ tblName + " is not found.");
}
partEvents = (Collection<MPartitionEvent>) query.executeWithArray(dbName, tblName, getPartitionStr(tbl, partName), evtType.getValue());
pm.retrieveAll(partEvents);
success = commitTransaction();
LOG.debug("Done executing isPartitionMarkedForEvent");
} finally{
if (!success) {
rollbackTransaction();
}
}
    return partEvents != null && !partEvents.isEmpty();
}
@Override
public Table markPartitionForEvent(String dbName, String tblName, Map<String,String> partName,
PartitionEventType evtType) throws MetaException, UnknownTableException, InvalidPartitionException, UnknownPartitionException {
LOG.debug("Begin executing markPartitionForEvent");
boolean success = false;
Table tbl = null;
try{
openTransaction();
tbl = getTable(dbName, tblName); // Make sure dbName and tblName are valid.
if(null == tbl) {
throw new UnknownTableException("Table: "+ tblName + " is not found.");
}
pm.makePersistent(new MPartitionEvent(dbName,tblName,getPartitionStr(tbl, partName), evtType.getValue()));
success = commitTransaction();
LOG.debug("Done executing markPartitionForEvent");
} finally {
if(!success) {
rollbackTransaction();
}
}
return tbl;
}
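  // Builds the comma-separated partition value string used as the MPartitionEvent key,
  // ordering the values by the table's partition keys and validating that every
  // partition column has a supplied value.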
private String getPartitionStr(Table tbl, Map<String,String> partName) throws InvalidPartitionException{
if(tbl.getPartitionKeysSize() != partName.size()){
throw new InvalidPartitionException("Number of partition columns in table: "+ tbl.getPartitionKeysSize() +
" doesn't match with number of supplied partition values: "+partName.size());
}
final List<String> storedVals = new ArrayList<String>(tbl.getPartitionKeysSize());
for(FieldSchema partKey : tbl.getPartitionKeys()){
String partVal = partName.get(partKey.getName());
if(null == partVal) {
throw new InvalidPartitionException("No value found for partition column: "+partKey.getName());
}
storedVals.add(partVal);
}
return join(storedVals,',');
}
/** The following API
*
* - executeJDOQLSelect
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public Collection<?> executeJDOQLSelect(String query) {
boolean committed = false;
Collection<?> result = null;
try {
openTransaction();
Query q = pm.newQuery(query);
result = (Collection<?>) q.execute();
committed = commitTransaction();
if (committed) {
return result;
} else {
return null;
}
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
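  // A hypothetical HiveMetaTool-style caller of executeJDOQLSelect (illustrative only;
  // the query string below is an assumption, not taken from HiveMetaTool itself):
  //   Collection<?> dbNames = store.executeJDOQLSelect(
  //       "select name from org.apache.hadoop.hive.metastore.model.MDatabase");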
/** The following API
*
* - executeJDOQLUpdate
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public long executeJDOQLUpdate(String query) {
boolean committed = false;
long numUpdated = 0;
try {
openTransaction();
Query q = pm.newQuery(query);
numUpdated = (Long) q.execute();
committed = commitTransaction();
if (committed) {
return numUpdated;
} else {
return -1;
}
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
/** The following API
*
* - listFSRoots
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public Set<String> listFSRoots() {
boolean committed = false;
Set<String> fsRoots = new HashSet<String>();
try {
openTransaction();
Query query = pm.newQuery(MDatabase.class);
List<MDatabase> mDBs = (List<MDatabase>) query.execute();
pm.retrieveAll(mDBs);
for (MDatabase mDB:mDBs) {
fsRoots.add(mDB.getLocationUri());
}
committed = commitTransaction();
if (committed) {
return fsRoots;
} else {
return null;
}
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
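  // Decides whether an on-disk location should be rewritten to the new URI: when the input
  // specifies a port it must match, when it specifies a scheme it must match the on-disk
  // scheme case-insensitively, and the on-disk host must be present and equal the input host.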
private boolean shouldUpdateURI(URI onDiskUri, URI inputUri) {
String onDiskHost = onDiskUri.getHost();
String inputHost = inputUri.getHost();
int onDiskPort = onDiskUri.getPort();
int inputPort = inputUri.getPort();
String onDiskScheme = onDiskUri.getScheme();
String inputScheme = inputUri.getScheme();
//compare ports
if (inputPort != -1) {
if (inputPort != onDiskPort) {
return false;
}
}
//compare schemes
if (inputScheme != null) {
if (onDiskScheme == null) {
return false;
}
if (!inputScheme.equalsIgnoreCase(onDiskScheme)) {
return false;
}
}
//compare hosts
if (onDiskHost != null) {
if (!inputHost.equalsIgnoreCase(onDiskHost)) {
return false;
}
} else {
return false;
}
return true;
}
public class UpdateMDatabaseURIRetVal {
private List<String> badRecords;
private Map<String, String> updateLocations;
UpdateMDatabaseURIRetVal(List<String> badRecords, Map<String, String> updateLocations) {
this.badRecords = badRecords;
this.updateLocations = updateLocations;
}
public List<String> getBadRecords() {
return badRecords;
}
public void setBadRecords(List<String> badRecords) {
this.badRecords = badRecords;
}
public Map<String, String> getUpdateLocations() {
return updateLocations;
}
public void setUpdateLocations(Map<String, String> updateLocations) {
this.updateLocations = updateLocations;
}
}
  /** The following API
*
* - updateMDatabaseURI
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public UpdateMDatabaseURIRetVal updateMDatabaseURI(URI oldLoc, URI newLoc, boolean dryRun) {
boolean committed = false;
Map<String, String> updateLocations = new HashMap<String, String>();
List<String> badRecords = new ArrayList<String>();
UpdateMDatabaseURIRetVal retVal = null;
try {
openTransaction();
Query query = pm.newQuery(MDatabase.class);
List<MDatabase> mDBs = (List<MDatabase>) query.execute();
pm.retrieveAll(mDBs);
for(MDatabase mDB:mDBs) {
URI locationURI = null;
String location = mDB.getLocationUri();
try {
locationURI = new URI(location);
        } catch (URISyntaxException e) {
          // leave locationURI null; the record is flagged once below
        } catch (NullPointerException e) {
          // location was null; it is flagged once below
        }
        if (locationURI == null) {
          badRecords.add(location);
} else {
if (shouldUpdateURI(locationURI, oldLoc)) {
String dbLoc = mDB.getLocationUri().replaceAll(oldLoc.toString(), newLoc.toString());
updateLocations.put(locationURI.toString(), dbLoc);
if (!dryRun) {
mDB.setLocationUri(dbLoc);
}
}
}
}
committed = commitTransaction();
if (committed) {
retVal = new UpdateMDatabaseURIRetVal(badRecords, updateLocations);
}
return retVal;
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
public class UpdateMStorageDescriptorTblPropURIRetVal {
private List<String> badRecords;
private Map<String, String> updateLocations;
UpdateMStorageDescriptorTblPropURIRetVal(List<String> badRecords,
Map<String, String> updateLocations) {
this.badRecords = badRecords;
this.updateLocations = updateLocations;
}
public List<String> getBadRecords() {
return badRecords;
}
public void setBadRecords(List<String> badRecords) {
this.badRecords = badRecords;
}
public Map<String, String> getUpdateLocations() {
return updateLocations;
}
public void setUpdateLocations(Map<String, String> updateLocations) {
this.updateLocations = updateLocations;
}
}
  /** The following API
*
* - updateMStorageDescriptorTblPropURI
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public UpdateMStorageDescriptorTblPropURIRetVal updateMStorageDescriptorTblPropURI(URI oldLoc,
URI newLoc, String tblPropKey, boolean isDryRun) {
boolean committed = false;
Map<String, String> updateLocations = new HashMap<String, String>();
List<String> badRecords = new ArrayList<String>();
UpdateMStorageDescriptorTblPropURIRetVal retVal = null;
try {
openTransaction();
Query query = pm.newQuery(MStorageDescriptor.class);
List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute();
pm.retrieveAll(mSDSs);
for(MStorageDescriptor mSDS:mSDSs) {
URI tablePropLocationURI = null;
if (mSDS.getParameters().containsKey(tblPropKey)) {
String tablePropLocation = mSDS.getParameters().get(tblPropKey);
          try {
            tablePropLocationURI = new URI(tablePropLocation);
          } catch (URISyntaxException e) {
            // leave tablePropLocationURI null; the record is flagged once below
          } catch (NullPointerException e) {
            // the property value was null; it is flagged once below
          }
          // If the tblPropKey value resolves to a valid URI, update it when it matches
          // the old NameNode location; otherwise record it as a bad record.
          if (tablePropLocationURI == null) {
            badRecords.add(tablePropLocation);
} else {
if (shouldUpdateURI(tablePropLocationURI, oldLoc)) {
String tblPropLoc = mSDS.getParameters().get(tblPropKey).replaceAll(oldLoc.toString(),
newLoc.toString());
updateLocations.put(tablePropLocationURI.toString(), tblPropLoc);
if (!isDryRun) {
mSDS.getParameters().put(tblPropKey, tblPropLoc);
}
}
}
}
}
committed = commitTransaction();
if (committed) {
retVal = new UpdateMStorageDescriptorTblPropURIRetVal(badRecords, updateLocations);
}
return retVal;
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
public class UpdateMStorageDescriptorTblURIRetVal {
private List<String> badRecords;
private Map<String, String> updateLocations;
UpdateMStorageDescriptorTblURIRetVal(List<String> badRecords,
Map<String, String> updateLocations) {
this.badRecords = badRecords;
this.updateLocations = updateLocations;
}
public List<String> getBadRecords() {
return badRecords;
}
public void setBadRecords(List<String> badRecords) {
this.badRecords = badRecords;
}
public Map<String, String> getUpdateLocations() {
return updateLocations;
}
public void setUpdateLocations(Map<String, String> updateLocations) {
this.updateLocations = updateLocations;
}
}
  /** The following API
*
* - updateMStorageDescriptorTblURI
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public UpdateMStorageDescriptorTblURIRetVal updateMStorageDescriptorTblURI(URI oldLoc, URI newLoc,
boolean isDryRun) {
boolean committed = false;
Map<String, String> updateLocations = new HashMap<String, String>();
List<String> badRecords = new ArrayList<String>();
UpdateMStorageDescriptorTblURIRetVal retVal = null;
try {
openTransaction();
Query query = pm.newQuery(MStorageDescriptor.class);
List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute();
pm.retrieveAll(mSDSs);
for(MStorageDescriptor mSDS:mSDSs) {
URI locationURI = null;
String location = mSDS.getLocation();
try {
locationURI = new URI(location);
        } catch (URISyntaxException e) {
          // leave locationURI null; the record is flagged once below
        } catch (NullPointerException e) {
          // location was null; it is flagged once below
        }
        if (locationURI == null) {
          badRecords.add(location);
} else {
if (shouldUpdateURI(locationURI, oldLoc)) {
String tblLoc = mSDS.getLocation().replaceAll(oldLoc.toString(), newLoc.toString());
updateLocations.put(locationURI.toString(), tblLoc);
if (!isDryRun) {
mSDS.setLocation(tblLoc);
}
}
}
}
committed = commitTransaction();
if (committed) {
retVal = new UpdateMStorageDescriptorTblURIRetVal(badRecords, updateLocations);
}
return retVal;
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
public class UpdateSerdeURIRetVal {
private List<String> badRecords;
private Map<String, String> updateLocations;
UpdateSerdeURIRetVal(List<String> badRecords, Map<String, String> updateLocations) {
this.badRecords = badRecords;
this.updateLocations = updateLocations;
}
public List<String> getBadRecords() {
return badRecords;
}
public void setBadRecords(List<String> badRecords) {
this.badRecords = badRecords;
}
public Map<String, String> getUpdateLocations() {
return updateLocations;
}
public void setUpdateLocations(Map<String, String> updateLocations) {
this.updateLocations = updateLocations;
}
}
  /** The following API
*
* - updateSerdeURI
*
* is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift.
*
*/
public UpdateSerdeURIRetVal updateSerdeURI(URI oldLoc, URI newLoc, String serdeProp,
boolean isDryRun) {
boolean committed = false;
Map<String, String> updateLocations = new HashMap<String, String>();
List<String> badRecords = new ArrayList<String>();
UpdateSerdeURIRetVal retVal = null;
try {
openTransaction();
Query query = pm.newQuery(MSerDeInfo.class);
List<MSerDeInfo> mSerdes = (List<MSerDeInfo>) query.execute();
pm.retrieveAll(mSerdes);
for(MSerDeInfo mSerde:mSerdes) {
if (mSerde.getParameters().containsKey(serdeProp)) {
String schemaLoc = mSerde.getParameters().get(serdeProp);
URI schemaLocURI = null;
try {
schemaLocURI = new URI(schemaLoc);
          } catch (URISyntaxException e) {
            // leave schemaLocURI null; the record is flagged once below
          } catch (NullPointerException e) {
            // the serde property value was null; it is flagged once below
          }
          if (schemaLocURI == null) {
            badRecords.add(schemaLoc);
} else {
if (shouldUpdateURI(schemaLocURI, oldLoc)) {
String newSchemaLoc = schemaLoc.replaceAll(oldLoc.toString(), newLoc.toString());
updateLocations.put(schemaLocURI.toString(), newSchemaLoc);
if (!isDryRun) {
mSerde.getParameters().put(serdeProp, newSchemaLoc);
}
}
}
}
}
committed = commitTransaction();
if (committed) {
retVal = new UpdateSerdeURIRetVal(badRecords, updateLocations);
}
return retVal;
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
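  // Upsert semantics for table-level column statistics: if a stats row already exists for
  // the column, the new values are copied into the attached JDO object (DataNucleus flushes
  // the change on commit); otherwise a new object is persisted.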
private void writeMTableColumnStatistics(Table table, MTableColumnStatistics mStatsObj)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
String dbName = mStatsObj.getDbName();
String tableName = mStatsObj.getTableName();
String colName = mStatsObj.getColName();
LOG.info("Updating table level column statistics for db=" + dbName + " tableName=" + tableName
+ " colName=" + colName);
validateTableCols(table, Lists.newArrayList(colName));
List<MTableColumnStatistics> oldStats =
getMTableColumnStatistics(table, Lists.newArrayList(colName));
if (!oldStats.isEmpty()) {
assert oldStats.size() == 1;
StatObjectConverter.setFieldsIntoOldStats(mStatsObj, oldStats.get(0));
} else {
pm.makePersistent(mStatsObj);
}
}
private void writeMPartitionColumnStatistics(Table table, Partition partition,
MPartitionColumnStatistics mStatsObj) throws NoSuchObjectException,
MetaException, InvalidObjectException, InvalidInputException {
String dbName = mStatsObj.getDbName();
String tableName = mStatsObj.getTableName();
String partName = mStatsObj.getPartitionName();
String colName = mStatsObj.getColName();
LOG.info("Updating partition level column statistics for db=" + dbName + " tableName=" +
tableName + " partName=" + partName + " colName=" + colName);
boolean foundCol = false;
List<FieldSchema> colList = partition.getSd().getCols();
for (FieldSchema col : colList) {
if (col.getName().equals(mStatsObj.getColName().trim())) {
foundCol = true;
break;
}
}
if (!foundCol) {
      throw new NoSuchObjectException("Column " + colName +
          " for which stats gathering is requested doesn't exist.");
}
List<MPartitionColumnStatistics> oldStats = getMPartitionColumnStatistics(
table, Lists.newArrayList(partName), Lists.newArrayList(colName));
if (!oldStats.isEmpty()) {
assert oldStats.size() == 1;
StatObjectConverter.setFieldsIntoOldStats(mStatsObj, oldStats.get(0));
} else {
pm.makePersistent(mStatsObj);
}
}
@Override
public boolean updateTableColumnStatistics(ColumnStatistics colStats)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean committed = false;
openTransaction();
try {
List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj();
ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
// DataNucleus objects get detached all over the place for no (real) reason.
// So let's not use them anywhere unless absolutely necessary.
Table table = ensureGetTable(statsDesc.getDbName(), statsDesc.getTableName());
for (ColumnStatisticsObj statsObj:statsObjs) {
// We have to get mtable again because DataNucleus.
MTableColumnStatistics mStatsObj = StatObjectConverter.convertToMTableColumnStatistics(
ensureGetMTable(statsDesc.getDbName(), statsDesc.getTableName()), statsDesc, statsObj);
writeMTableColumnStatistics(table, mStatsObj);
}
committed = commitTransaction();
return committed;
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
@Override
public boolean updatePartitionColumnStatistics(ColumnStatistics colStats, List<String> partVals)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean committed = false;
try {
openTransaction();
List<ColumnStatisticsObj> statsObjs = colStats.getStatsObj();
ColumnStatisticsDesc statsDesc = colStats.getStatsDesc();
Table table = ensureGetTable(statsDesc.getDbName(), statsDesc.getTableName());
      Partition partition = convertToPart(getMPartition(
          statsDesc.getDbName(), statsDesc.getTableName(), partVals));
      if (partition == null) {
        throw new NoSuchObjectException("Partition for which stats is gathered doesn't exist.");
      }
      for (ColumnStatisticsObj statsObj:statsObjs) {
        // We have to get the partition again because DataNucleus detaches objects.
        MPartition mPartition = getMPartition(
            statsDesc.getDbName(), statsDesc.getTableName(), partVals);
MPartitionColumnStatistics mStatsObj =
StatObjectConverter.convertToMPartitionColumnStatistics(mPartition, statsDesc, statsObj);
writeMPartitionColumnStatistics(table, partition, mStatsObj);
}
committed = commitTransaction();
return committed;
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
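  // Fetches table-level column statistics via JDOQL, building an OR-chain filter over the
  // requested column names in the same style as makeQueryByPartitionNames above.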
private List<MTableColumnStatistics> getMTableColumnStatistics(
Table table, List<String> colNames) throws MetaException {
boolean committed = false;
openTransaction();
try {
List<MTableColumnStatistics> result = null;
validateTableCols(table, colNames);
Query query = pm.newQuery(MTableColumnStatistics.class);
String filter = "tableName == t1 && dbName == t2 && (";
String paramStr = "java.lang.String t1, java.lang.String t2";
Object[] params = new Object[colNames.size() + 2];
params[0] = table.getTableName();
params[1] = table.getDbName();
for (int i = 0; i < colNames.size(); ++i) {
filter += ((i == 0) ? "" : " || ") + "colName == c" + i;
paramStr += ", java.lang.String c" + i;
params[i + 2] = colNames.get(i);
}
filter += ")";
query.setFilter(filter);
query.declareParameters(paramStr);
result = (List<MTableColumnStatistics>) query.executeWithArray(params);
pm.retrieveAll(result);
if (result.size() > colNames.size()) {
throw new MetaException(
"Unexpected " + result.size() + " statistics for " + colNames.size() + " columns");
}
committed = commitTransaction();
return result;
} catch (Exception ex) {
LOG.error("Error retrieving statistics via jdo", ex);
if (ex instanceof MetaException) {
throw (MetaException)ex;
}
throw new MetaException(ex.getMessage());
} finally {
if (!committed) {
rollbackTransaction();
return Lists.newArrayList();
}
}
}
private void validateTableCols(Table table, List<String> colNames) throws MetaException {
List<FieldSchema> colList = table.getSd().getCols();
for (String colName : colNames) {
boolean foundCol = false;
for (FieldSchema mCol : colList) {
if (mCol.getName().equals(colName.trim())) {
foundCol = true;
break;
}
}
if (!foundCol) {
throw new MetaException("Column " + colName + " doesn't exist.");
}
}
}
@Override
public ColumnStatistics getTableColumnStatistics(String dbName, String tableName,
List<String> colNames) throws MetaException, NoSuchObjectException {
return getTableColumnStatisticsInternal(dbName, tableName, colNames, true, true);
}
protected ColumnStatistics getTableColumnStatisticsInternal(
String dbName, String tableName, final List<String> colNames, boolean allowSql,
boolean allowJdo) throws MetaException, NoSuchObjectException {
return new GetStatHelper(HiveStringUtils.normalizeIdentifier(dbName),
HiveStringUtils.normalizeIdentifier(tableName), allowSql, allowJdo) {
@Override
protected ColumnStatistics getSqlResult(GetHelper<ColumnStatistics> ctx) throws MetaException {
return directSql.getTableStats(dbName, tblName, colNames);
}
@Override
protected ColumnStatistics getJdoResult(
GetHelper<ColumnStatistics> ctx) throws MetaException, NoSuchObjectException {
List<MTableColumnStatistics> mStats = getMTableColumnStatistics(getTable(), colNames);
if (mStats.isEmpty()) return null;
        // LastAnalyzed is stored per column, but the thrift object has one value for all columns.
        // Luckily, nobody actually uses it, so for now we set it to the lowest value across all columns.
ColumnStatisticsDesc desc = StatObjectConverter.getTableColumnStatisticsDesc(mStats.get(0));
List<ColumnStatisticsObj> statObjs = new ArrayList<ColumnStatisticsObj>(mStats.size());
for (MTableColumnStatistics mStat : mStats) {
if (desc.getLastAnalyzed() > mStat.getLastAnalyzed()) {
desc.setLastAnalyzed(mStat.getLastAnalyzed());
}
statObjs.add(StatObjectConverter.getTableColumnStatisticsObj(mStat));
}
return new ColumnStatistics(desc, statObjs);
}
}.run(true);
}
@Override
public List<ColumnStatistics> getPartitionColumnStatistics(String dbName, String tableName,
List<String> partNames, List<String> colNames) throws MetaException, NoSuchObjectException {
return getPartitionColumnStatisticsInternal(
dbName, tableName, partNames, colNames, true, true);
}
protected List<ColumnStatistics> getPartitionColumnStatisticsInternal(
String dbName, String tableName, final List<String> partNames, final List<String> colNames,
boolean allowSql, boolean allowJdo) throws MetaException, NoSuchObjectException {
return new GetListHelper<ColumnStatistics>(dbName, tableName, allowSql, allowJdo) {
@Override
protected List<ColumnStatistics> getSqlResult(
GetHelper<List<ColumnStatistics>> ctx) throws MetaException {
return directSql.getPartitionStats(dbName, tblName, partNames, colNames);
}
@Override
protected List<ColumnStatistics> getJdoResult(
GetHelper<List<ColumnStatistics>> ctx) throws MetaException, NoSuchObjectException {
List<MPartitionColumnStatistics> mStats =
getMPartitionColumnStatistics(getTable(), partNames, colNames);
List<ColumnStatistics> result = new ArrayList<ColumnStatistics>(
Math.min(mStats.size(), partNames.size()));
String lastPartName = null;
List<ColumnStatisticsObj> curList = null;
ColumnStatisticsDesc csd = null;
for (int i = 0; i <= mStats.size(); ++i) {
boolean isLast = i == mStats.size();
MPartitionColumnStatistics mStatsObj = isLast ? null : mStats.get(i);
String partName = isLast ? null : (String)mStatsObj.getPartitionName();
if (isLast || !partName.equals(lastPartName)) {
if (i != 0) {
result.add(new ColumnStatistics(csd, curList));
}
if (isLast) {
continue;
}
csd = StatObjectConverter.getPartitionColumnStatisticsDesc(mStatsObj);
curList = new ArrayList<ColumnStatisticsObj>(colNames.size());
}
curList.add(StatObjectConverter.getPartitionColumnStatisticsObj(mStatsObj));
lastPartName = partName;
}
return result;
}
}.run(true);
}
@Override
public AggrStats get_aggr_stats_for(String dbName, String tblName,
final List<String> partNames, final List<String> colNames) throws MetaException, NoSuchObjectException {
return new GetHelper<AggrStats>(dbName, tblName, true, false) {
@Override
protected AggrStats getSqlResult(GetHelper<AggrStats> ctx)
throws MetaException {
return directSql.aggrColStatsForPartitions(dbName, tblName, partNames,
colNames);
}
@Override
protected AggrStats getJdoResult(GetHelper<AggrStats> ctx)
throws MetaException, NoSuchObjectException {
        // This is a fast path for query optimizations: if we can find this info quickly
        // using directSql, do it. No point in falling back to the slow path here.
throw new MetaException("Jdo path is not implemented for stats aggr.");
}
@Override
protected String describeResult() {
return null;
}
}.run(true);
}
private List<MPartitionColumnStatistics> getMPartitionColumnStatistics(
Table table, List<String> partNames, List<String> colNames)
throws NoSuchObjectException, MetaException {
boolean committed = false;
try {
openTransaction();
// We are not going to verify SD for each partition. Just verify for the table.
validateTableCols(table, colNames);
Query query = pm.newQuery(MPartitionColumnStatistics.class);
String paramStr = "java.lang.String t1, java.lang.String t2";
String filter = "tableName == t1 && dbName == t2 && (";
Object[] params = new Object[colNames.size() + partNames.size() + 2];
int i = 0;
params[i++] = table.getTableName();
params[i++] = table.getDbName();
int firstI = i;
for (String s : partNames) {
filter += ((i == firstI) ? "" : " || ") + "partitionName == p" + i;
paramStr += ", java.lang.String p" + i;
params[i++] = s;
}
filter += ") && (";
firstI = i;
for (String s : colNames) {
filter += ((i == firstI) ? "" : " || ") + "colName == c" + i;
paramStr += ", java.lang.String c" + i;
params[i++] = s;
}
filter += ")";
query.setFilter(filter);
query.declareParameters(paramStr);
query.setOrdering("partitionName ascending");
@SuppressWarnings("unchecked")
List<MPartitionColumnStatistics> result =
(List<MPartitionColumnStatistics>) query.executeWithArray(params);
pm.retrieveAll(result);
committed = commitTransaction();
return result;
} catch (Exception ex) {
LOG.error("Error retrieving statistics via jdo", ex);
if (ex instanceof MetaException) {
throw (MetaException)ex;
}
throw new MetaException(ex.getMessage());
} finally {
if (!committed) {
rollbackTransaction();
return Lists.newArrayList();
}
}
}
private void dropPartitionColumnStatisticsNoTxn(
String dbName, String tableName, List<String> partNames) throws MetaException {
ObjectPair<Query, Object[]> queryWithParams = makeQueryByPartitionNames(
dbName, tableName, partNames, MPartitionColumnStatistics.class,
"tableName", "dbName", "partition.partitionName");
queryWithParams.getFirst().deletePersistentAll(queryWithParams.getSecond());
}
@Override
public boolean deletePartitionColumnStatistics(String dbName, String tableName,
String partName, List<String> partVals, String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException {
boolean ret = false;
if (dbName == null) {
dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
}
if (tableName == null) {
throw new InvalidInputException("Table name is null.");
}
try {
openTransaction();
MTable mTable = getMTable(dbName, tableName);
MPartitionColumnStatistics mStatsObj;
List<MPartitionColumnStatistics> mStatsObjColl;
      if (mTable == null) {
        throw new NoSuchObjectException("Table " + tableName +
            " for which stats deletion is requested doesn't exist");
      }
MPartition mPartition =
getMPartition(dbName, tableName, partVals);
      if (mPartition == null) {
        throw new NoSuchObjectException("Partition " + partName +
            " for which stats deletion is requested doesn't exist");
      }
Query query = pm.newQuery(MPartitionColumnStatistics.class);
String filter;
String parameters;
if (colName != null) {
filter = "partition.partitionName == t1 && dbName == t2 && tableName == t3 && " +
"colName == t4";
parameters = "java.lang.String t1, java.lang.String t2, " +
"java.lang.String t3, java.lang.String t4";
} else {
filter = "partition.partitionName == t1 && dbName == t2 && tableName == t3";
parameters = "java.lang.String t1, java.lang.String t2, java.lang.String t3";
}
query.setFilter(filter);
      query.declareParameters(parameters);
if (colName != null) {
query.setUnique(true);
mStatsObj = (MPartitionColumnStatistics)query.executeWithArray(partName.trim(),
HiveStringUtils.normalizeIdentifier(dbName),
HiveStringUtils.normalizeIdentifier(tableName),
HiveStringUtils.normalizeIdentifier(colName));
pm.retrieve(mStatsObj);
if (mStatsObj != null) {
pm.deletePersistent(mStatsObj);
} else {
throw new NoSuchObjectException("Column stats doesn't exist for db=" +dbName + " table="
+ tableName + " partition=" + partName + " col=" + colName);
}
} else {
mStatsObjColl= (List<MPartitionColumnStatistics>)query.execute(partName.trim(),
HiveStringUtils.normalizeIdentifier(dbName),
HiveStringUtils.normalizeIdentifier(tableName));
pm.retrieveAll(mStatsObjColl);
if (mStatsObjColl != null) {
pm.deletePersistentAll(mStatsObjColl);
} else {
throw new NoSuchObjectException("Column stats doesn't exist for db=" + dbName +
" table=" + tableName + " partition" + partName);
}
}
ret = commitTransaction();
} catch(NoSuchObjectException e) {
rollbackTransaction();
throw e;
} finally {
if (!ret) {
rollbackTransaction();
}
}
return ret;
}
@Override
public boolean deleteTableColumnStatistics(String dbName, String tableName, String colName)
throws NoSuchObjectException, MetaException, InvalidObjectException, InvalidInputException
{
boolean ret = false;
if (dbName == null) {
dbName = MetaStoreUtils.DEFAULT_DATABASE_NAME;
}
if (tableName == null) {
throw new InvalidInputException("Table name is null.");
}
try {
openTransaction();
MTable mTable = getMTable(dbName, tableName);
MTableColumnStatistics mStatsObj;
List<MTableColumnStatistics> mStatsObjColl;
      if (mTable == null) {
        throw new NoSuchObjectException("Table " + tableName +
            " for which stats deletion is requested doesn't exist");
      }
Query query = pm.newQuery(MTableColumnStatistics.class);
String filter;
String parameters;
if (colName != null) {
filter = "table.tableName == t1 && dbName == t2 && colName == t3";
parameters = "java.lang.String t1, java.lang.String t2, java.lang.String t3";
} else {
filter = "table.tableName == t1 && dbName == t2";
parameters = "java.lang.String t1, java.lang.String t2";
}
query.setFilter(filter);
      query.declareParameters(parameters);
if (colName != null) {
query.setUnique(true);
mStatsObj = (MTableColumnStatistics)query.execute(
HiveStringUtils.normalizeIdentifier(tableName),
HiveStringUtils.normalizeIdentifier(dbName),
HiveStringUtils.normalizeIdentifier(colName));
pm.retrieve(mStatsObj);
if (mStatsObj != null) {
pm.deletePersistent(mStatsObj);
} else {
throw new NoSuchObjectException("Column stats doesn't exist for db=" +dbName + " table="
+ tableName + " col=" + colName);
}
} else {
mStatsObjColl= (List<MTableColumnStatistics>)query.execute(
HiveStringUtils.normalizeIdentifier(tableName),
HiveStringUtils.normalizeIdentifier(dbName));
pm.retrieveAll(mStatsObjColl);
if (mStatsObjColl != null) {
pm.deletePersistentAll(mStatsObjColl);
} else {
throw new NoSuchObjectException("Column stats doesn't exist for db=" + dbName +
" table=" + tableName);
}
}
ret = commitTransaction();
} catch(NoSuchObjectException e) {
rollbackTransaction();
throw e;
} finally {
if (!ret) {
rollbackTransaction();
}
}
return ret;
}
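  // Bulk-deletes expired partition events with JDOQL deletePersistentAll, which removes
  // every instance matching the filter in a single datastore operation.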
@Override
public long cleanupEvents() {
    boolean committed = false;
long delCnt;
LOG.debug("Begin executing cleanupEvents");
Long expiryTime = HiveConf.getTimeVar(getConf(), ConfVars.METASTORE_EVENT_EXPIRY_DURATION, TimeUnit.MILLISECONDS);
Long curTime = System.currentTimeMillis();
try {
openTransaction();
Query query = pm.newQuery(MPartitionEvent.class,"curTime - eventTime > expiryTime");
query.declareParameters("java.lang.Long curTime, java.lang.Long expiryTime");
delCnt = query.deletePersistentAll(curTime, expiryTime);
      committed = commitTransaction();
    } finally {
      if (!committed) {
rollbackTransaction();
}
LOG.debug("Done executing cleanupEvents");
}
return delCnt;
}
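  // setUnique(true) makes the query return a single object (or null) rather than a
  // collection, which suits this primary-key style lookup on tokenIdentifier.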
private MDelegationToken getTokenFrom(String tokenId) {
Query query = pm.newQuery(MDelegationToken.class, "tokenIdentifier == tokenId");
query.declareParameters("java.lang.String tokenId");
query.setUnique(true);
return (MDelegationToken)query.execute(tokenId);
}
@Override
public boolean addToken(String tokenId, String delegationToken) {
LOG.debug("Begin executing addToken");
boolean committed = false;
MDelegationToken token;
try{
openTransaction();
token = getTokenFrom(tokenId);
if (token == null) {
        // add the token only if it doesn't already exist
pm.makePersistent(new MDelegationToken(tokenId, delegationToken));
}
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing addToken with status : " + committed);
return committed && (token == null);
}
@Override
public boolean removeToken(String tokenId) {
LOG.debug("Begin executing removeToken");
boolean committed = false;
MDelegationToken token;
try{
openTransaction();
token = getTokenFrom(tokenId);
if (null != token) {
pm.deletePersistent(token);
}
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing removeToken with status : " + committed);
return committed && (token != null);
}
@Override
public String getToken(String tokenId) {
LOG.debug("Begin executing getToken");
boolean committed = false;
MDelegationToken token;
try{
openTransaction();
token = getTokenFrom(tokenId);
if (null != token) {
pm.retrieve(token);
}
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing getToken with status : " + committed);
return (null == token) ? null : token.getTokenStr();
}
@Override
public List<String> getAllTokenIdentifiers() {
LOG.debug("Begin executing getAllTokenIdentifiers");
boolean committed = false;
List<MDelegationToken> tokens;
try{
openTransaction();
Query query = pm.newQuery(MDelegationToken.class);
tokens = (List<MDelegationToken>) query.execute();
pm.retrieveAll(tokens);
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing getAllTokenIdentifers with status : " + committed);
List<String> tokenIdents = new ArrayList<String>(tokens.size());
for (MDelegationToken token : tokens) {
tokenIdents.add(token.getTokenIdentifier());
}
return tokenIdents;
}
@Override
public int addMasterKey(String key) throws MetaException{
LOG.debug("Begin executing addMasterKey");
boolean committed = false;
MMasterKey masterKey = new MMasterKey(key);
try{
openTransaction();
pm.makePersistent(masterKey);
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing addMasterKey with status : " + committed);
if (committed) {
return ((IntIdentity)pm.getObjectId(masterKey)).getKey();
} else {
throw new MetaException("Failed to add master key.");
}
}
@Override
public void updateMasterKey(Integer id, String key) throws NoSuchObjectException, MetaException {
LOG.debug("Begin executing updateMasterKey");
boolean committed = false;
MMasterKey masterKey;
try{
openTransaction();
Query query = pm.newQuery(MMasterKey.class, "keyId == id");
query.declareParameters("java.lang.Integer id");
query.setUnique(true);
masterKey = (MMasterKey)query.execute(id);
if (null != masterKey) {
masterKey.setMasterKey(key);
}
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing updateMasterKey with status : " + committed);
if (null == masterKey) {
throw new NoSuchObjectException("No key found with keyId: " + id);
}
if (!committed) {
throw new MetaException("Though key is found, failed to update it. " + id);
}
}
@Override
public boolean removeMasterKey(Integer id) {
LOG.debug("Begin executing removeMasterKey");
boolean success = false;
MMasterKey masterKey;
try{
openTransaction();
Query query = pm.newQuery(MMasterKey.class, "keyId == id");
query.declareParameters("java.lang.Integer id");
query.setUnique(true);
masterKey = (MMasterKey)query.execute(id);
if (null != masterKey) {
pm.deletePersistent(masterKey);
}
success = commitTransaction();
} finally {
if(!success) {
rollbackTransaction();
}
}
LOG.debug("Done executing removeMasterKey with status : " + success);
return (null != masterKey) && success;
}
@Override
public String[] getMasterKeys() {
LOG.debug("Begin executing getMasterKeys");
boolean committed = false;
List<MMasterKey> keys;
try{
openTransaction();
Query query = pm.newQuery(MMasterKey.class);
keys = (List<MMasterKey>) query.execute();
pm.retrieveAll(keys);
committed = commitTransaction();
} finally {
if(!committed) {
rollbackTransaction();
}
}
LOG.debug("Done executing getMasterKeys with status : " + committed);
String[] masterKeys = new String[keys.size()];
for (int i = 0; i < keys.size(); i++) {
masterKeys[i] = keys.get(i).getMasterKey();
}
return masterKeys;
}
// compare hive version and metastore version
@Override
public void verifySchema() throws MetaException {
// If the schema version is already checked, then go ahead and use this metastore
if (isSchemaVerified.get()) {
return;
}
checkSchema();
}
public static void setSchemaVerified(boolean val) {
isSchemaVerified.set(val);
}
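  // checkSchema is synchronized and rechecks the flag so that concurrent callers of
  // verifySchema perform the (potentially expensive) version lookup at most once.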
private synchronized void checkSchema() throws MetaException {
// recheck if it got verified by another thread while we were waiting
if (isSchemaVerified.get()) {
return;
}
boolean strictValidation =
HiveConf.getBoolVar(getConf(), HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION);
// read the schema version stored in metastore db
String schemaVer = getMetaStoreSchemaVersion();
if (schemaVer == null) {
if (strictValidation) {
throw new MetaException("Version information not found in metastore. ");
} else {
LOG.warn("Version information not found in metastore. "
+ HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION.toString() +
" is not enabled so recording the schema version " +
MetaStoreSchemaInfo.getHiveSchemaVersion());
setMetaStoreSchemaVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(),
"Set by MetaStore " + USER + "@" + HOSTNAME);
}
} else {
      // the metastore schema version differs from what this Hive distribution expects
if (schemaVer.equalsIgnoreCase(MetaStoreSchemaInfo.getHiveSchemaVersion())) {
LOG.debug("Found expected HMS version of " + schemaVer);
} else {
if (strictValidation) {
throw new MetaException("Hive Schema version "
+ MetaStoreSchemaInfo.getHiveSchemaVersion() +
" does not match metastore's schema version " + schemaVer +
" Metastore is not upgraded or corrupt");
} else {
LOG.error("Version information found in metastore differs " + schemaVer +
" from expected schema version " + MetaStoreSchemaInfo.getHiveSchemaVersion() +
". Schema verififcation is disabled " +
HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION + " so setting version.");
setMetaStoreSchemaVersion(MetaStoreSchemaInfo.getHiveSchemaVersion(),
"Set by MetaStore " + USER + "@" + HOSTNAME);
}
}
}
isSchemaVerified.set(true);
return;
}
// load the schema version stored in metastore db
@Override
public String getMetaStoreSchemaVersion() throws MetaException {
MVersionTable mSchemaVer;
try {
mSchemaVer = getMSchemaVersion();
} catch (NoSuchObjectException e) {
return null;
}
return mSchemaVer.getSchemaVersion();
}
@SuppressWarnings("unchecked")
private MVersionTable getMSchemaVersion()
throws NoSuchObjectException, MetaException {
boolean committed = false;
List<MVersionTable> mVerTables = new ArrayList<MVersionTable>();
try {
openTransaction();
Query query = pm.newQuery(MVersionTable.class);
try {
mVerTables = (List<MVersionTable>)query.execute();
pm.retrieveAll(mVerTables);
} catch (JDODataStoreException e) {
if (e.getCause() instanceof MissingTableException) {
throw new MetaException("Version table not found. " +
"The metastore is not upgraded to " + MetaStoreSchemaInfo.getHiveSchemaVersion());
} else {
throw e;
}
}
committed = commitTransaction();
} finally {
if (!committed) {
rollbackTransaction();
}
}
if (mVerTables.isEmpty()) {
throw new NoSuchObjectException("No matching version found");
}
if (mVerTables.size() > 1) {
String msg = "Metastore contains multiple versions (" + mVerTables.size() + ") ";
for (MVersionTable version : mVerTables) {
msg += "[ version = " + version.getSchemaVersion() + ", comment = " + version.getVersionComment() + " ] ";
}
throw new MetaException(msg.trim());
}
return mVerTables.get(0);
}
@Override
public void setMetaStoreSchemaVersion(String schemaVersion, String comment) throws MetaException {
MVersionTable mSchemaVer;
boolean commited = false;
boolean recordVersion =
HiveConf.getBoolVar(getConf(), HiveConf.ConfVars.METASTORE_SCHEMA_VERIFICATION_RECORD_VERSION);
if (!recordVersion) {
LOG.warn("setMetaStoreSchemaVersion called but recording version is disabled: " +
"version = " + schemaVersion + ", comment = " + comment);
return;
}
try {
mSchemaVer = getMSchemaVersion();
} catch (NoSuchObjectException e) {
// if the version doesn't exist, then create it
mSchemaVer = new MVersionTable();
}
mSchemaVer.setSchemaVersion(schemaVersion);
mSchemaVer.setVersionComment(comment);
try {
openTransaction();
pm.makePersistent(mSchemaVer);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
@Override
public boolean doesPartitionExist(String dbName, String tableName, List<String> partVals)
throws MetaException {
boolean success = false;
try {
openTransaction();
dbName = HiveStringUtils.normalizeIdentifier(dbName);
tableName = HiveStringUtils.normalizeIdentifier(tableName);
// TODO: this could also be passed from upper layer; or this method should filter the list.
MTable mtbl = getMTable(dbName, tableName);
if (mtbl == null) {
success = commitTransaction();
return false;
}
Query query = pm.newQuery(
"select partitionName from org.apache.hadoop.hive.metastore.model.MPartition "
+ "where table.tableName == t1 && table.database.name == t2 && partitionName == t3");
query.declareParameters("java.lang.String t1, java.lang.String t2, java.lang.String t3");
query.setUnique(true);
query.setResult("partitionName");
String name = Warehouse.makePartName(
convertToFieldSchemas(mtbl.getPartitionKeys()), partVals);
String result = (String)query.execute(tableName, dbName, name);
success = commitTransaction();
return result != null;
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private void debugLog(String message) {
if (LOG.isDebugEnabled()) {
LOG.debug(message + getCallStack());
}
}
private static final int stackLimit = 5;
private String getCallStack() {
StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
int thislimit = Math.min(stackLimit, stackTrace.length);
StringBuilder sb = new StringBuilder();
sb.append(" at:");
for (int i = 4; i < thislimit; i++) {
sb.append("\n\t");
sb.append(stackTrace[i].toString());
}
return sb.toString();
}
private Function convertToFunction(MFunction mfunc) {
if (mfunc == null) {
return null;
}
Function func = new Function(mfunc.getFunctionName(),
mfunc.getDatabase().getName(),
mfunc.getClassName(),
mfunc.getOwnerName(),
PrincipalType.valueOf(mfunc.getOwnerType()),
mfunc.getCreateTime(),
FunctionType.findByValue(mfunc.getFunctionType()),
convertToResourceUriList(mfunc.getResourceUris()));
return func;
}
private MFunction convertToMFunction(Function func) throws InvalidObjectException {
if (func == null) {
return null;
}
MDatabase mdb = null;
try {
mdb = getMDatabase(func.getDbName());
} catch (NoSuchObjectException e) {
LOG.error(StringUtils.stringifyException(e));
throw new InvalidObjectException("Database " + func.getDbName() + " doesn't exist.");
}
MFunction mfunc = new MFunction(func.getFunctionName(),
mdb,
func.getClassName(),
func.getOwnerName(),
func.getOwnerType().name(),
func.getCreateTime(),
func.getFunctionType().getValue(),
convertToMResourceUriList(func.getResourceUris()));
return mfunc;
}
private List<ResourceUri> convertToResourceUriList(List<MResourceUri> mresourceUriList) {
List<ResourceUri> resourceUriList = null;
if (mresourceUriList != null) {
resourceUriList = new ArrayList<ResourceUri>(mresourceUriList.size());
for (MResourceUri mres : mresourceUriList) {
resourceUriList.add(
new ResourceUri(ResourceType.findByValue(mres.getResourceType()), mres.getUri()));
}
}
return resourceUriList;
}
private List<MResourceUri> convertToMResourceUriList(List<ResourceUri> resourceUriList) {
List<MResourceUri> mresourceUriList = null;
if (resourceUriList != null) {
mresourceUriList = new ArrayList<MResourceUri>(resourceUriList.size());
for (ResourceUri res : resourceUriList) {
mresourceUriList.add(new MResourceUri(res.getResourceType().getValue(), res.getUri()));
}
}
return mresourceUriList;
}
@Override
public void createFunction(Function func) throws InvalidObjectException, MetaException {
boolean committed = false;
try {
openTransaction();
MFunction mfunc = convertToMFunction(func);
pm.makePersistent(mfunc);
committed = commitTransaction();
} finally {
if (!committed) {
rollbackTransaction();
}
}
}
@Override
public void alterFunction(String dbName, String funcName, Function newFunction)
throws InvalidObjectException, MetaException {
boolean success = false;
try {
openTransaction();
funcName = HiveStringUtils.normalizeIdentifier(funcName);
dbName = HiveStringUtils.normalizeIdentifier(dbName);
MFunction newf = convertToMFunction(newFunction);
if (newf == null) {
throw new InvalidObjectException("new function is invalid");
}
MFunction oldf = getMFunction(dbName, funcName);
if (oldf == null) {
throw new MetaException("function " + funcName + " doesn't exist");
}
// For now only alter name, owner, class name, type
oldf.setFunctionName(HiveStringUtils.normalizeIdentifier(newf.getFunctionName()));
oldf.setDatabase(newf.getDatabase());
oldf.setOwnerName(newf.getOwnerName());
oldf.setOwnerType(newf.getOwnerType());
oldf.setClassName(newf.getClassName());
oldf.setFunctionType(newf.getFunctionType());
// commit the changes
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
}
@Override
public void dropFunction(String dbName, String funcName) throws MetaException,
NoSuchObjectException, InvalidObjectException, InvalidInputException {
boolean success = false;
try {
openTransaction();
MFunction mfunc = getMFunction(dbName, funcName);
pm.retrieve(mfunc);
if (mfunc != null) {
// TODO: When function privileges are implemented, they should be deleted here.
pm.deletePersistentAll(mfunc);
}
success = commitTransaction();
} finally {
if (!success) {
rollbackTransaction();
}
}
}
private MFunction getMFunction(String db, String function) {
MFunction mfunc = null;
boolean commited = false;
try {
openTransaction();
db = HiveStringUtils.normalizeIdentifier(db);
function = HiveStringUtils.normalizeIdentifier(function);
Query query = pm.newQuery(MFunction.class, "functionName == function && database.name == db");
query.declareParameters("java.lang.String function, java.lang.String db");
query.setUnique(true);
mfunc = (MFunction) query.execute(function, db);
pm.retrieve(mfunc);
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return mfunc;
}
@Override
public Function getFunction(String dbName, String funcName) throws MetaException {
boolean commited = false;
Function func = null;
try {
openTransaction();
func = convertToFunction(getMFunction(dbName, funcName));
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return func;
}
@Override
public List<String> getFunctions(String dbName, String pattern)
throws MetaException {
boolean commited = false;
List<String> funcs = null;
try {
openTransaction();
dbName = HiveStringUtils.normalizeIdentifier(dbName);
// Take the pattern and split it on the | to get all the composing
// patterns
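      // e.g. pattern "json*|xml*" yields the case-insensitive regexes
      // "(?i)json.*" and "(?i)xml.*", OR-ed together in the JDOQL filter below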
String[] subpatterns = pattern.trim().split("\\|");
String query =
"select functionName from org.apache.hadoop.hive.metastore.model.MFunction "
+ "where database.name == dbName && (";
boolean first = true;
for (String subpattern : subpatterns) {
subpattern = "(?i)" + subpattern.replaceAll("\\*", ".*");
if (!first) {
query = query + " || ";
}
query = query + " functionName.matches(\"" + subpattern + "\")";
first = false;
}
query = query + ")";
Query q = pm.newQuery(query);
q.declareParameters("java.lang.String dbName");
q.setResult("functionName");
q.setOrdering("functionName ascending");
Collection names = (Collection) q.execute(dbName);
funcs = new ArrayList<String>();
for (Iterator i = names.iterator(); i.hasNext();) {
funcs.add((String) i.next());
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
return funcs;
}
@Override
public NotificationEventResponse getNextNotification(NotificationEventRequest rqst) {
boolean commited = false;
try {
openTransaction();
long lastEvent = rqst.getLastEvent();
Query query = pm.newQuery(MNotificationLog.class, "eventId > lastEvent");
query.declareParameters("java.lang.Long lastEvent");
query.setOrdering("eventId ascending");
Collection<MNotificationLog> events = (Collection)query.execute(lastEvent);
commited = commitTransaction();
if (events == null) {
return null;
}
Iterator<MNotificationLog> i = events.iterator();
NotificationEventResponse result = new NotificationEventResponse();
result.setEvents(new ArrayList<NotificationEvent>());
int maxEvents = rqst.getMaxEvents() > 0 ? rqst.getMaxEvents() : Integer.MAX_VALUE;
int numEvents = 0;
while (i.hasNext() && numEvents++ < maxEvents) {
result.addToEvents(translateDbToThrift(i.next()));
}
return result;
} finally {
if (!commited) {
rollbackTransaction();
return null;
}
}
}
@Override
public void addNotificationEvent(NotificationEvent entry) {
boolean commited = false;
try {
openTransaction();
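      // MNotificationNextId acts as a single-row sequence table: read the
      // current id, stamp it onto the event, then increment (and persist the
      // row if this is the very first event).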
Query query = pm.newQuery(MNotificationNextId.class);
Collection<MNotificationNextId> ids = (Collection) query.execute();
MNotificationNextId id = null;
boolean needToPersistId;
if (ids == null || ids.size() == 0) {
id = new MNotificationNextId(1L);
needToPersistId = true;
} else {
id = ids.iterator().next();
needToPersistId = false;
}
entry.setEventId(id.getNextEventId());
id.incrementEventId();
      if (needToPersistId) {
        pm.makePersistent(id);
      }
pm.makePersistent(translateThriftToDb(entry));
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
@Override
public void cleanNotificationEvents(int olderThan) {
boolean commited = false;
try {
openTransaction();
long tmp = System.currentTimeMillis() / 1000 - olderThan;
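      // eventTime is stored as epoch seconds in an int column; if the cutoff
      // cannot fit in an int, fall back to 0 so that nothing is deleted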
int tooOld = (tmp > Integer.MAX_VALUE) ? 0 : (int)tmp;
Query query = pm.newQuery(MNotificationLog.class, "eventTime < tooOld");
query.declareParameters("java.lang.Integer tooOld");
Collection<MNotificationLog> toBeRemoved = (Collection)query.execute(tooOld);
if (toBeRemoved != null && toBeRemoved.size() > 0) {
        // deletePersistentAll is the JDO call for deleting a whole collection
        pm.deletePersistentAll(toBeRemoved);
}
commited = commitTransaction();
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
@Override
public CurrentNotificationEventId getCurrentNotificationEventId() {
boolean commited = false;
try {
openTransaction();
Query query = pm.newQuery(MNotificationNextId.class);
Collection<MNotificationNextId> ids = (Collection)query.execute();
long id = 0;
if (ids != null && ids.size() > 0) {
id = ids.iterator().next().getNextEventId() - 1;
}
commited = commitTransaction();
return new CurrentNotificationEventId(id);
} finally {
if (!commited) {
rollbackTransaction();
}
}
}
private MNotificationLog translateThriftToDb(NotificationEvent entry) {
MNotificationLog dbEntry = new MNotificationLog();
dbEntry.setEventId(entry.getEventId());
dbEntry.setEventTime(entry.getEventTime());
dbEntry.setEventType(entry.getEventType());
dbEntry.setDbName(entry.getDbName());
dbEntry.setTableName(entry.getTableName());
dbEntry.setMessage(entry.getMessage());
return dbEntry;
}
private NotificationEvent translateDbToThrift(MNotificationLog dbEvent) {
NotificationEvent event = new NotificationEvent();
event.setEventId(dbEvent.getEventId());
event.setEventTime(dbEvent.getEventTime());
event.setEventType(dbEvent.getEventType());
event.setDbName(dbEvent.getDbName());
event.setTableName(dbEvent.getTableName());
event.setMessage((dbEvent.getMessage()));
return event;
}
}
|
package com.atguigu.gmall.sms.service;
import com.baomidou.mybatisplus.extension.service.IService;
import com.atguigu.gmall.sms.entity.SpuFullReductionEntity;
import com.atguigu.core.bean.PageVo;
import com.atguigu.core.bean.QueryCondition;
/**
 * Product full-reduction (spend-threshold discount) information
*
* @author abu
* @email lxf@atguigu.com
* @date 2020-04-10 22:45:33
*/
public interface SpuFullReductionService extends IService<SpuFullReductionEntity> {
PageVo queryPage(QueryCondition params);
}
|
/*
* Copyright 2012-2013 Gephi Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.gephi.graph.api.types;
import java.util.Arrays;
import org.gephi.graph.api.AttributeUtils;
import org.gephi.graph.api.TimeFormat;
import org.gephi.graph.impl.FormattingAndParsingUtils;
import org.joda.time.DateTimeZone;
/**
* Sorted set for timestamps.
*/
public final class TimestampSet implements TimeSet<Double> {
private double[] array;
private int size = 0;
/**
* Default constructor.
* <p>
* The set is empty with zero capacity.
*/
public TimestampSet() {
array = new double[0];
}
/**
* Constructor with capacity.
* <p>
     * Using this constructor can improve performance if the number of timestamps
* is known in advance as it minimizes array resizes.
*
* @param capacity timestamp capacity
*/
public TimestampSet(int capacity) {
array = new double[capacity];
Arrays.fill(array, Double.MAX_VALUE);
}
/**
* Constructor with an initial timestamp set.
* <p>
* The given array must be sorted and contain no duplicates.
*
* @param arr initial set content
*/
public TimestampSet(double[] arr) {
array = new double[arr.length];
System.arraycopy(arr, 0, array, 0, arr.length);
size = arr.length;
}
@Override
public boolean add(Double timestamp) {
return addInner(timestamp) >= 0;
}
@Override
public boolean remove(Double timestamp) {
return removeInner(timestamp) >= 0;
}
@Override
public int size() {
return size;
}
@Override
public boolean isEmpty() {
return size == 0;
}
@Override
public boolean contains(Double timestamp) {
        // search only the occupied prefix; a full-array search could land on a
        // stale slot beyond 'size' and wrongly report a present value as absent
        int index = Arrays.binarySearch(array, 0, size, timestamp);
        return index >= 0;
}
@Override
public Double[] toArray() {
Double[] res = new Double[size];
for (int i = 0; i < size; i++) {
res[i] = array[i];
}
return res;
}
@Override
public double[] toPrimitiveArray() {
if (size < array.length) {
double[] res = new double[size];
System.arraycopy(array, 0, res, 0, size);
return res;
} else {
return array;
}
}
@Override
public void clear() {
size = 0;
array = new double[0];
}
private int addInner(double timestamp) {
int index = Arrays.binarySearch(array, 0, size, timestamp);
if (index < 0) {
int insertIndex = -index - 1;
if (size < array.length - 1) {
if (insertIndex < size) {
System.arraycopy(array, insertIndex, array, insertIndex + 1, size - insertIndex);
}
array[insertIndex] = timestamp;
} else {
double[] newArray = new double[array.length + 1];
System.arraycopy(array, 0, newArray, 0, insertIndex);
System.arraycopy(array, insertIndex, newArray, insertIndex + 1, array.length - insertIndex);
newArray[insertIndex] = timestamp;
array = newArray;
}
size++;
return insertIndex;
}
return -1;
}
private int removeInner(double timestamp) {
int index = Arrays.binarySearch(array, 0, size, timestamp);
if (index >= 0) {
int removeIndex = index;
if (removeIndex == size - 1) {
size--;
} else {
System.arraycopy(array, removeIndex + 1, array, removeIndex, size - removeIndex - 1);
size--;
}
return removeIndex;
}
return -1;
}
@Override
public int hashCode() {
int hash = 7;
hash = 37 * hash + this.size;
for (int i = 0; i < size; i++) {
double t = this.array[i];
hash = 37 * hash + (int) (Double.doubleToLongBits(t) ^ (Double.doubleToLongBits(t) >>> 32));
}
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final TimestampSet other = (TimestampSet) obj;
if (this.size != other.size) {
return false;
}
for (int i = 0; i < size; i++) {
double i1 = this.array[i];
double i2 = other.array[i];
if (i1 != i2) {
return false;
}
}
return true;
}
@Override
public String toString(TimeFormat timeFormat, DateTimeZone timeZone) {
if (size == 0) {
return FormattingAndParsingUtils.EMPTY_VALUE;
}
StringBuilder sb = new StringBuilder();
sb.append("<[");
for (int i = 0; i < size; i++) {
sb.append(AttributeUtils.printTimestampInFormat(array[i], timeFormat, timeZone));
if (i < size - 1) {
sb.append(", ");
}
}
sb.append("]>");
return sb.toString();
}
@Override
public String toString(TimeFormat timeFormat) {
return toString(timeFormat, null);
}
@Override
public String toString() {
return toString(TimeFormat.DOUBLE, null);
}
}
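// A minimal usage sketch for TimestampSet, kept as a comment so the file stays
// valid Java; all names come from the class above:
//
//   TimestampSet set = new TimestampSet(4);   // pre-size to avoid array growth
//   set.add(1.0);
//   set.add(3.0);
//   set.add(2.0);                             // kept sorted internally
//   assert set.contains(2.0);
//   double[] values = set.toPrimitiveArray(); // {1.0, 2.0, 3.0}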
|
/*
* Copyright 2018, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.enmasse.api.v1.http.apiserver;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.util.List;
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonPropertyOrder({
"apiVersion",
"kind"
})
public class APIGroup {
@NotNull
@JsonProperty("apiVersion")
private String apiVersion = "v1";
@NotNull
@JsonProperty("kind")
private String kind = "APIGroup";
@JsonProperty("name")
@Valid
private String name;
@JsonProperty("versions")
@Valid
private List<APIGroupVersion> versions;
@JsonProperty("preferredVersion")
@Valid
private APIGroupVersion preferredVersion;
@JsonProperty("serverAddressByClientCIDRs")
@Valid
private String serverAddressByClientCIDRs;
public APIGroup() {
}
public APIGroup(String apiVersion, String kind, String name, List<APIGroupVersion> versions, APIGroupVersion preferredVersion, String serverAddressByClientCIDRs) {
this.apiVersion = apiVersion;
this.kind = kind;
this.name = name;
this.versions = versions;
this.preferredVersion = preferredVersion;
this.serverAddressByClientCIDRs = serverAddressByClientCIDRs;
}
@JsonProperty("apiVersion")
public String getApiVersion() {
return apiVersion;
}
@JsonProperty("apiVersion")
public void setApiVersion(String apiVersion) {
this.apiVersion = apiVersion;
}
@JsonProperty("kind")
public String getKind() {
return kind;
}
@JsonProperty("kind")
public void setKind(String kind) {
this.kind = kind;
}
@JsonProperty("name")
public String getName() {
return name;
}
@JsonProperty("name")
public void setName(String name) {
this.name = name;
}
@JsonProperty("versions")
private List<APIGroupVersion> getVersions() {
return versions;
}
@JsonProperty("versions")
private void setVersions(List<APIGroupVersion> versions) {
this.versions = versions;
}
@JsonProperty("preferredVersion")
private APIGroupVersion getPreferredVersion() {
return preferredVersion;
}
@JsonProperty("preferredVersion")
private void setPreferredVersion(APIGroupVersion preferredVersion) {
this.preferredVersion = preferredVersion;
}
@JsonProperty("serverAddressByClientCIDRs")
private String getServerAddressByClientCIDRs() {
return serverAddressByClientCIDRs;
}
@JsonProperty("serverAddressByClientCIDRs")
private void setServerAddressByClientCIDRs(String serverAddressByClientCIDRs) {
this.serverAddressByClientCIDRs = serverAddressByClientCIDRs;
}
}
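// With the annotations above, a populated instance serializes to JSON roughly
// as follows; the group name and version entries are made-up example values,
// and null fields are omitted because of @JsonInclude(Include.NON_NULL):
//
//   {
//     "apiVersion": "v1",
//     "kind": "APIGroup",
//     "name": "example.io",
//     "versions": [ ... ],
//     "preferredVersion": { ... }
//   }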
|
/*
* Copyright [2005] [University Corporation for Advanced Internet Development, Inc.]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.internet2.middleware.shibboleth.wayf.idpdisco;
import javax.xml.namespace.QName;
import org.opensaml.saml2.metadata.LocalizedString;
/**
* DisplayName.
*
* See IdP Discovery and Login UI Metadata Extension Profile.
*
* @author Rod Widdowson August 2010
*
 * Reflects the UIInfo element in the IdP Discovery and Login UI Metadata Extension Profile.
 */
public interface DisplayName extends LocalizedName {
/** Element local name. */
public static final String DEFAULT_ELEMENT_LOCAL_NAME = "DisplayName";
/** Default element name. */
public static final QName DEFAULT_ELEMENT_NAME = new QName(UIInfo.MDUI_NS,
DEFAULT_ELEMENT_LOCAL_NAME, UIInfo.MDUI_PREFIX);
/**
* Gets the name of the EndPoint.
*
* @return the name of the EndPoint
*/
public LocalizedString getName();
/**
* Sets the EndPoint name.
*
* @param newName EndPoint name
*/
public void setName(LocalizedString newName);
}
|
//| Copyright - The University of Edinburgh 2018 |
//| |
//| Licensed under the Apache License, Version 2.0 (the "License"); |
//| you may not use this file except in compliance with the License. |
//| You may obtain a copy of the License at |
//| |
//| http://www.apache.org/licenses/LICENSE-2.0 |
//| |
//| Unless required by applicable law or agreed to in writing, software |
//| distributed under the License is distributed on an "AS IS" BASIS, |
//| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.|
//| See the License for the specific language governing permissions and |
//| limitations under the License. |
package uk.ac.ed.epcc.webapp.model;
import java.util.Date;
/** An interface for Factories that can delete data older than a specific date.
*
* This is primarily intended for reducing the size of debugging databases.
* @author Stephen Booth
*
*/
public interface TimePurgeFactory {
public void purgeOldData(Date epoch) throws Exception;
}
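// A hypothetical implementation sketch; DebugLogFactory and deleteWhere() are
// illustrative names, not part of this codebase:
//
//   public class DebugLogFactory implements TimePurgeFactory {
//     @Override
//     public void purgeOldData(Date epoch) throws Exception {
//       // remove every record created strictly before the given epoch
//       deleteWhere("created < ?", epoch);
//     }
//   }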
|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/language/v1beta2/language_service.proto
package com.google.cloud.language.v1beta2;
/**
* <pre>
* Represents a sentence in the input document.
* </pre>
*
* Protobuf type {@code google.cloud.language.v1beta2.Sentence}
*/
public final class Sentence extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.cloud.language.v1beta2.Sentence)
SentenceOrBuilder {
private static final long serialVersionUID = 0L;
// Use Sentence.newBuilder() to construct.
private Sentence(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Sentence() {
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Sentence(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownFieldProto3(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
com.google.cloud.language.v1beta2.TextSpan.Builder subBuilder = null;
if (text_ != null) {
subBuilder = text_.toBuilder();
}
text_ = input.readMessage(com.google.cloud.language.v1beta2.TextSpan.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(text_);
text_ = subBuilder.buildPartial();
}
break;
}
case 18: {
com.google.cloud.language.v1beta2.Sentiment.Builder subBuilder = null;
if (sentiment_ != null) {
subBuilder = sentiment_.toBuilder();
}
sentiment_ = input.readMessage(com.google.cloud.language.v1beta2.Sentiment.parser(), extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(sentiment_);
sentiment_ = subBuilder.buildPartial();
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.language.v1beta2.LanguageServiceProto.internal_static_google_cloud_language_v1beta2_Sentence_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.language.v1beta2.LanguageServiceProto.internal_static_google_cloud_language_v1beta2_Sentence_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.language.v1beta2.Sentence.class, com.google.cloud.language.v1beta2.Sentence.Builder.class);
}
public static final int TEXT_FIELD_NUMBER = 1;
private com.google.cloud.language.v1beta2.TextSpan text_;
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public boolean hasText() {
return text_ != null;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public com.google.cloud.language.v1beta2.TextSpan getText() {
return text_ == null ? com.google.cloud.language.v1beta2.TextSpan.getDefaultInstance() : text_;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public com.google.cloud.language.v1beta2.TextSpanOrBuilder getTextOrBuilder() {
return getText();
}
public static final int SENTIMENT_FIELD_NUMBER = 2;
private com.google.cloud.language.v1beta2.Sentiment sentiment_;
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public boolean hasSentiment() {
return sentiment_ != null;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public com.google.cloud.language.v1beta2.Sentiment getSentiment() {
return sentiment_ == null ? com.google.cloud.language.v1beta2.Sentiment.getDefaultInstance() : sentiment_;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public com.google.cloud.language.v1beta2.SentimentOrBuilder getSentimentOrBuilder() {
return getSentiment();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (text_ != null) {
output.writeMessage(1, getText());
}
if (sentiment_ != null) {
output.writeMessage(2, getSentiment());
}
unknownFields.writeTo(output);
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (text_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getText());
}
if (sentiment_ != null) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getSentiment());
}
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.language.v1beta2.Sentence)) {
return super.equals(obj);
}
com.google.cloud.language.v1beta2.Sentence other = (com.google.cloud.language.v1beta2.Sentence) obj;
boolean result = true;
result = result && (hasText() == other.hasText());
if (hasText()) {
result = result && getText()
.equals(other.getText());
}
result = result && (hasSentiment() == other.hasSentiment());
if (hasSentiment()) {
result = result && getSentiment()
.equals(other.getSentiment());
}
result = result && unknownFields.equals(other.unknownFields);
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasText()) {
hash = (37 * hash) + TEXT_FIELD_NUMBER;
hash = (53 * hash) + getText().hashCode();
}
if (hasSentiment()) {
hash = (37 * hash) + SENTIMENT_FIELD_NUMBER;
hash = (53 * hash) + getSentiment().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.language.v1beta2.Sentence parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.language.v1beta2.Sentence parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.cloud.language.v1beta2.Sentence parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.language.v1beta2.Sentence prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Represents a sentence in the input document.
* </pre>
*
* Protobuf type {@code google.cloud.language.v1beta2.Sentence}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.cloud.language.v1beta2.Sentence)
com.google.cloud.language.v1beta2.SentenceOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.cloud.language.v1beta2.LanguageServiceProto.internal_static_google_cloud_language_v1beta2_Sentence_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.language.v1beta2.LanguageServiceProto.internal_static_google_cloud_language_v1beta2_Sentence_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.language.v1beta2.Sentence.class, com.google.cloud.language.v1beta2.Sentence.Builder.class);
}
// Construct using com.google.cloud.language.v1beta2.Sentence.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
public Builder clear() {
super.clear();
if (textBuilder_ == null) {
text_ = null;
} else {
text_ = null;
textBuilder_ = null;
}
if (sentimentBuilder_ == null) {
sentiment_ = null;
} else {
sentiment_ = null;
sentimentBuilder_ = null;
}
return this;
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.cloud.language.v1beta2.LanguageServiceProto.internal_static_google_cloud_language_v1beta2_Sentence_descriptor;
}
public com.google.cloud.language.v1beta2.Sentence getDefaultInstanceForType() {
return com.google.cloud.language.v1beta2.Sentence.getDefaultInstance();
}
public com.google.cloud.language.v1beta2.Sentence build() {
com.google.cloud.language.v1beta2.Sentence result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.google.cloud.language.v1beta2.Sentence buildPartial() {
com.google.cloud.language.v1beta2.Sentence result = new com.google.cloud.language.v1beta2.Sentence(this);
if (textBuilder_ == null) {
result.text_ = text_;
} else {
result.text_ = textBuilder_.build();
}
if (sentimentBuilder_ == null) {
result.sentiment_ = sentiment_;
} else {
result.sentiment_ = sentimentBuilder_.build();
}
onBuilt();
return result;
}
public Builder clone() {
return (Builder) super.clone();
}
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.setField(field, value);
}
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return (Builder) super.clearField(field);
}
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return (Builder) super.clearOneof(oneof);
}
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return (Builder) super.setRepeatedField(field, index, value);
}
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return (Builder) super.addRepeatedField(field, value);
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.language.v1beta2.Sentence) {
return mergeFrom((com.google.cloud.language.v1beta2.Sentence)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.language.v1beta2.Sentence other) {
if (other == com.google.cloud.language.v1beta2.Sentence.getDefaultInstance()) return this;
if (other.hasText()) {
mergeText(other.getText());
}
if (other.hasSentiment()) {
mergeSentiment(other.getSentiment());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
public final boolean isInitialized() {
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.cloud.language.v1beta2.Sentence parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.cloud.language.v1beta2.Sentence) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private com.google.cloud.language.v1beta2.TextSpan text_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.language.v1beta2.TextSpan, com.google.cloud.language.v1beta2.TextSpan.Builder, com.google.cloud.language.v1beta2.TextSpanOrBuilder> textBuilder_;
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public boolean hasText() {
return textBuilder_ != null || text_ != null;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public com.google.cloud.language.v1beta2.TextSpan getText() {
if (textBuilder_ == null) {
return text_ == null ? com.google.cloud.language.v1beta2.TextSpan.getDefaultInstance() : text_;
} else {
return textBuilder_.getMessage();
}
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public Builder setText(com.google.cloud.language.v1beta2.TextSpan value) {
if (textBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
text_ = value;
onChanged();
} else {
textBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public Builder setText(
com.google.cloud.language.v1beta2.TextSpan.Builder builderForValue) {
if (textBuilder_ == null) {
text_ = builderForValue.build();
onChanged();
} else {
textBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public Builder mergeText(com.google.cloud.language.v1beta2.TextSpan value) {
if (textBuilder_ == null) {
if (text_ != null) {
text_ =
com.google.cloud.language.v1beta2.TextSpan.newBuilder(text_).mergeFrom(value).buildPartial();
} else {
text_ = value;
}
onChanged();
} else {
textBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public Builder clearText() {
if (textBuilder_ == null) {
text_ = null;
onChanged();
} else {
text_ = null;
textBuilder_ = null;
}
return this;
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public com.google.cloud.language.v1beta2.TextSpan.Builder getTextBuilder() {
onChanged();
return getTextFieldBuilder().getBuilder();
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
public com.google.cloud.language.v1beta2.TextSpanOrBuilder getTextOrBuilder() {
if (textBuilder_ != null) {
return textBuilder_.getMessageOrBuilder();
} else {
return text_ == null ?
com.google.cloud.language.v1beta2.TextSpan.getDefaultInstance() : text_;
}
}
/**
* <pre>
* The sentence text.
* </pre>
*
* <code>.google.cloud.language.v1beta2.TextSpan text = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.language.v1beta2.TextSpan, com.google.cloud.language.v1beta2.TextSpan.Builder, com.google.cloud.language.v1beta2.TextSpanOrBuilder>
getTextFieldBuilder() {
if (textBuilder_ == null) {
textBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.language.v1beta2.TextSpan, com.google.cloud.language.v1beta2.TextSpan.Builder, com.google.cloud.language.v1beta2.TextSpanOrBuilder>(
getText(),
getParentForChildren(),
isClean());
text_ = null;
}
return textBuilder_;
}
private com.google.cloud.language.v1beta2.Sentiment sentiment_ = null;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.language.v1beta2.Sentiment, com.google.cloud.language.v1beta2.Sentiment.Builder, com.google.cloud.language.v1beta2.SentimentOrBuilder> sentimentBuilder_;
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public boolean hasSentiment() {
return sentimentBuilder_ != null || sentiment_ != null;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public com.google.cloud.language.v1beta2.Sentiment getSentiment() {
if (sentimentBuilder_ == null) {
return sentiment_ == null ? com.google.cloud.language.v1beta2.Sentiment.getDefaultInstance() : sentiment_;
} else {
return sentimentBuilder_.getMessage();
}
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public Builder setSentiment(com.google.cloud.language.v1beta2.Sentiment value) {
if (sentimentBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
sentiment_ = value;
onChanged();
} else {
sentimentBuilder_.setMessage(value);
}
return this;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public Builder setSentiment(
com.google.cloud.language.v1beta2.Sentiment.Builder builderForValue) {
if (sentimentBuilder_ == null) {
sentiment_ = builderForValue.build();
onChanged();
} else {
sentimentBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public Builder mergeSentiment(com.google.cloud.language.v1beta2.Sentiment value) {
if (sentimentBuilder_ == null) {
if (sentiment_ != null) {
sentiment_ =
com.google.cloud.language.v1beta2.Sentiment.newBuilder(sentiment_).mergeFrom(value).buildPartial();
} else {
sentiment_ = value;
}
onChanged();
} else {
sentimentBuilder_.mergeFrom(value);
}
return this;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public Builder clearSentiment() {
if (sentimentBuilder_ == null) {
sentiment_ = null;
onChanged();
} else {
sentiment_ = null;
sentimentBuilder_ = null;
}
return this;
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public com.google.cloud.language.v1beta2.Sentiment.Builder getSentimentBuilder() {
onChanged();
return getSentimentFieldBuilder().getBuilder();
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
public com.google.cloud.language.v1beta2.SentimentOrBuilder getSentimentOrBuilder() {
if (sentimentBuilder_ != null) {
return sentimentBuilder_.getMessageOrBuilder();
} else {
return sentiment_ == null ?
com.google.cloud.language.v1beta2.Sentiment.getDefaultInstance() : sentiment_;
}
}
/**
* <pre>
* For calls to [AnalyzeSentiment][] or if
* [AnnotateTextRequest.Features.extract_document_sentiment][google.cloud.language.v1beta2.AnnotateTextRequest.Features.extract_document_sentiment] is set to
* true, this field will contain the sentiment for the sentence.
* </pre>
*
* <code>.google.cloud.language.v1beta2.Sentiment sentiment = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.language.v1beta2.Sentiment, com.google.cloud.language.v1beta2.Sentiment.Builder, com.google.cloud.language.v1beta2.SentimentOrBuilder>
getSentimentFieldBuilder() {
if (sentimentBuilder_ == null) {
sentimentBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.language.v1beta2.Sentiment, com.google.cloud.language.v1beta2.Sentiment.Builder, com.google.cloud.language.v1beta2.SentimentOrBuilder>(
getSentiment(),
getParentForChildren(),
isClean());
sentiment_ = null;
}
return sentimentBuilder_;
}
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFieldsProto3(unknownFields);
}
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.language.v1beta2.Sentence)
}
// @@protoc_insertion_point(class_scope:google.cloud.language.v1beta2.Sentence)
private static final com.google.cloud.language.v1beta2.Sentence DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.language.v1beta2.Sentence();
}
public static com.google.cloud.language.v1beta2.Sentence getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Sentence>
PARSER = new com.google.protobuf.AbstractParser<Sentence>() {
public Sentence parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Sentence(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Sentence> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Sentence> getParserForType() {
return PARSER;
}
public com.google.cloud.language.v1beta2.Sentence getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
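// A minimal builder sketch for the generated message; the span content and
// offset are arbitrary example values:
//
//   Sentence sentence = Sentence.newBuilder()
//       .setText(TextSpan.newBuilder()
//           .setContent("Hello world.")
//           .setBeginOffset(0))
//       .build();
//   boolean hasSentiment = sentence.hasSentiment(); // false until set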
|
/*
* Copyright (C) 2016-2021 The lgou2w <lgou2w@hotmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lgou2w.ldk.bukkit.packet;
import com.lgou2w.ldk.reflect.FieldAccessor;
import com.lgou2w.ldk.reflect.FuzzyReflection;
import com.lgou2w.ldk.reflect.MethodAccessor;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.entity.Player;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.function.Supplier;
import static com.lgou2w.ldk.bukkit.reflect.MinecraftReflection.getCraftBukkitClass;
import static com.lgou2w.ldk.bukkit.reflect.MinecraftReflection.getMinecraftClass;
public final class PacketFactory {
private PacketFactory() { }
/// NMS Classes
@NotNull public final static Class<?> CLASS_PACKET;
@NotNull public final static Class<?> CLASS_PACKET_LISTENER;
@NotNull public final static Class<?> CLASS_NETWORK_MANAGER;
@NotNull public final static Class<?> CLASS_PLAYER_CONNECTION;
@NotNull public final static Class<?> CLASS_ENTITY;
@NotNull public final static Class<?> CLASS_ENTITY_PLAYER;
@NotNull public final static Class<?> CLASS_CRAFT_ENTITY;
static {
try {
CLASS_PACKET = getMinecraftClass("Packet");
CLASS_PACKET_LISTENER = getMinecraftClass("PacketListener");
CLASS_NETWORK_MANAGER = getMinecraftClass("NetworkManager");
CLASS_PLAYER_CONNECTION = getMinecraftClass("PlayerConnection");
CLASS_ENTITY = getMinecraftClass("Entity");
CLASS_ENTITY_PLAYER = getMinecraftClass("EntityPlayer");
CLASS_CRAFT_ENTITY = getCraftBukkitClass("entity.CraftEntity");
} catch (ClassNotFoundException e) {
throw new RuntimeException("Error in initializing PacketFactory internal static block:", e);
}
}
/// NMS Accessors
// OBC.entity.CraftEntity -> public NMS.Entity getHandle();
final static Supplier<@NotNull MethodAccessor<Object, Object>> METHOD_CRAFT_ENTITY_GET_HANDLE
= FuzzyReflection.lazySupplier(CLASS_CRAFT_ENTITY, true, fuzzy -> fuzzy
.useMethodMatcher()
.withoutModifiers(Modifier.STATIC)
.withModifiers(Modifier.PUBLIC)
.withType(CLASS_ENTITY)
.resultAccessor("Missing match: OBC.entity.CraftEntity -> Method: public NMS.Entity getHandle()"));
// NMS.EntityPlayer -> public NMS.PlayerConnection playerConnection;
final static Supplier<@NotNull FieldAccessor<Object, Object>> FIELD_ENTITY_PLAYER_CONNECTION
= FuzzyReflection.lazySupplier(CLASS_ENTITY_PLAYER, true, fuzzy -> fuzzy
.useFieldMatcher()
.withoutModifiers(Modifier.STATIC)
.withType(CLASS_PLAYER_CONNECTION)
.resultAccessor("Missing match: NMS.EntityPlayer -> Field: public NMS.PlayerConnection playerConnection"));
// NMS.PlayerConnection -> public NMS.NetworkManager networkManager;
final static Supplier<@NotNull FieldAccessor<Object, Object>> FIELD_PLAYER_CONNECTION_MANAGER
= FuzzyReflection.lazySupplier(CLASS_PLAYER_CONNECTION, true, fuzzy -> fuzzy
.useFieldMatcher()
.withoutModifiers(Modifier.STATIC)
.withType(CLASS_NETWORK_MANAGER)
.resultAccessor("Missing match: NMS.PlayerConnection -> Field: public NMS.NetworkManager networkManager"));
// NMS.PlayerConnection -> public void sendPacket(NMS.Packet);
final static Supplier<@NotNull MethodAccessor<Object, Object>> METHOD_PLAYER_CONNECTION_SEND_PACKET
= FuzzyReflection.lazySupplier(CLASS_PLAYER_CONNECTION, true, fuzzy -> fuzzy
.useMethodMatcher()
.withoutModifiers(Modifier.STATIC)
.withArgs(CLASS_PACKET)
.resultAccessor("Missing match: NMS.PlayerConnection -> Method: public void sendPacket(NMS.Packet)"));
// NMS.Packet -> public void process(NMS.PacketListener);
final static Supplier<@NotNull MethodAccessor<Object, Object>> METHOD_PACKET_PROCESS
= FuzzyReflection.lazySupplier(CLASS_PACKET, true, fuzzy -> fuzzy
.useMethodMatcher()
.withoutModifiers(Modifier.STATIC)
.withArgs(CLASS_PACKET_LISTENER)
.resultAccessor("Missing match: NMS.Packet -> Method: public void process(NMS.PacketListener)"));
@NotNull
@SuppressWarnings("ConstantConditions")
static Object getPlayerHandle(@NotNull Player player) {
return METHOD_CRAFT_ENTITY_GET_HANDLE.get().invoke(player);
}
static void validatePacket(Object packet) {
if (!CLASS_PACKET.isInstance(packet))
throw new IllegalArgumentException("Value type of the instance does not match. (Expected: " + CLASS_PACKET + ")");
}
static void sendPacket0(Object packet, Player receiver) {
Object handle = getPlayerHandle(receiver);
Object connection = FIELD_ENTITY_PLAYER_CONNECTION.get().get(handle);
METHOD_PLAYER_CONNECTION_SEND_PACKET.get().invoke(connection, packet);
}
static void processPacket0(Object packet, Player sender) {
Object handle = getPlayerHandle(sender);
Object connection = FIELD_ENTITY_PLAYER_CONNECTION.get().get(handle);
METHOD_PACKET_PROCESS.get().invoke(packet, connection);
}
@Contract("null, _ -> fail")
static Player[] nearbyPlayers(Location center, double range) {
Objects.requireNonNull(center, "center");
World world = center.getWorld();
if (world == null)
throw new IllegalArgumentException("The world at this location is a null.");
List<Player> result = new ArrayList<>();
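    // Compare squared distances to avoid a sqrt per player; the radius is
    // clamped so that ranges below one block still match adjacent players.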
double squared = range < 1.0 ? 1.0 : range * range;
for (Player player : world.getPlayers()) {
if (player.getLocation().distanceSquared(center) <= squared)
result.add(player);
}
return result.toArray(new Player[0]);
}
/// Public API
@Contract("null, _ -> fail; _, null -> fail")
public static void sendPacket(Object packet, Player receiver) {
validatePacket(packet);
Objects.requireNonNull(receiver, "receiver");
sendPacket0(packet, receiver);
}
@Contract("null, _ -> fail; _, null -> fail")
public static void sendPacket(Object packet, Player[] receivers) {
validatePacket(packet);
Objects.requireNonNull(receivers, "receivers");
for (Player receiver : receivers)
sendPacket0(packet, receiver);
}
@Contract("null -> fail")
public static void sendPacketToAll(Object packet) {
validatePacket(packet);
sendPacket(packet, Bukkit.getOnlinePlayers().toArray(new Player[0]));
}
@Contract("null, _, _ -> fail; _, null, _ -> fail")
public static void sendPacketToNearby(Object packet, Location center, double range) {
validatePacket(packet);
Player[] players = nearbyPlayers(center, range);
sendPacket(packet, players);
}
@Contract("null, _ -> fail; _, null -> fail")
public static void processPacket(Object packet, Player sender) {
validatePacket(packet);
Objects.requireNonNull(sender, "sender");
processPacket0(packet, sender);
}
@Contract("null, _ -> fail; _, null -> fail")
public static void processPacket(Object packet, Player[] senders) {
validatePacket(packet);
Objects.requireNonNull(senders, "senders");
for (Player sender : senders)
processPacket0(packet, sender);
}
@Contract("null -> fail")
public static void processPacketToAll(Object packet) {
validatePacket(packet);
processPacket(packet, Bukkit.getOnlinePlayers().toArray(new Player[0]));
}
@Contract("null, _, _ -> fail; _, null, _ -> fail")
public static void processPacketToNearby(Object packet, Location center, double range) {
validatePacket(packet);
Player[] players = nearbyPlayers(center, range);
processPacket(packet, players);
}
}
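// A usage sketch, assuming the caller already holds an NMS packet instance
// (constructing one is version-specific and not shown here):
//
//   Object packet = ...; // any object whose class extends NMS.Packet
//   PacketFactory.sendPacket(packet, player);                      // one receiver
//   PacketFactory.sendPacketToNearby(packet, player.getLocation(), 16.0);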
|
package com.github.jknack.handlebars;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class CollectionsLengthTest extends AbstractTest {
public static class SizeAndLength {
int size;
int length;
public int getSize() {
return size;
}
public void setSize(int size) {
this.size = size;
}
public int getLength() {
return length;
}
public void setLength(int length) {
this.length = length;
}
}
@Test
public void collectionLengthTest() throws IOException {
List<String> list = new ArrayList<>();
list.add("a");
shouldCompileTo("{{this.length}}", list, "1");
}
@Test
public void otherClassSizeAndLength() throws IOException {
SizeAndLength sizeAndLength = new SizeAndLength();
sizeAndLength.length = 5;
sizeAndLength.size = 4;
shouldCompileTo("{{this.length}}", sizeAndLength, "5");
shouldCompileTo("{{this.size}}", sizeAndLength, "4");
}
}
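/*
 * Note: as the tests above document, the template engine resolves {{this.length}} on a
 * java.util.Collection to its size (mirroring JavaScript arrays), while a plain bean
 * such as SizeAndLength keeps its own length and size getters untouched.
 */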
|
/*
* Copyright 2015 Ripple OSI
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.rippleosi.patient.mdtreports.model;
import java.util.Date;
/**
 * Summary representation of a multi-disciplinary team (MDT) report.
 */
public class MDTReportSummary {
private String source;
private String sourceId;
private String serviceTeam;
private Date dateOfRequest;
private Date dateOfMeeting;
public String getSource() {
return source;
}
public void setSource(String source) {
this.source = source;
}
public String getSourceId() {
return sourceId;
}
public void setSourceId(String sourceId) {
this.sourceId = sourceId;
}
public String getServiceTeam() {
return serviceTeam;
}
public void setServiceTeam(String serviceTeam) {
this.serviceTeam = serviceTeam;
}
public Date getDateOfRequest() {
return dateOfRequest;
}
public void setDateOfRequest(Date dateOfRequest) {
this.dateOfRequest = dateOfRequest;
}
public Date getDateOfMeeting() {
return dateOfMeeting;
}
public void setDateOfMeeting(Date dateOfMeeting) {
this.dateOfMeeting = dateOfMeeting;
}
}
|
/*
* Copyright 2015 United States Government, as represented by the Administrator
* of the National Aeronautics and Space Administration. All Rights Reserved.
* 2017-2021 The jConstraints Authors
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.constraints.types;
import gov.nasa.jpf.constraints.casts.CastOperation;
import gov.nasa.jpf.constraints.exceptions.ImpreciseRepresentationException;
public class NamedSort implements Type<Void> {
private String name;
public NamedSort(String name) {
this.name = name;
}
@Override
public String getName() {
return name;
}
@Override
public String[] getOtherNames() {
return new String[0];
}
@Override
public Class<Void> getCanonicalClass() {
return null;
}
@Override
public Class<?>[] getOtherClasses() {
return new Class[0];
}
@Override
public Void cast(Object other) {
return null;
}
@Override
public Void getDefaultValue() {
return null;
}
@Override
public Type<?> getSuperType() {
return null;
}
@Override
public <O> CastOperation<? super O, ? extends Void> cast(Type<O> fromType) {
return null;
}
@Override
public <O> CastOperation<? super O, ? extends Void> requireCast(Type<O> fromType) {
return null;
}
@Override
public Void parse(String string) throws ImpreciseRepresentationException {
return null;
}
}
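/*
 * Usage sketch (assumption: NamedSort models a declared/uninterpreted sort, which is why
 * there is no canonical Java class, cast operation, or default value and those methods
 * return null by design):
 *
 *   Type<Void> color = new NamedSort("Color");
 *   // Variable.create(color, "c") would then declare a variable of that sort, assuming
 *   // the usual jConstraints Variable factory.
 */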
|
package codeine.db.mysql.connectors;
import codeine.db.ProjectsConfigurationConnector;
import codeine.db.mysql.DbUtils;
import codeine.jsons.global.ExperimentalConfJsonStore;
import codeine.jsons.project.ProjectJson;
import com.google.common.base.Function;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import org.apache.log4j.Logger;
import javax.inject.Inject;
import java.sql.ResultSet;
import java.util.Map;
public class ProjectsConfigurationMysqlConnector implements ProjectsConfigurationConnector {
private static final Logger log = Logger.getLogger(ProjectsConfigurationMysqlConnector.class);
@Inject
private DbUtils dbUtils;
@Inject
private Gson gson;
@Inject private ExperimentalConfJsonStore webConfJsonStore;
private static final String TABLE_NAME = "ProjectsConfiguration";
public ProjectsConfigurationMysqlConnector() {
super();
}
public ProjectsConfigurationMysqlConnector(DbUtils dbUtils, Gson gson, ExperimentalConfJsonStore webConfJsonStore) {
super();
this.dbUtils = dbUtils;
this.gson = gson;
this.webConfJsonStore = webConfJsonStore;
}
public void createTables() {
if (webConfJsonStore.get().readonly_web_server()) {
log.info("read only mode");
return;
}
String colsDefinition = "project_name CHAR(100) NOT NULL PRIMARY KEY, data MEDIUMTEXT";
dbUtils.executeUpdate("create table if not exists " + TABLE_NAME + " (" + colsDefinition + ")");
}
@Override
public Map<String, ProjectJson> getAllProjects() {
final Map<String, ProjectJson> $ = Maps.newHashMap();
Function<ResultSet, Void> function = input -> {
String json = null;
String project = null;
try {
project = input.getString("project_name");
json = input.getString("data");
$.put(project, gson.fromJson(json, ProjectJson.class));
} catch (Exception e) {
log.error("json is " + json);
log.error("failed to read project from database " + project, e);
}
return null;
};
dbUtils.executeQuery("select * from " + TABLE_NAME, function);
return $;
}
@Override
public void updateProject(ProjectJson project){
log.info("updating project in database " + project.name() + " " + dbUtils);
if (webConfJsonStore.get().readonly_web_server()) {
log.info("read only mode");
return;
}
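// REPLACE INTO deletes any existing row with the same primary key before inserting,
// so MySQL reports 1 affected row for a fresh insert and 2 for a replacement;
// 0 therefore means nothing was written.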
int executeUpdate = dbUtils.executeUpdate("REPLACE INTO "+TABLE_NAME+" (project_name, data) VALUES (?, ?)", project.name(), gson.toJson(project));
if (executeUpdate == 0) {
throw new RuntimeException("failed to update project " + project.name());
}
}
@Override
public void deleteProject(ProjectJson project) {
log.info("deleting project from database " + project.name());
if (webConfJsonStore.get().readonly_web_server()) {
log.info("read only mode");
return;
}
int deleted = dbUtils.executeUpdate("DELETE FROM " + TABLE_NAME + " WHERE project_name = ?", project.name());
if (deleted == 0) {
throw new RuntimeException("failed to delete project " + project.name());
}
}
@Override
public String getKey() {
return dbUtils.server();
}
}
|
package skrelpoid.orderjson;
import java.util.Comparator;
import com.badlogic.gdx.math.MathUtils;
import com.badlogic.gdx.utils.JsonValue;
public class JsonComparator implements Comparator<JsonValue> {
// Returns 0 if o1 and o2 are equal, -1 if o1's name comes before o2's name
// alphabetically, and 1 if o1's name comes after o2's name alphabetically.
@Override
public int compare(JsonValue o1, JsonValue o2) {
int compareValue = o1.name().compareTo(o2.name());
// limit return value to -1, 0 and 1
return MathUtils.clamp(compareValue, -1, 1);
}
}
|
package us.ihmc.robotics.alphaToAlpha;
import static us.ihmc.robotics.Assert.*;
import org.junit.jupiter.api.Test;
import us.ihmc.yoVariables.registry.YoRegistry;
import us.ihmc.yoVariables.variable.YoDouble;
/**
* Created by Peter on 9/11/2016.
*/
public class YoMiniJerkUpAndDownAlphaToAlphaTest
{
private static final double EPSILON = 1e-6;
@Test
public void testInvalidYoVariables()
{
YoRegistry registry = new YoRegistry("dummy");
YoDouble startOfRampUp = new YoDouble("startOfRampUp", registry);
YoDouble endOfRamp = new YoDouble("endOfRamp", registry);
YoDouble startOfRampDown = new YoDouble("startOfRampDown", registry);
YoDouble endOfRampDown = new YoDouble("endOfRampDown", registry);
YoMiniJerkUpAndDownAlphaToAlpha yoMiniJerkUpAndDownAlphaToAlpha = new YoMiniJerkUpAndDownAlphaToAlpha(startOfRampUp, endOfRamp, startOfRampDown, endOfRampDown);
testRangeOfAlphas(0.0, yoMiniJerkUpAndDownAlphaToAlpha);
startOfRampUp.set(0.1);
testRangeOfAlphas(0.0, yoMiniJerkUpAndDownAlphaToAlpha);
endOfRamp.set(0.2);
testRangeOfAlphas(0.0, yoMiniJerkUpAndDownAlphaToAlpha);
startOfRampDown.set(0.3);
testRangeOfAlphas(0.0, yoMiniJerkUpAndDownAlphaToAlpha);
endOfRampDown.set(1.0);
testRangeOfAlphas(0.0, yoMiniJerkUpAndDownAlphaToAlpha);
endOfRampDown.set(0.9);
startOfRampDown.set(0.95);
testRangeOfAlphas(0.0, yoMiniJerkUpAndDownAlphaToAlpha);
}
@Test
public void testValidYoVariables()
{
YoRegistry registry = new YoRegistry("dummy");
YoDouble startOfRampUp = new YoDouble("startOfRampUp", registry);
YoDouble endOfRamp = new YoDouble("endOfRamp", registry);
YoDouble startOfRampDown = new YoDouble("startOfRampDown", registry);
YoDouble endOfRampDown = new YoDouble("endOfRampDown", registry);
YoMiniJerkUpAndDownAlphaToAlpha yoVariableRampUpAndDownAlphaToAlpha = new YoMiniJerkUpAndDownAlphaToAlpha(startOfRampUp, endOfRamp, startOfRampDown, endOfRampDown);
startOfRampUp.set(0.1);
endOfRamp.set(0.3);
startOfRampDown.set(0.5);
endOfRampDown.set(0.7);
double value;
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.1);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.2);
assertEquals(value, 0.5, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.3);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.4);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.5);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.6);
assertEquals(value, 0.5, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.7);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.8);
assertEquals(value, 0.0, EPSILON);
startOfRampUp.set(0.8);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.1);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.2);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.3);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.4);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.5);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.6);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.7);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.8);
assertEquals(value, 0.0, EPSILON);
}
@Test
public void testHalfWay()
{
YoRegistry registry = new YoRegistry("dummy");
YoDouble startOfRampUp = new YoDouble("startOfRampUp", registry);
YoDouble endOfRamp = new YoDouble("endOfRamp", registry);
YoDouble startOfRampDown = new YoDouble("startOfRampDown", registry);
YoDouble endOfRampDown = new YoDouble("endOfRampDown", registry);
YoMiniJerkUpAndDownAlphaToAlpha yoVariableRampUpAndDownAlphaToAlpha = new YoMiniJerkUpAndDownAlphaToAlpha(startOfRampUp, endOfRamp, startOfRampDown, endOfRampDown);
startOfRampUp.set(0.1);
endOfRamp.set(0.3);
startOfRampDown.set(0.5);
endOfRampDown.set(0.7);
double value;
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.1);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.2);
assertEquals(value, 0.5, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.3);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.4);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.5);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.6);
assertEquals(value, 0.5, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.7);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.8);
assertEquals(value, 0.0, EPSILON);
}
@Test
public void testSmallDifferences()
{
YoRegistry registry = new YoRegistry("dummy");
YoDouble startOfRampUp = new YoDouble("startOfRampUp", registry);
YoDouble endOfRamp = new YoDouble("endOfRamp", registry);
YoDouble startOfRampDown = new YoDouble("startOfRampDown", registry);
YoDouble endOfRampDown = new YoDouble("endOfRampDown", registry);
YoMiniJerkUpAndDownAlphaToAlpha yoVariableRampUpAndDownAlphaToAlpha = new YoMiniJerkUpAndDownAlphaToAlpha(startOfRampUp, endOfRamp, startOfRampDown, endOfRampDown);
startOfRampUp.set(0.1);
endOfRamp.set(startOfRampUp.getDoubleValue() + EPSILON);
startOfRampDown.set(0.5);
endOfRampDown.set(startOfRampDown.getDoubleValue() + EPSILON);
double value;
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.1);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.1 + EPSILON/2.0);
assertEquals(value, 0.5, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.3);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.5);
assertEquals(value, 1.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.5 + EPSILON/2.0);
assertEquals(value, 0.5, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.7);
assertEquals(value, 0.0, EPSILON);
value = yoVariableRampUpAndDownAlphaToAlpha.getAlphaPrime(0.8);
assertEquals(value, 0.0, EPSILON);
}
private void testRangeOfAlphas(double expectedValue, YoMiniJerkUpAndDownAlphaToAlpha yoMiniJerkUpAndDownAlphaToAlpha)
{
for (double alpha = -1.0; alpha < 2.0; alpha += 0.001)
{
double value = yoMiniJerkUpAndDownAlphaToAlpha.getAlphaPrime(alpha);
assertEquals(value, expectedValue, EPSILON);
}
}
}
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.common.util.collections;
import java.util.AbstractQueue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.Consumer;
/**
* This implements a {@link BlockingQueue} backed by an array with no fixed capacity.
*
* <p>When the capacity is reached, data will be moved to a bigger array.
*/
public class GrowableArrayBlockingQueue<T> extends AbstractQueue<T> implements BlockingQueue<T> {
private final ReentrantLock headLock = new ReentrantLock();
private final PaddedInt headIndex = new PaddedInt();
private final PaddedInt tailIndex = new PaddedInt();
private final ReentrantLock tailLock = new ReentrantLock();
private final Condition isNotEmpty = headLock.newCondition();
private T[] data;
@SuppressWarnings("rawtypes")
private static final AtomicIntegerFieldUpdater<GrowableArrayBlockingQueue> SIZE_UPDATER = AtomicIntegerFieldUpdater
.newUpdater(GrowableArrayBlockingQueue.class, "size");
private volatile int size = 0;
public GrowableArrayBlockingQueue() {
this(64);
}
@SuppressWarnings("unchecked")
public GrowableArrayBlockingQueue(int initialCapacity) {
headIndex.value = 0;
tailIndex.value = 0;
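// Round the requested capacity up to a power of two so head/tail wraparound can use a
// cheap bit mask ((index + 1) & (data.length - 1)) instead of a modulo.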
int capacity = io.netty.util.internal.MathUtil.findNextPositivePowerOfTwo(initialCapacity);
data = (T[]) new Object[capacity];
}
@Override
public T remove() {
T item = poll();
if (item == null) {
throw new NoSuchElementException();
}
return item;
}
@Override
public T poll() {
headLock.lock();
try {
if (SIZE_UPDATER.get(this) > 0) {
T item = data[headIndex.value];
data[headIndex.value] = null;
headIndex.value = (headIndex.value + 1) & (data.length - 1);
SIZE_UPDATER.decrementAndGet(this);
return item;
} else {
return null;
}
} finally {
headLock.unlock();
}
}
@Override
public T element() {
T item = peek();
if (item == null) {
throw new NoSuchElementException();
}
return item;
}
@Override
public T peek() {
headLock.lock();
try {
if (SIZE_UPDATER.get(this) > 0) {
return data[headIndex.value];
} else {
return null;
}
} finally {
headLock.unlock();
}
}
@Override
public boolean offer(T e) {
// Queue is unbounded and it will never reject new items
put(e);
return true;
}
@Override
public void put(T e) {
tailLock.lock();
boolean wasEmpty = false;
try {
if (SIZE_UPDATER.get(this) == data.length) {
expandArray();
}
data[tailIndex.value] = e;
tailIndex.value = (tailIndex.value + 1) & (data.length - 1);
if (SIZE_UPDATER.getAndIncrement(this) == 0) {
wasEmpty = true;
}
} finally {
tailLock.unlock();
}
if (wasEmpty) {
headLock.lock();
try {
isNotEmpty.signal();
} finally {
headLock.unlock();
}
}
}
@Override
public boolean add(T e) {
put(e);
return true;
}
@Override
public boolean offer(T e, long timeout, TimeUnit unit) {
// Queue is unbounded and it will never reject new items
put(e);
return true;
}
@Override
public T take() throws InterruptedException {
headLock.lockInterruptibly();
try {
while (SIZE_UPDATER.get(this) == 0) {
isNotEmpty.await();
}
T item = data[headIndex.value];
data[headIndex.value] = null;
headIndex.value = (headIndex.value + 1) & (data.length - 1);
if (SIZE_UPDATER.decrementAndGet(this) > 0) {
// There are still entries to consume
isNotEmpty.signal();
}
return item;
} finally {
headLock.unlock();
}
}
@Override
public T poll(long timeout, TimeUnit unit) throws InterruptedException {
headLock.lockInterruptibly();
try {
long timeoutNanos = unit.toNanos(timeout);
while (SIZE_UPDATER.get(this) == 0) {
if (timeoutNanos <= 0) {
return null;
}
timeoutNanos = isNotEmpty.awaitNanos(timeoutNanos);
}
T item = data[headIndex.value];
data[headIndex.value] = null;
headIndex.value = (headIndex.value + 1) & (data.length - 1);
if (SIZE_UPDATER.decrementAndGet(this) > 0) {
// There are still entries to consume
isNotEmpty.signal();
}
return item;
} finally {
headLock.unlock();
}
}
@Override
public int remainingCapacity() {
return Integer.MAX_VALUE;
}
@Override
public int drainTo(Collection<? super T> c) {
return drainTo(c, Integer.MAX_VALUE);
}
@Override
public int drainTo(Collection<? super T> c, int maxElements) {
headLock.lock();
try {
int drainedItems = 0;
int size = SIZE_UPDATER.get(this);
while (size > 0 && drainedItems < maxElements) {
T item = data[headIndex.value];
data[headIndex.value] = null;
c.add(item);
headIndex.value = (headIndex.value + 1) & (data.length - 1);
--size;
++drainedItems;
}
if (SIZE_UPDATER.addAndGet(this, -drainedItems) > 0) {
// There are still entries to consume
isNotEmpty.signal();
}
return drainedItems;
} finally {
headLock.unlock();
}
}
@Override
public void clear() {
headLock.lock();
try {
int size = SIZE_UPDATER.get(this);
for (int i = 0; i < size; i++) {
data[headIndex.value] = null;
headIndex.value = (headIndex.value + 1) & (data.length - 1);
}
if (SIZE_UPDATER.addAndGet(this, -size) > 0) {
// There are still entries to consume
isNotEmpty.signal();
}
} finally {
headLock.unlock();
}
}
@Override
public boolean remove(Object o) {
tailLock.lock();
headLock.lock();
try {
int index = this.headIndex.value;
int size = this.size;
for (int i = 0; i < size; i++) {
T item = data[index];
if (Objects.equals(item, o)) {
remove(index);
return true;
}
index = (index + 1) & (data.length - 1);
}
} finally {
headLock.unlock();
tailLock.unlock();
}
return false;
}
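// Removes the element at the given physical array index and compacts the circular
// buffer around it. Callers must hold both tailLock and headLock.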
private void remove(int index) {
int tailIndex = this.tailIndex.value;
if (index < tailIndex) {
System.arraycopy(data, index + 1, data, index, tailIndex - index - 1);
this.tailIndex.value--;
} else {
System.arraycopy(data, index + 1, data, index, data.length - index - 1);
data[data.length - 1] = data[0];
if (tailIndex > 0) {
System.arraycopy(data, 1, data, 0, tailIndex);
this.tailIndex.value--;
} else {
this.tailIndex.value = data.length - 1;
}
}
if (tailIndex > 0) {
data[tailIndex - 1] = null;
} else {
data[data.length - 1] = null;
}
SIZE_UPDATER.decrementAndGet(this);
}
@Override
public int size() {
return SIZE_UPDATER.get(this);
}
@Override
public Iterator<T> iterator() {
throw new UnsupportedOperationException();
}
public List<T> toList() {
List<T> list = new ArrayList<>(size());
forEach(list::add);
return list;
}
@Override
public void forEach(Consumer<? super T> action) {
tailLock.lock();
headLock.lock();
try {
int headIndex = this.headIndex.value;
int size = this.size;
for (int i = 0; i < size; i++) {
T item = data[headIndex];
action.accept(item);
headIndex = (headIndex + 1) & (data.length - 1);
}
} finally {
headLock.unlock();
tailLock.unlock();
}
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
tailLock.lock();
headLock.lock();
try {
int headIndex = this.headIndex.value;
int size = SIZE_UPDATER.get(this);
sb.append('[');
for (int i = 0; i < size; i++) {
T item = data[headIndex];
if (i > 0) {
sb.append(", ");
}
sb.append(item);
headIndex = (headIndex + 1) & (data.length - 1);
}
sb.append(']');
} finally {
headLock.unlock();
tailLock.unlock();
}
return sb.toString();
}
@SuppressWarnings("unchecked")
private void expandArray() {
// We already hold the tailLock
headLock.lock();
try {
int size = SIZE_UPDATER.get(this);
int newCapacity = data.length * 2;
T[] newData = (T[]) new Object[newCapacity];
int oldHeadIndex = headIndex.value;
int newTailIndex = 0;
for (int i = 0; i < size; i++) {
newData[newTailIndex++] = data[oldHeadIndex];
oldHeadIndex = (oldHeadIndex + 1) & (data.length - 1);
}
data = newData;
headIndex.value = 0;
tailIndex.value = size;
} finally {
headLock.unlock();
}
}
final static class PaddedInt {
private int value;
// Padding to avoid false sharing
public volatile int pi1 = 1;
public volatile long p1 = 1L, p2 = 2L, p3 = 3L, p4 = 4L, p5 = 5L, p6 = 6L;
public long exposeToAvoidOptimization() {
return pi1 + p1 + p2 + p3 + p4 + p5 + p6;
}
}
}
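/*
 * Usage sketch (a minimal producer/consumer pairing; the queue grows on demand, so
 * put() never blocks and offer() never rejects):
 *
 *   GrowableArrayBlockingQueue<Runnable> tasks = new GrowableArrayBlockingQueue<>();
 *   tasks.put(() -> System.out.println("work"));   // producer: grows the array when full
 *   Runnable next = tasks.take();                  // consumer: blocks while empty
 *   next.run();
 */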
|
package org.openstack.atlas.rax.domain.helper;
import org.openstack.atlas.api.v1.extensions.rax.NetworkItem;
import org.openstack.atlas.api.v1.extensions.rax.NetworkItemType;
import org.openstack.atlas.rax.domain.entity.RaxAccessList;
import java.util.Set;
public final class ExtensionConverter {
public static org.openstack.atlas.api.v1.extensions.rax.AccessList convertAccessList(Set<RaxAccessList> accessListSet) {
org.openstack.atlas.api.v1.extensions.rax.AccessList dataModelAccessList = new org.openstack.atlas.api.v1.extensions.rax.AccessList();
for (RaxAccessList accessList : accessListSet) {
NetworkItem networkItem = new NetworkItem();
networkItem.setId(accessList.getId());
networkItem.setAddress(accessList.getIpAddress());
//networkItem.setIpVersion(org.openstack.atlas.api.v1.extensions.rax.IpVersion.fromValue(accessList.getIpVersion().name()));
networkItem.setType(NetworkItemType.fromValue(accessList.getType().name()));
dataModelAccessList.getNetworkItems().add(networkItem);
}
return dataModelAccessList;
}
}
|
package uk.gov.digital.ho.hocs.audit.export;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.junit.MockitoJUnitRunner;
import java.util.LinkedList;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
@RunWith(MockitoJUnitRunner.class)
public class HeaderConverterTest {
private HeaderConverter converter;
@Before
public void before() {
converter = new HeaderConverter();
}
@Test
public void convertDataHandlesEmptyArray() {
List<String> substitutedHeaders = converter.substitute(new LinkedList<>());
assertThat(substitutedHeaders).isNotNull();
assertThat(substitutedHeaders.size()).isEqualTo(0);
}
@Test
public void convertDataHandlesNull() {
assertThatThrownBy(() -> {
converter.substitute(null);
}).isInstanceOf(NullPointerException.class);
}
@Test
public void substituteExistingHeadersWithOrder() {
List<String> headers = Stream.of("title1", "title2", "title3", "title4").collect(Collectors.toList());
String[] expectedHeaders = new String[]{"New Title 1", "New Title 2", "New Title 3", "New Title 4"};
List<String> substitutedHeaders = converter.substitute(headers);
assertThat(substitutedHeaders).containsExactly(expectedHeaders);
}
@Test
public void substituteNonExistingHeadersWithOrder() {
List<String> headers = Stream.of("NonTitle1", "NonTitle2", "NonTitle3", "NonTitle4").collect(Collectors.toList());
String[] expectedHeaders = new String[]{"NonTitle1", "NonTitle2", "NonTitle3", "NonTitle4"};
List<String> substitutedHeaders = converter.substitute(headers);
assertThat(substitutedHeaders).containsExactly(expectedHeaders);
}
@Test
public void substituteExistingAndNonExistingMixedHeadersWithOrder() {
List<String> headers = Stream.of("NonTitle1", "title1", "title2", "NonTitle2", "NonTitle3", "title3", "NonTitle4", "title4")
.collect(Collectors.toList());
String[] expectedHeaders = new String[]{"NonTitle1", "New Title 1", "New Title 2", "NonTitle2", "NonTitle3",
"New Title 3", "NonTitle4", "New Title 4"};
List<String> substitutedHeaders = converter.substitute(headers);
assertThat(substitutedHeaders).containsExactly(expectedHeaders);
}
}
|
/*
* Copyright 2003 - 2017 The eFaps Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.efaps.esjp.sales.dashboard;
import java.math.BigDecimal;
import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Properties;
import java.util.TreeMap;
import org.apache.commons.lang3.BooleanUtils;
import org.apache.commons.lang3.EnumUtils;
import org.efaps.admin.event.Parameter;
import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;
import org.efaps.api.ui.IEsjpSnipplet;
import org.efaps.db.Instance;
import org.efaps.db.QueryBuilder;
import org.efaps.esjp.ci.CIERP;
import org.efaps.esjp.common.dashboard.AbstractDashboardPanel;
import org.efaps.esjp.common.datetime.JodaTimeUtils;
import org.efaps.esjp.common.parameter.ParameterUtil;
import org.efaps.esjp.erp.AbstractGroupedByDate_Base.DateGroup;
import org.efaps.esjp.erp.Currency;
import org.efaps.esjp.erp.NumberFormatter;
import org.efaps.esjp.erp.RateInfo;
import org.efaps.esjp.sales.report.PaymentSumReport;
import org.efaps.esjp.sales.report.PaymentSumReport_Base.DataBean;
import org.efaps.esjp.sales.report.PaymentSumReport_Base.DynPaymentSumReport;
import org.efaps.esjp.sales.report.PaymentSumReport_Base.PayDoc;
import org.efaps.esjp.ui.html.dojo.charting.Axis;
import org.efaps.esjp.ui.html.dojo.charting.ColumnsChart;
import org.efaps.esjp.ui.html.dojo.charting.Data;
import org.efaps.esjp.ui.html.dojo.charting.Orientation;
import org.efaps.esjp.ui.html.dojo.charting.PlotLayout;
import org.efaps.esjp.ui.html.dojo.charting.Serie;
import org.efaps.esjp.ui.html.dojo.charting.Util;
import org.efaps.util.EFapsException;
import org.joda.time.DateTime;
import net.sf.jasperreports.engine.data.JRBeanCollectionDataSource;
/**
* The Class PaymentPanel_Base.
*
* @author The eFaps Team
*/
@EFapsUUID("9a493043-0ce3-4c36-959b-f4712c62429c")
@EFapsApplication("eFapsApp-Sales")
public abstract class PaymentPanel_Base
extends AbstractDashboardPanel
implements IEsjpSnipplet
{
/** The Constant serialVersionUID. */
private static final long serialVersionUID = 1L;
/**
* Instantiates a new payment panel.
*/
public PaymentPanel_Base()
{
super();
}
/**
* Instantiates a new payment panel.
*
* @param _config the _config
*/
public PaymentPanel_Base(final String _config)
{
super(_config);
}
/**
* Gets the currency inst.
*
* @return the currency inst
* @throws EFapsException on error
*/
protected Instance getCurrencyInst()
throws EFapsException
{
return Instance.get(getConfig().getProperty("CurrencyOID", Currency.getBaseCurrency().getOid()));
}
/**
* Checks if is filter.
*
* @return true, if is filter
*/
protected boolean isFilter()
{
return BooleanUtils.toBoolean(getConfig().getProperty("Filter4Currency", "true"));
}
/**
 * Gets the number of date-group periods to include.
 *
 * @return the quantity
 */
protected Integer getQuantity()
{
return Integer.valueOf(getConfig().getProperty("Quantity", "6"));
}
/**
* Gets the pay doc.
*
* @return the pay doc
*/
protected PayDoc getPayDoc()
{
return EnumUtils.getEnum(PayDoc.class, getConfig().getProperty("PayDoc", "BOTH"));
}
/**
* Gets the date group.
*
* @return the date group
*/
protected DateGroup getDateGroup()
{
return EnumUtils.getEnum(DateGroup.class, getConfig().getProperty("DateGroup", "MONTH"));
}
/**
* Checks if is date group start.
*
* @return true, if is date group start
*/
protected boolean isDateGroupStart()
{
return "true".equalsIgnoreCase(getConfig().getProperty("DateGroupStart", "false"));
}
@Override
public CharSequence getHtmlSnipplet()
throws EFapsException
{
final CharSequence ret;
if (isCached()) {
ret = getFromCache();
} else {
final Instance currencyInst = getCurrencyInst();
final boolean currencyFilter = isFilter();
final Map<String, Serie<Data>> series = new TreeMap<>();
final Map<String, Map<String, BigDecimal>> values = new TreeMap<>();
final Map<String, BigDecimal> totals = new HashMap<>();
final JRBeanCollectionDataSource source = new DynDataBean(this).getData();
for (final Object data: source.getData()) {
final DataBean bean = (DataBean) data;
final BigDecimal amount;
if (bean.getRateCurrencyInst().equals(currencyInst)) {
amount = bean.getAmount();
} else {
if (currencyFilter) {
amount = BigDecimal.ZERO;
} else {
RateInfo rateinfo = RateInfo.getRateInfo(bean.getRate());
if (rateinfo.getCurrencyInstance().equals(Currency.getBaseCurrency())
&& rateinfo.getCurrencyInstance().equals(rateinfo.getTargetCurrencyInstance())) {
rateinfo = new Currency().evaluateRateInfos(ParameterUtil.instance(), bean.getDate(),
rateinfo.getCurrencyInstance(), currencyInst)[2];
}
amount = Currency.convertToCurrency(ParameterUtil.instance(), bean.getAmount(),
rateinfo, null, currencyInst);
}
}
if (amount.compareTo(BigDecimal.ZERO) != 0) {
final Map<String, BigDecimal> map;
if (values.containsKey(bean.getPartial())) {
map = values.get(bean.getPartial());
} else {
map = new TreeMap<>();
values.put(bean.getPartial(), map);
}
final BigDecimal val;
if (map.containsKey(bean.getPaymentDocType())) {
val = map.get(bean.getPaymentDocType());
} else {
val = BigDecimal.ZERO;
}
map.put(bean.getPaymentDocType(), val.add(amount));
final BigDecimal total = totals.containsKey(bean.getPartial())
? totals.get(bean.getPartial()) : BigDecimal.ZERO;
totals.put(bean.getPartial(), total.add(amount));
if (!series.containsKey(bean.getPaymentDocType())) {
final Serie<Data> serie = new Serie<>();
serie.setName(bean.getPaymentDocType());
series.put(bean.getPaymentDocType(), serie);
}
}
}
final ColumnsChart chart = new ColumnsChart().setPlotLayout(PlotLayout.STACKED)
.setGap(5).setWidth(getWidth()).setHeight(getHeight());
chart.getPlots().get("default").addConfig("stroke", "{color: \"grey\", width: 1}");
final String title = getTitle();
if (title != null && !title.isEmpty()) {
chart.setTitle(getTitle());
}
chart.setOrientation(Orientation.VERTICAL_CHART_LEGEND);
final Axis xAxis = new Axis().setName("x").setMinorTicks(false);
chart.addAxis(xAxis);
final List<Map<String, Object>> labels = new ArrayList<>();
int x = 1;
final DecimalFormat fmtr = NumberFormatter.get().getTwoDigitsFormatter();
for (final Entry<String, Map<String, BigDecimal>> entry : values.entrySet()) {
final Map<String, Object> map = new HashMap<>();
map.put("value", x);
map.put("text", Util.wrap4String(entry.getKey()));
labels.add(map);
for (final Entry<String, Serie<Data>> serieEntry : series.entrySet()) {
if (x == 1) {
chart.addSerie(serieEntry.getValue());
}
final BigDecimal value;
if (entry.getValue().containsKey(serieEntry.getKey())) {
value = entry.getValue().get(serieEntry.getKey());
} else {
value = BigDecimal.ZERO;
}
final Data dataTmp = new Data().setSimple(false);
dataTmp.setXValue(null);
dataTmp.setYValue(value.intValue());
final StringBuilder toolTip = new StringBuilder().append(fmtr.format(value))
.append("/").append(fmtr.format(totals.get(entry.getKey())))
.append(" ").append(serieEntry.getValue().getName());
dataTmp.setTooltip(toolTip.toString());
serieEntry.getValue().addData(dataTmp);
}
x++;
}
if (!labels.isEmpty()) {
xAxis.setLabels(Util.mapCollectionToObjectArray(labels));
}
ret = chart.getHtmlSnipplet();
cache(ret);
}
return ret;
}
@Override
public boolean isVisible()
throws EFapsException
{
return true;
}
/**
* The Class DynDataBean.
*
*/
public static class DynDataBean
extends DynPaymentSumReport
{
/** The panel. */
private final PaymentPanel_Base panel;
/**
* Instantiates a new dyn data bean.
*
* @param _panel the panel
*/
public DynDataBean(final PaymentPanel_Base _panel)
{
super(new PaymentSumReport() {
@Override
public boolean isCached(final Parameter _parameter)
throws EFapsException
{
return false;
}
});
this.panel = _panel;
}
/**
* Gets the data.
*
* @return the data
* @throws EFapsException on error
*/
protected JRBeanCollectionDataSource getData()
throws EFapsException
{
return (JRBeanCollectionDataSource) super.createDataSource(ParameterUtil.instance());
}
@Override
protected Properties getProperties(final Parameter _parameter)
throws EFapsException
{
return this.panel.getConfig();
}
@Override
protected PayDoc getPayDoc(final Parameter _parameter)
throws EFapsException
{
return this.panel.getPayDoc();
}
@Override
protected DateGroup getDateGroup(final Parameter _parameter)
throws EFapsException
{
return this.panel.getDateGroup();
}
@Override
protected void add2QueryBuilder(final Parameter _parameter,
final QueryBuilder _queryBldr)
throws EFapsException
{
final DateTime dateFrom;
switch (this.panel.getDateGroup()) {
case YEAR:
dateFrom = this.panel.isDateGroupStart()
? new DateTime().minusYears(this.panel.getQuantity()).withDayOfYear(1)
: new DateTime().minusYears(this.panel.getQuantity());
break;
case HALFYEAR:
final DateTime hyDate = new DateTime().withFieldAdded(JodaTimeUtils.halfYears(),
-this.panel.getQuantity());
if (this.panel.isDateGroupStart()) {
dateFrom = hyDate.getMonthOfYear() < 7
? hyDate.withDayOfMonth(1).withMonthOfYear(1)
: hyDate.withDayOfMonth(1).withMonthOfYear(7);
} else {
dateFrom = hyDate;
}
break;
case QUARTER:
final DateTime qDate = new DateTime().withFieldAdded(JodaTimeUtils.quarters(),
-this.panel.getQuantity());
if (this.panel.isDateGroupStart()) {
if (qDate.getMonthOfYear() < 4) {
dateFrom = qDate.withDayOfMonth(1).withMonthOfYear(1);
} else if (qDate.getMonthOfYear() < 7) {
dateFrom = qDate.withDayOfMonth(1).withMonthOfYear(4);
} else if (qDate.getMonthOfYear() < 10) {
dateFrom = qDate.withDayOfMonth(1).withMonthOfYear(7);
} else {
dateFrom = qDate.withDayOfMonth(1).withMonthOfYear(10);
}
} else {
dateFrom = qDate;
}
break;
case WEEK:
dateFrom = this.panel.isDateGroupStart()
? new DateTime().minusWeeks(this.panel.getQuantity()).withDayOfWeek(1)
: new DateTime().minusWeeks(this.panel.getQuantity());
break;
case DAY:
dateFrom = new DateTime().minusDays(this.panel.getQuantity());
break;
case MONTH:
default:
dateFrom = this.panel.isDateGroupStart() ? new DateTime().minusMonths(this.panel.getQuantity())
.withDayOfMonth(1) : new DateTime().minusMonths(this.panel.getQuantity());
break;
}
_queryBldr.addWhereAttrGreaterValue(CIERP.DocumentAbstract.Date, dateFrom.withTimeAtStartOfDay()
.minusMinutes(1));
}
}
}
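/*
 * Example panel configuration (a sketch; the keys mirror the getters above, the values
 * shown are the illustrative defaults from those getters):
 *
 *   CurrencyOID=<oid>        # target currency; defaults to the base currency
 *   Filter4Currency=true     # true: ignore foreign-currency amounts instead of converting
 *   Quantity=6               # number of date-group periods to chart
 *   PayDoc=BOTH              # which payment document kinds to include (see PayDoc enum)
 *   DateGroup=MONTH          # grouping: YEAR, HALFYEAR, QUARTER, MONTH, WEEK or DAY
 *   DateGroupStart=false     # true: align the date range to the start of the first period
 */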
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.codestarnotifications;
import javax.annotation.Generated;
import com.amazonaws.services.codestarnotifications.model.*;
/**
* Abstract implementation of {@code AWSCodeStarNotificationsAsync}. Convenient method forms pass through to the
* corresponding overload that takes a request object and an {@code AsyncHandler}, which throws an
* {@code UnsupportedOperationException}.
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AbstractAWSCodeStarNotificationsAsync extends AbstractAWSCodeStarNotifications implements AWSCodeStarNotificationsAsync {
protected AbstractAWSCodeStarNotificationsAsync() {
}
@Override
public java.util.concurrent.Future<CreateNotificationRuleResult> createNotificationRuleAsync(CreateNotificationRuleRequest request) {
return createNotificationRuleAsync(request, null);
}
@Override
public java.util.concurrent.Future<CreateNotificationRuleResult> createNotificationRuleAsync(CreateNotificationRuleRequest request,
com.amazonaws.handlers.AsyncHandler<CreateNotificationRuleRequest, CreateNotificationRuleResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteNotificationRuleResult> deleteNotificationRuleAsync(DeleteNotificationRuleRequest request) {
return deleteNotificationRuleAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteNotificationRuleResult> deleteNotificationRuleAsync(DeleteNotificationRuleRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteNotificationRuleRequest, DeleteNotificationRuleResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DeleteTargetResult> deleteTargetAsync(DeleteTargetRequest request) {
return deleteTargetAsync(request, null);
}
@Override
public java.util.concurrent.Future<DeleteTargetResult> deleteTargetAsync(DeleteTargetRequest request,
com.amazonaws.handlers.AsyncHandler<DeleteTargetRequest, DeleteTargetResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<DescribeNotificationRuleResult> describeNotificationRuleAsync(DescribeNotificationRuleRequest request) {
return describeNotificationRuleAsync(request, null);
}
@Override
public java.util.concurrent.Future<DescribeNotificationRuleResult> describeNotificationRuleAsync(DescribeNotificationRuleRequest request,
com.amazonaws.handlers.AsyncHandler<DescribeNotificationRuleRequest, DescribeNotificationRuleResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListEventTypesResult> listEventTypesAsync(ListEventTypesRequest request) {
return listEventTypesAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListEventTypesResult> listEventTypesAsync(ListEventTypesRequest request,
com.amazonaws.handlers.AsyncHandler<ListEventTypesRequest, ListEventTypesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListNotificationRulesResult> listNotificationRulesAsync(ListNotificationRulesRequest request) {
return listNotificationRulesAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListNotificationRulesResult> listNotificationRulesAsync(ListNotificationRulesRequest request,
com.amazonaws.handlers.AsyncHandler<ListNotificationRulesRequest, ListNotificationRulesResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request) {
return listTagsForResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListTagsForResourceResult> listTagsForResourceAsync(ListTagsForResourceRequest request,
com.amazonaws.handlers.AsyncHandler<ListTagsForResourceRequest, ListTagsForResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<ListTargetsResult> listTargetsAsync(ListTargetsRequest request) {
return listTargetsAsync(request, null);
}
@Override
public java.util.concurrent.Future<ListTargetsResult> listTargetsAsync(ListTargetsRequest request,
com.amazonaws.handlers.AsyncHandler<ListTargetsRequest, ListTargetsResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<SubscribeResult> subscribeAsync(SubscribeRequest request) {
return subscribeAsync(request, null);
}
@Override
public java.util.concurrent.Future<SubscribeResult> subscribeAsync(SubscribeRequest request,
com.amazonaws.handlers.AsyncHandler<SubscribeRequest, SubscribeResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request) {
return tagResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<TagResourceResult> tagResourceAsync(TagResourceRequest request,
com.amazonaws.handlers.AsyncHandler<TagResourceRequest, TagResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UnsubscribeResult> unsubscribeAsync(UnsubscribeRequest request) {
return unsubscribeAsync(request, null);
}
@Override
public java.util.concurrent.Future<UnsubscribeResult> unsubscribeAsync(UnsubscribeRequest request,
com.amazonaws.handlers.AsyncHandler<UnsubscribeRequest, UnsubscribeResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request) {
return untagResourceAsync(request, null);
}
@Override
public java.util.concurrent.Future<UntagResourceResult> untagResourceAsync(UntagResourceRequest request,
com.amazonaws.handlers.AsyncHandler<UntagResourceRequest, UntagResourceResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
@Override
public java.util.concurrent.Future<UpdateNotificationRuleResult> updateNotificationRuleAsync(UpdateNotificationRuleRequest request) {
return updateNotificationRuleAsync(request, null);
}
@Override
public java.util.concurrent.Future<UpdateNotificationRuleResult> updateNotificationRuleAsync(UpdateNotificationRuleRequest request,
com.amazonaws.handlers.AsyncHandler<UpdateNotificationRuleRequest, UpdateNotificationRuleResult> asyncHandler) {
throw new java.lang.UnsupportedOperationException();
}
}
|
/**
* Copyright 2019 Adobe Systems Incorporated. All rights reserved.
* This file is licensed to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
* OF ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*
*
**/
/*
*
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 6.0.0
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package io.swagger.client.model.widgets;
import java.util.Objects;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import io.swagger.client.model.widgets.ParticipantSecurityOption;
import java.io.IOException;
/**
* ParticipantSetMemberInfo
*/
@javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaClientCodegen", date = "2019-03-11T15:50:01.583+05:30")
public class ParticipantSetMemberInfo {
@SerializedName("email")
private String email = null;
@SerializedName("securityOption")
private ParticipantSecurityOption securityOption = null;
public ParticipantSetMemberInfo email(String email) {
this.email = email;
return this;
}
/**
* Email of the participant
* @return email
**/
@ApiModelProperty(value = "Email of the participant")
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public ParticipantSetMemberInfo securityOption(ParticipantSecurityOption securityOption) {
this.securityOption = securityOption;
return this;
}
/**
* Security options that apply to the participant
* @return securityOption
**/
@ApiModelProperty(value = "Security options that apply to the participant")
public ParticipantSecurityOption getSecurityOption() {
return securityOption;
}
public void setSecurityOption(ParticipantSecurityOption securityOption) {
this.securityOption = securityOption;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
ParticipantSetMemberInfo participantSetMemberInfo = (ParticipantSetMemberInfo) o;
return Objects.equals(this.email, participantSetMemberInfo.email) &&
Objects.equals(this.securityOption, participantSetMemberInfo.securityOption);
}
@Override
public int hashCode() {
return Objects.hash(email, securityOption);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class ParticipantSetMemberInfo {\n");
sb.append(" email: ").append(toIndentedString(email)).append("\n");
sb.append(" securityOption: ").append(toIndentedString(securityOption)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.ads.googleads.examples.advancedoperations;
import com.beust.jcommander.Parameter;
import com.google.ads.googleads.examples.utils.ArgumentNames;
import com.google.ads.googleads.examples.utils.CodeSampleParams;
import com.google.ads.googleads.lib.GoogleAdsClient;
import com.google.ads.googleads.v2.common.GmailAdInfo;
import com.google.ads.googleads.v2.common.GmailTeaser;
import com.google.ads.googleads.v2.enums.AdGroupAdStatusEnum.AdGroupAdStatus;
import com.google.ads.googleads.v2.enums.MediaTypeEnum.MediaType;
import com.google.ads.googleads.v2.enums.MimeTypeEnum.MimeType;
import com.google.ads.googleads.v2.errors.GoogleAdsError;
import com.google.ads.googleads.v2.errors.GoogleAdsException;
import com.google.ads.googleads.v2.resources.Ad;
import com.google.ads.googleads.v2.resources.AdGroupAd;
import com.google.ads.googleads.v2.resources.MediaFile;
import com.google.ads.googleads.v2.resources.MediaImage;
import com.google.ads.googleads.v2.services.AdGroupAdOperation;
import com.google.ads.googleads.v2.services.AdGroupAdServiceClient;
import com.google.ads.googleads.v2.services.MediaFileOperation;
import com.google.ads.googleads.v2.services.MediaFileServiceClient;
import com.google.ads.googleads.v2.services.MutateAdGroupAdsResponse;
import com.google.ads.googleads.v2.services.MutateMediaFileResult;
import com.google.ads.googleads.v2.services.MutateMediaFilesResponse;
import com.google.ads.googleads.v2.utils.ResourceNames;
import com.google.common.collect.ImmutableList;
import com.google.common.io.ByteStreams;
import com.google.protobuf.ByteString;
import com.google.protobuf.BytesValue;
import com.google.protobuf.StringValue;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.HashMap;
import java.util.Map;
/**
* Adds a Gmail ad to a given ad group. The ad group's campaign needs to have an
* AdvertisingChannelType of DISPLAY and AdvertisingChannelSubType of DISPLAY_GMAIL_AD.
*/
public class AddGmailAd {
private static class AddGmailAdParams extends CodeSampleParams {
@Parameter(names = ArgumentNames.CUSTOMER_ID)
private Long customerId;
@Parameter(names = ArgumentNames.AD_GROUP_ID)
private Long adGroupId;
}
public static void main(String[] args) throws IOException {
AddGmailAdParams params = new AddGmailAdParams();
if (!params.parseArguments(args)) {
// Either pass the required parameters for this example on the command line, or insert them
// into the code here. See the parameter class definition above for descriptions.
params.customerId = Long.parseLong("INSERT_CUSTOMER_ID_HERE");
params.adGroupId = Long.parseLong("INSERT_AD_GROUP_ID_HERE");
}
GoogleAdsClient googleAdsClient;
try {
googleAdsClient = GoogleAdsClient.newBuilder().fromPropertiesFile().build();
} catch (FileNotFoundException fnfe) {
System.err.printf(
"Failed to load GoogleAdsClient configuration from file. Exception: %s%n", fnfe);
return;
} catch (IOException ioe) {
System.err.printf("Failed to create GoogleAdsClient. Exception: %s%n", ioe);
return;
}
try {
new AddGmailAd().runExample(googleAdsClient, params.customerId, params.adGroupId);
} catch (GoogleAdsException gae) {
// GoogleAdsException is the base class for most exceptions thrown by an API request.
// Instances of this exception have a message and a GoogleAdsFailure that contains a
// collection of GoogleAdsErrors that indicate the underlying causes of the
// GoogleAdsException.
System.err.printf(
"Request ID %s failed due to GoogleAdsException. Underlying errors:%n",
gae.getRequestId());
int i = 0;
for (GoogleAdsError googleAdsError : gae.getGoogleAdsFailure().getErrorsList()) {
System.err.printf(" Error %d: %s%n", i++, googleAdsError);
}
}
}
/**
* Runs the example.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param adGroupId the ad group ID.
* @throws GoogleAdsException if an API request failed with one or more service errors.
* @throws IOException if there is an error opening the image files.
*/
private void runExample(GoogleAdsClient googleAdsClient, long customerId, long adGroupId)
throws IOException {
Map<String, String> mediaFiles = addMediaFiles(googleAdsClient, customerId);
addGmailAd(googleAdsClient, customerId, adGroupId, mediaFiles);
}
/**
* Adds the image files.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @throws GoogleAdsException if an API request failed with one or more service errors.
* @throws IOException if there is an error opening the image files.
* @return a hash map of the image file resource names.
*/
private Map<String, String> addMediaFiles(GoogleAdsClient googleAdsClient, long customerId)
throws IOException {
// Creates a bytes array from the logo image data.
byte[] logoImageData = ByteStreams.toByteArray(new URL("https://goo.gl/mtt54n").openStream());
// Creates the logo image.
MediaFile mediaFileLogo =
MediaFile.newBuilder()
.setType(MediaType.IMAGE)
.setImage(
MediaImage.newBuilder()
.setData(BytesValue.of(ByteString.copyFrom(logoImageData)))
.build())
.setMimeType(MimeType.IMAGE_PNG)
.build();
// Creates the operation for the logo image.
MediaFileOperation mediaFileLogoOperation =
MediaFileOperation.newBuilder().setCreate(mediaFileLogo).build();
// Creates a bytes array from the marketing image data.
byte[] marketingImageData =
ByteStreams.toByteArray(new URL("https://goo.gl/3b9Wfh").openStream());
// Creates the marketing image.
MediaFile mediaFileMarketing =
MediaFile.newBuilder()
.setType(MediaType.IMAGE)
.setImage(
MediaImage.newBuilder()
.setData(BytesValue.of(ByteString.copyFrom(marketingImageData)))
.build())
.setMimeType(MimeType.IMAGE_JPEG)
.build();
// Creates the operation for the marketing image.
MediaFileOperation mediaFileMarketingOperation =
MediaFileOperation.newBuilder().setCreate(mediaFileMarketing).build();
// Creates the media file service client.
try (MediaFileServiceClient mediaFileServiceClient =
googleAdsClient.getLatestVersion().createMediaFileServiceClient()) {
// Adds the media files.
MutateMediaFilesResponse response =
mediaFileServiceClient.mutateMediaFiles(
Long.toString(customerId),
ImmutableList.of(mediaFileLogoOperation, mediaFileMarketingOperation));
// Displays the results.
for (MutateMediaFileResult result : response.getResultsList()) {
System.out.printf(
"Created media file with resource name '%s'.%n", result.getResourceName());
}
// Creates a map of the media files to return.
Map<String, String> mediaFiles = new HashMap<>();
mediaFiles.put("logoResourceName", response.getResults(0).getResourceName());
mediaFiles.put("marketingImageResourceName", response.getResults(1).getResourceName());
return mediaFiles;
}
}
/**
* Adds the Gmail ad.
*
* @param googleAdsClient the Google Ads API client.
* @param customerId the client customer ID.
* @param adGroupId the ad group ID.
* @param mediaFiles a map with keys of unique string identifiers and values of media file
* resource names.
* @throws GoogleAdsException if an API request failed with one or more service errors.
*/
private void addGmailAd(
GoogleAdsClient googleAdsClient,
long customerId,
long adGroupId,
Map<String, String> mediaFiles) {
// Creates the Gmail ad info.
GmailAdInfo gmailAdInfo =
GmailAdInfo.newBuilder()
// Sets the teaser information.
.setTeaser(
GmailTeaser.newBuilder()
.setHeadline(StringValue.of("Dream"))
.setDescription(StringValue.of("Create your own adventure"))
.setBusinessName(StringValue.of("Interplanetary Ships"))
.setLogoImage(StringValue.of(mediaFiles.get("logoResourceName")))
.build())
// Sets the marketing image and other information.
.setMarketingImage(StringValue.of(mediaFiles.get("marketingImageResourceName")))
.setMarketingImageHeadline(StringValue.of("Travel"))
.setMarketingImageDescription(StringValue.of("Take to the skies!"))
.build();
// Creates the ad.
Ad ad =
Ad.newBuilder()
.setName(StringValue.of("Gmail Ad #" + System.currentTimeMillis()))
.addFinalUrls(StringValue.of("http://www.example.com"))
.setGmailAd(gmailAdInfo)
.build();
// Gets the ad group resource name.
String adGroupResourceName = ResourceNames.adGroup(customerId, adGroupId);
// Creates the ad group ad.
AdGroupAd adGroupAd =
AdGroupAd.newBuilder()
.setAd(ad)
.setStatus(AdGroupAdStatus.PAUSED)
.setAdGroup(StringValue.of(adGroupResourceName))
.build();
// Creates the operation.
AdGroupAdOperation operation = AdGroupAdOperation.newBuilder().setCreate(adGroupAd).build();
// Creates the ad group ad service client.
try (AdGroupAdServiceClient adGroupAdServiceClient =
googleAdsClient.getLatestVersion().createAdGroupAdServiceClient()) {
MutateAdGroupAdsResponse response =
adGroupAdServiceClient.mutateAdGroupAds(
Long.toString(customerId), ImmutableList.of(operation));
// Displays the results.
System.out.printf(
"Created ad group ad with resource name '%s'.%n",
response.getResults(0).getResourceName());
}
}
}
|
/**
* Copyright (c) 2001-2014 Mathew A. Nelson and Robocode contributors
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://robocode.sourceforge.net/license/epl-v10.html
*/
package net.sf.robocode.test.robots;
import net.sf.robocode.test.helpers.RobocodeTestBed;
import org.junit.Assert;
import org.junit.Test;
import robocode.control.events.BattleFinishedEvent;
import robocode.control.events.TurnEndedEvent;
import robocode.control.snapshot.IScoreSnapshot;
import robocode.control.snapshot.ITurnSnapshot;
/**
* @author Flemming N. Larsen (original)
*/
public class TestFairPlay extends RobocodeTestBed {
ITurnSnapshot lastTurnSnapshot;
@Test
public void run() {
super.run();
}
    @Override
    public String getRobotNames() {
return "tested.robots.FairPlay,tested.robots.FairPlay";
}
@Override
public int getNumRounds() {
return 200;
}
@Override
public void onTurnEnded(TurnEndedEvent event) {
super.onTurnEnded(event);
lastTurnSnapshot = event.getTurnSnapshot();
}
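    // Two identical FairPlay robots should end up with nearly identical scores
    // over 200 rounds; the assertions below allow at most 5% relative skew.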
@Override
public void onBattleFinished(BattleFinishedEvent event) {
final IScoreSnapshot[] scores = lastTurnSnapshot.getSortedTeamScores();
final IScoreSnapshot score1 = scores[0];
final IScoreSnapshot score2 = scores[1];
double minTotal = Math.min(score1.getTotalScore(), score2.getTotalScore());
double maxTotal = Math.max(score1.getTotalScore(), score2.getTotalScore());
Assert.assertTrue("The total scores should be almost the same", maxTotal / minTotal <= 1.05);
double minFirsts = Math.min(score1.getTotalFirsts(), score2.getTotalFirsts());
double maxFirsts = Math.max(score1.getTotalFirsts(), score2.getTotalFirsts());
Assert.assertTrue("The total firsts should be almost the same", maxFirsts / minFirsts <= 1.05);
}
}
|
package com.airmap.airmapsdk.util;
import android.support.annotation.Nullable;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class CopyCollections {
@Nullable
public static <T> ArrayList<T> copy(@Nullable List<T> list) {
return list != null ? new ArrayList<>(list) : null;
}
@Nullable
public static <T> HashSet<T> copy(@Nullable Set<T> set) {
return set != null ? new HashSet<>(set) : null;
}
}
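// Illustrative usage sketch (not part of the original class):
// List<String> names = Arrays.asList("a", "b");
// ArrayList<String> copy = CopyCollections.copy(names);                // independent copy
// HashSet<String> nothing = CopyCollections.copy((Set<String>) null); // null-safe, returns null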
|
package com.qingchi.base.repository.user;
import com.qingchi.base.config.redis.RedisKeysConst;
import com.qingchi.base.model.user.IdentityImgDO;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.data.jpa.repository.JpaRepository;
import java.util.List;
import java.util.Optional;
public interface IdentityImgRepository extends JpaRepository<IdentityImgDO, Long> {
@CacheEvict(cacheNames = RedisKeysConst.userById, key = "#identityImg.userId")
IdentityImgDO save(IdentityImgDO identityImg);
Optional<IdentityImgDO> findFirstByUserIdAndStatusOrderByIdDesc(Integer userId, String status);
List<IdentityImgDO> findFirst20ByOrderByIdDesc();
// List<IdentityImgDO> findFirst20ByUserGenderInOrderByUserFaceValueDesc(List<String> genders, Pageable pageable);
List<IdentityImgDO> findFirst600ByOrderByIdDesc();
}
|
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.incrementalinstall;
import android.app.Application;
import android.app.Instrumentation;
import android.content.Context;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.content.pm.PackageManager.NameNotFoundException;
import android.os.Bundle;
import android.util.Log;
import dalvik.system.DexFile;
import java.io.File;
import java.lang.ref.WeakReference;
import java.util.List;
import java.util.Map;
/**
* An Application that replaces itself with another Application (as defined in
 * an AndroidManifest.xml meta-data tag). It loads the other application only
* after side-loading its .so and .dex files from /data/local/tmp.
*
* This class is highly dependent on the private implementation details of
* Android's ActivityThread.java. However, it has been tested to work with
* JellyBean through Marshmallow.
*/
public final class BootstrapApplication extends Application {
private static final String TAG = "incrementalinstall";
private static final String MANAGED_DIR_PREFIX = "/data/local/tmp/incremental-app-";
private static final String REAL_APP_META_DATA_NAME = "incremental-install-real-app";
private static final String REAL_INSTRUMENTATION_META_DATA_NAME0 =
"incremental-install-real-instrumentation-0";
private static final String REAL_INSTRUMENTATION_META_DATA_NAME1 =
"incremental-install-real-instrumentation-1";
private ClassLoaderPatcher mClassLoaderPatcher;
private Application mRealApplication;
private Instrumentation mOrigInstrumentation;
private Instrumentation mRealInstrumentation;
private Object mStashedProviderList;
private Object mActivityThread;
public static DexFile[] sIncrementalDexFiles; // Needed by junit test runner.
@Override
protected void attachBaseContext(Context context) {
super.attachBaseContext(context);
try {
mActivityThread = Reflect.invokeMethod(Class.forName("android.app.ActivityThread"),
"currentActivityThread");
mClassLoaderPatcher = new ClassLoaderPatcher(context);
mOrigInstrumentation =
(Instrumentation) Reflect.getField(mActivityThread, "mInstrumentation");
Context instContext = mOrigInstrumentation.getContext();
if (instContext == null) {
instContext = context;
}
// When running with an instrumentation that lives in a different package from the
            // application, we must load the dex files and native libraries from both packages.
// This logic likely won't work when the instrumentation is incremental, but the app is
// non-incremental. This configuration isn't used right now though.
String appPackageName = getPackageName();
String instPackageName = instContext.getPackageName();
boolean instPackageNameDiffers = !appPackageName.equals(instPackageName);
Log.i(TAG, "App PackageName: " + appPackageName);
if (instPackageNameDiffers) {
Log.i(TAG, "Inst PackageName: " + instPackageName);
}
File appIncrementalRootDir = new File(MANAGED_DIR_PREFIX + appPackageName);
File appLibDir = new File(appIncrementalRootDir, "lib");
File appDexDir = new File(appIncrementalRootDir, "dex");
File appInstallLockFile = new File(appIncrementalRootDir, "install.lock");
File appFirstRunLockFile = new File(appIncrementalRootDir, "firstrun.lock");
File instIncrementalRootDir = new File(MANAGED_DIR_PREFIX + instPackageName);
File instLibDir = new File(instIncrementalRootDir, "lib");
File instDexDir = new File(instIncrementalRootDir, "dex");
File instInstallLockFile = new File(instIncrementalRootDir, "install.lock");
File instFirstRunLockFile = new File(instIncrementalRootDir, "firstrun.lock");
boolean isFirstRun = LockFile.installerLockExists(appFirstRunLockFile)
|| (instPackageNameDiffers
&& LockFile.installerLockExists(instFirstRunLockFile));
if (isFirstRun) {
if (mClassLoaderPatcher.mIsPrimaryProcess) {
// Wait for incremental_install.py to finish.
LockFile.waitForInstallerLock(appInstallLockFile, 30 * 1000);
LockFile.waitForInstallerLock(instInstallLockFile, 30 * 1000);
} else {
// Wait for the browser process to create the optimized dex files
// and copy the library files.
LockFile.waitForInstallerLock(appFirstRunLockFile, 60 * 1000);
LockFile.waitForInstallerLock(instFirstRunLockFile, 60 * 1000);
}
}
mClassLoaderPatcher.importNativeLibs(instLibDir);
sIncrementalDexFiles = mClassLoaderPatcher.loadDexFiles(instDexDir, instPackageName);
if (instPackageNameDiffers) {
mClassLoaderPatcher.importNativeLibs(appLibDir);
mClassLoaderPatcher.loadDexFiles(appDexDir, appPackageName);
}
if (isFirstRun && mClassLoaderPatcher.mIsPrimaryProcess) {
LockFile.clearInstallerLock(appFirstRunLockFile);
if (instPackageNameDiffers) {
LockFile.clearInstallerLock(instFirstRunLockFile);
}
}
// mInstrumentationAppDir is one of a set of fields that is initialized only when
// instrumentation is active.
if (Reflect.getField(mActivityThread, "mInstrumentationAppDir") != null) {
String metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME0;
if (mOrigInstrumentation instanceof SecondInstrumentation) {
metaDataName = REAL_INSTRUMENTATION_META_DATA_NAME1;
}
mRealInstrumentation =
initInstrumentation(getClassNameFromMetadata(metaDataName, instContext));
} else {
Log.i(TAG, "No instrumentation active.");
}
// Even when instrumentation is not enabled, ActivityThread uses a default
// Instrumentation instance internally. We hook it here in order to hook into the
// call to Instrumentation.onCreate().
BootstrapInstrumentation bootstrapInstrumentation = new BootstrapInstrumentation(this);
            populateInstrumentationFields(bootstrapInstrumentation);
Reflect.setField(mActivityThread, "mInstrumentation", bootstrapInstrumentation);
// attachBaseContext() is called from ActivityThread#handleBindApplication() and
// Application#mApplication is changed right after we return. Thus, we cannot swap
// the Application instances until onCreate() is called.
String realApplicationName = getClassNameFromMetadata(REAL_APP_META_DATA_NAME, context);
Log.i(TAG, "Instantiating " + realApplicationName);
Instrumentation anyInstrumentation =
mRealInstrumentation != null ? mRealInstrumentation : mOrigInstrumentation;
mRealApplication = anyInstrumentation.newApplication(
getClassLoader(), realApplicationName, context);
// Between attachBaseContext() and onCreate(), ActivityThread tries to instantiate
// all ContentProviders. The ContentProviders break without the correct Application
// class being installed, so temporarily pretend there are no providers, and then
// instantiate them explicitly within onCreate().
disableContentProviders();
Log.i(TAG, "Waiting for Instrumentation.onCreate");
} catch (Exception e) {
throw new RuntimeException("Incremental install failed.", e);
}
}
/**
* Returns the fully-qualified class name for the given key, stored in a
     * <meta-data> tag within the manifest.
*/
private static String getClassNameFromMetadata(String key, Context context)
throws NameNotFoundException {
String pkgName = context.getPackageName();
ApplicationInfo appInfo = context.getPackageManager().getApplicationInfo(pkgName,
PackageManager.GET_META_DATA);
String value = appInfo.metaData.getString(key);
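        // A bare name (no '.') is treated as relative to the application
        // package and expanded below, so manifest entries may use either form.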
if (value != null && !value.contains(".")) {
value = pkgName + "." + value;
}
return value;
}
/**
* Instantiates and initializes mRealInstrumentation (the real Instrumentation class).
*/
private Instrumentation initInstrumentation(String realInstrumentationName)
throws ReflectiveOperationException {
if (realInstrumentationName == null) {
// This is the case when an incremental app is used as a target for an instrumentation
// test. In this case, ActivityThread can instantiate the proper class just fine since
// it exists within the test apk (as opposed to the incremental apk-under-test).
Log.i(TAG, "Running with external instrumentation");
return null;
}
// For unit tests, the instrumentation class is replaced in the manifest by a build step
// because ActivityThread tries to instantiate it before we get a chance to load the
// incremental dex files.
Log.i(TAG, "Instantiating instrumentation " + realInstrumentationName);
Instrumentation ret =
(Instrumentation) Reflect.newInstance(Class.forName(realInstrumentationName));
        populateInstrumentationFields(ret);
return ret;
}
/**
* Sets important fields on a newly created Instrumentation object by copying them from the
* original Instrumentation instance.
*/
    private void populateInstrumentationFields(Instrumentation target)
throws ReflectiveOperationException {
// Initialize the fields that are set by Instrumentation.init().
String[] initFields = {"mAppContext", "mComponent", "mInstrContext", "mMessageQueue",
"mThread", "mUiAutomationConnection", "mWatcher"};
for (String fieldName : initFields) {
Reflect.setField(target, fieldName, Reflect.getField(mOrigInstrumentation, fieldName));
}
}
/**
* Called by BootstrapInstrumentation from Instrumentation.onCreate().
* This happens regardless of whether or not instrumentation is enabled.
*/
void onInstrumentationCreate(Bundle arguments) {
Log.i(TAG, "Instrumentation.onCreate() called. Swapping references.");
try {
swapApplicationReferences();
enableContentProviders();
if (mRealInstrumentation != null) {
Reflect.setField(mActivityThread, "mInstrumentation", mRealInstrumentation);
mRealInstrumentation.onCreate(arguments);
}
} catch (Exception e) {
throw new RuntimeException("Incremental install failed.", e);
}
}
@Override
public void onCreate() {
super.onCreate();
try {
Log.i(TAG, "Application.onCreate() called.");
mRealApplication.onCreate();
} catch (Exception e) {
throw new RuntimeException("Incremental install failed.", e);
}
}
/**
* Nulls out ActivityThread.mBoundApplication.providers.
*/
private void disableContentProviders() throws ReflectiveOperationException {
Object data = Reflect.getField(mActivityThread, "mBoundApplication");
mStashedProviderList = Reflect.getField(data, "providers");
Reflect.setField(data, "providers", null);
}
/**
* Restores the value of ActivityThread.mBoundApplication.providers, and invokes
* ActivityThread#installContentProviders().
*/
private void enableContentProviders() throws ReflectiveOperationException {
Object data = Reflect.getField(mActivityThread, "mBoundApplication");
Reflect.setField(data, "providers", mStashedProviderList);
if (mStashedProviderList != null && mClassLoaderPatcher.mIsPrimaryProcess) {
Log.i(TAG, "Instantiating content providers");
Reflect.invokeMethod(mActivityThread, "installContentProviders", mRealApplication,
mStashedProviderList);
}
mStashedProviderList = null;
}
/**
     * Changes all fields within framework classes that have stored a reference to this
* BootstrapApplication to instead store references to mRealApplication.
*/
@SuppressWarnings("unchecked")
private void swapApplicationReferences() throws ReflectiveOperationException {
if (Reflect.getField(mActivityThread, "mInitialApplication") == this) {
Reflect.setField(mActivityThread, "mInitialApplication", mRealApplication);
}
List<Application> allApplications =
(List<Application>) Reflect.getField(mActivityThread, "mAllApplications");
for (int i = 0; i < allApplications.size(); i++) {
if (allApplications.get(i) == this) {
allApplications.set(i, mRealApplication);
}
}
        // Contains a reference to BootstrapApplication and will cause BroadcastReceivers to fail
// if not replaced.
Context contextImpl = mRealApplication.getBaseContext();
Reflect.setField(contextImpl, "mOuterContext", mRealApplication);
for (String fieldName : new String[] {"mPackages", "mResourcePackages"}) {
Map<String, WeakReference<?>> packageMap =
(Map<String, WeakReference<?>>) Reflect.getField(mActivityThread, fieldName);
for (Map.Entry<String, WeakReference<?>> entry : packageMap.entrySet()) {
Object loadedApk = entry.getValue().get();
if (loadedApk != null && Reflect.getField(loadedApk, "mApplication") == this) {
Reflect.setField(loadedApk, "mApplication", mRealApplication);
}
}
}
}
}
|
package com.shiva.jenkvExamples;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import com.itextpdf.text.Anchor;
import com.itextpdf.text.Document;
import com.itextpdf.text.DocumentException;
import com.itextpdf.text.Paragraph;
import com.itextpdf.text.pdf.PdfWriter;
public class InternalAnchorLinks {
public static void main(String[] args) {
Document document = new Document();
try {
PdfWriter.getInstance(document,
new FileOutputStream("Anchor2.pdf"));
document.open();
Anchor anchor =
new Anchor("Jump down to next paragraph");
anchor.setReference("#linkTarget");
Paragraph paragraph = new Paragraph();
paragraph.add(anchor);
document.add(paragraph);
Anchor anchorTarget =
new Anchor("This is the target of the link above");
            // The name must be set on the target anchor, not on the source link.
            anchorTarget.setName("linkTarget");
Paragraph targetParagraph = new Paragraph();
targetParagraph.setSpacingBefore(50);
targetParagraph.add(anchorTarget);
document.add(targetParagraph);
document.close();
} catch (DocumentException e) {
e.printStackTrace();
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
}
|
package com.android.picshow.model;
import android.net.Uri;
public class SimpleMediaItem {
public Uri itemUrl;
public boolean isImage;
public String itemType;
public SimpleMediaItem(Uri u, boolean image, String type) {
itemUrl = u;
isImage = image;
itemType = type;
}
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.karaf.main.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.JarURLConnection;
import java.net.URI;
import java.net.URL;
import java.util.StringTokenizer;
public class Utils {
public static File getKarafHome(Class<?> mainClass, String karafHomeProperty, String karafHomeEnv) throws IOException {
File rc = null;
// Use the system property if specified.
String path = System.getProperty(karafHomeProperty);
if (path != null) {
rc = validateDirectoryExists(path, "Invalid " + karafHomeProperty + " system property", false, true);
}
if (rc == null) {
path = System.getenv(karafHomeEnv);
if (path != null) {
rc = validateDirectoryExists(path, "Invalid " + karafHomeEnv + " environment variable", false, true);
}
}
// Try to figure it out using the jar file this class was loaded from.
if (rc == null) {
// guess the home from the location of the jar
URL url = mainClass.getClassLoader().getResource(mainClass.getName().replace(".", "/") + ".class");
if (url != null) {
try {
JarURLConnection jarConnection = (JarURLConnection) url.openConnection();
url = jarConnection.getJarFileURL();
rc = new File(new URI(url.toString())).getCanonicalFile().getParentFile().getParentFile();
} catch (Exception ignored) {
}
}
}
if (rc == null) {
// Dig into the classpath to guess the location of the jar
String classpath = System.getProperty("java.class.path");
int index = classpath.toLowerCase().indexOf("karaf.jar");
int start = classpath.lastIndexOf(File.pathSeparator, index) + 1;
if (index >= start) {
String jarLocation = classpath.substring(start, index);
rc = new File(jarLocation).getCanonicalFile().getParentFile();
}
}
if (rc == null) {
throw new IOException("The Karaf install directory could not be determined. Please set the " + karafHomeProperty + " system property or the " + karafHomeEnv + " environment variable.");
}
return rc;
}
public static File validateDirectoryExists(String path, String errPrefix, boolean createDirectory, boolean validate) {
File rc;
try {
rc = new File(path).getCanonicalFile();
} catch (IOException e) {
throw new IllegalArgumentException(errPrefix + " '" + path + "' : " + e.getMessage());
}
if (!rc.exists() && !createDirectory && validate) {
throw new IllegalArgumentException(errPrefix + " '" + path + "' : does not exist");
}
if (!rc.exists() && createDirectory) {
try {
rc.mkdirs();
} catch (SecurityException se) {
throw new IllegalArgumentException(errPrefix + " '" + path + "' : " + se.getMessage());
}
}
if (rc.exists() && !rc.isDirectory()) {
throw new IllegalArgumentException(errPrefix + " '" + path + "' : is not a directory");
}
return rc;
}
public static File getKarafDirectory(String directoryProperty, String directoryEnvironmentVariable, File defaultValue, boolean create, boolean validate) {
File rc = null;
String path = System.getProperty(directoryProperty);
if (path != null) {
rc = validateDirectoryExists(path, "Invalid " + directoryProperty + " system property", create, validate);
}
if (rc == null) {
path = System.getenv(directoryEnvironmentVariable);
if (path != null && validate) {
rc = validateDirectoryExists(path, "Invalid " + directoryEnvironmentVariable + " environment variable", create, validate);
}
}
if (rc == null) {
rc = defaultValue;
}
return rc;
}
//-----------------------------------------------------------------------
/**
* Recursively delete a directory.
* @param directory directory to delete
* @throws IOException in case deletion is unsuccessful
*/
public static void deleteDirectory(File directory)
throws IOException {
if (!directory.exists()) {
return;
}
cleanDirectory(directory);
if (!directory.delete()) {
String message =
"Unable to delete directory " + directory + ".";
throw new IOException(message);
}
}
/**
* Clean a directory without deleting it.
* @param directory directory to clean
* @throws IOException in case cleaning is unsuccessful
*/
public static void cleanDirectory(File directory) throws IOException {
if (!directory.exists()) {
String message = directory + " does not exist";
throw new IllegalArgumentException(message);
}
if (!directory.isDirectory()) {
String message = directory + " is not a directory";
throw new IllegalArgumentException(message);
}
File[] files = directory.listFiles();
if (files == null) { // null if security restricted
throw new IOException("Failed to list contents of " + directory);
}
IOException exception = null;
for (File file : files) {
try {
forceDelete(file);
} catch (IOException ioe) {
exception = ioe;
}
}
if (null != exception) {
throw exception;
}
}
/**
* <p>
* Delete a file. If file is a directory, delete it and all sub-directories.
* </p>
* <p>
* The difference between File.delete() and this method are:
* </p>
* <ul>
* <li>A directory to be deleted does not have to be empty.</li>
* <li>You get exceptions when a file or directory cannot be deleted.
* (java.io.File methods returns a boolean)</li>
* </ul>
* @param file file or directory to delete.
* @throws IOException in case deletion is unsuccessful
*/
public static void forceDelete(File file) throws IOException {
if (file.isDirectory()) {
deleteDirectory(file);
} else {
if (!file.exists()) {
throw new FileNotFoundException("File does not exist: " + file);
}
if (!file.delete()) {
String message =
"Unable to delete file: " + file;
throw new IOException(message);
}
}
}
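    /**
     * Splits a {@code path|url} pair and, when conversion is requested, rewrites
     * a Maven-repository-layout path into an {@code mvn:} URL of the form
     * mvn:groupId/artifactId/version[/type[/classifier]], placing the mvn: URL
     * in the first slot of the returned array.
     */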
public static String[] convertToMavenUrlsIfNeeded(String location,
boolean convertToMavenUrls) {
String[] parts = location.split("\\|");
if (convertToMavenUrls) {
if (!parts[1].startsWith("mvn:")) {
String[] p = parts[1].split("/");
if (p.length >= 4
&& p[p.length - 1].startsWith(p[p.length - 3] + "-"
+ p[p.length - 2])) {
String artifactId = p[p.length - 3];
String version = p[p.length - 2];
String classifier;
String type;
String artifactIdVersion = artifactId + "-" + version;
StringBuilder sb = new StringBuilder();
if (p[p.length - 1].charAt(artifactIdVersion.length()) == '-') {
classifier = p[p.length - 1].substring(
artifactIdVersion.length() + 1,
p[p.length - 1].lastIndexOf('.'));
} else {
classifier = null;
}
type = p[p.length - 1].substring(p[p.length - 1]
.lastIndexOf('.') + 1);
sb.append("mvn:");
for (int j = 0; j < p.length - 3; j++) {
if (j > 0) {
sb.append('.');
}
sb.append(p[j]);
}
sb.append('/').append(artifactId).append('/').append(version);
if (!"jar".equals(type) || classifier != null) {
sb.append('/');
if (!"jar".equals(type)) {
sb.append(type);
}
if (classifier != null) {
sb.append('/').append(classifier);
}
}
parts[1] = parts[0];
parts[0] = sb.toString();
} else {
parts[1] = parts[0];
}
} else {
String tmp = parts[0];
parts[0] = parts[1];
parts[1] = tmp;
}
} else {
parts[1] = parts[0];
}
return parts;
}
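    /**
     * Returns the next whitespace-delimited token from the tokenizer, honouring
     * double quotes so a quoted location may contain spaces; returns null when
     * the tokenizer is exhausted.
     */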
public static String nextLocation(StringTokenizer st) {
String retVal = null;
if (st.countTokens() > 0) {
String tokenList = "\" ";
StringBuilder tokBuf = new StringBuilder(10);
String tok;
boolean inQuote = false;
boolean tokStarted = false;
boolean exit = false;
while ((st.hasMoreTokens()) && (!exit)) {
tok = st.nextToken(tokenList);
if (tok.equals("\"")) {
inQuote = !inQuote;
if (inQuote) {
tokenList = "\"";
} else {
tokenList = "\" ";
}
} else if (tok.equals(" ")) {
if (tokStarted) {
retVal = tokBuf.toString();
tokStarted = false;
tokBuf = new StringBuilder(10);
exit = true;
}
} else {
tokStarted = true;
tokBuf.append(tok.trim());
}
}
// Handle case where end of token stream and
// still got data
if ((!exit) && (tokStarted)) {
retVal = tokBuf.toString();
}
}
return retVal;
}
}
|
package ai.sangmado.gbprotocol.jt808db51chuanbiao.protocol.message.content.JT808DB51ChuanBiao_Message_Content_0x0200_Additional;
import ai.sangmado.gbprotocol.jt808.protocol.IVersionedSpecificationContext;
import ai.sangmado.gbprotocol.jt808.protocol.message.content.JT808_Message_Content_0x0200_Additional.JT808_Message_Content_0x0200_AdditionalInformation;
import ai.sangmado.gbprotocol.jt808.protocol.message.content.JT808_Message_Content_0x0200_Additional.JT808_Message_Content_0x0200_AdditionalInformationId;
import ai.sangmado.gbprotocol.jt808.protocol.serialization.IJT808MessageBufferReader;
import ai.sangmado.gbprotocol.jt808.protocol.serialization.IJT808MessageBufferWriter;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
/**
 * Terminal location report - additional location information - aggressive driving alarm information
*/
@NoArgsConstructor
public class JT808DB51ChuanBiao_Message_Content_0x0200_AI_0x70 extends JT808_Message_Content_0x0200_AdditionalInformation {
public static final JT808_Message_Content_0x0200_AdditionalInformationId ADDITIONAL_INFORMATION_ID =
JT808DB51ChuanBiao_Message_Content_0x0200_AdditionalInformationId.JT808DB51ChuanBiao_0x0200_0x70;
@Override
public JT808_Message_Content_0x0200_AdditionalInformationId getAdditionalInformationId() {
return ADDITIONAL_INFORMATION_ID;
}
/**
* xxx
*/
@Getter
@Setter
private Integer xxx;
@Override
public void serialize(IVersionedSpecificationContext ctx, IJT808MessageBufferWriter writer) {
}
@Override
public void deserialize(IVersionedSpecificationContext ctx, IJT808MessageBufferReader reader) {
}
public static JT808DB51ChuanBiao_Message_Content_0x0200_AI_0x70 decode(IVersionedSpecificationContext ctx, IJT808MessageBufferReader reader) {
JT808DB51ChuanBiao_Message_Content_0x0200_AI_0x70 content = new JT808DB51ChuanBiao_Message_Content_0x0200_AI_0x70();
content.deserialize(ctx, reader);
return content;
}
}
|
/*------------------------------------------------------------------------------
Copyright (c) CovertJaguar, 2011-2016
http://railcraft.info
This code is the property of CovertJaguar
and may only be used with explicit written
permission unless otherwise specified on the
license page at http://railcraft.info/wiki/info:license.
-----------------------------------------------------------------------------*/
package mods.railcraft.client.gui;
import mods.railcraft.common.blocks.single.TileEngineSteam;
import mods.railcraft.common.core.RailcraftConstants;
import mods.railcraft.common.gui.containers.ContainerEngineSteam;
import net.minecraft.entity.player.InventoryPlayer;
import net.minecraft.util.text.translation.I18n;
public class GuiEngineSteam extends TileGui {
private static final String OUTPUT = "%d RF";
private final TileEngineSteam tile;
public GuiEngineSteam(InventoryPlayer inv, TileEngineSteam tile) {
super(tile, new ContainerEngineSteam(inv, tile), RailcraftConstants.GUI_TEXTURE_FOLDER + "gui_engine_steam.png");
this.tile = tile;
}
@Override
protected void drawGuiContainerForegroundLayer(int mouseX, int mouseY) {
super.drawGuiContainerForegroundLayer(mouseX, mouseY);
fontRenderer.drawString(String.format(OUTPUT, Math.round(tile.getCurrentOutput())), 120, 40, 0x404040);
fontRenderer.drawString(I18n.translateToLocal("container.inventory"), 8, (ySize - 96) + 2, 0x404040);
}
}
|
/*
* Copyright 1999-2012 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/**
* (created at 2011-1-23)
*/
package fm.liu.timo.parser.ast.expression.primary.function.datetime;
import java.util.List;
import fm.liu.timo.parser.ast.expression.Expression;
import fm.liu.timo.parser.ast.expression.primary.function.FunctionExpression;
/**
* @author <a href="mailto:shuo.qius@alibaba-inc.com">QIU Shuo</a>
*/
public class Datediff extends FunctionExpression {
public Datediff(List<Expression> arguments) {
super("DATEDIFF", arguments);
}
@Override
public FunctionExpression constructFunction(List<Expression> arguments) {
return new Datediff(arguments);
}
}
|
/**
* Copyright (C) 2009-2016 Lightbend Inc. <http://www.lightbend.com>
*/
package akka.http.javadsl.model;
import akka.http.impl.util.Util;
import akka.http.scaladsl.model.MediaTypes$;
import java.util.Optional;
/**
* Contains the set of predefined media-types.
*/
public final class MediaTypes {
private MediaTypes() { }
public static final MediaType.WithOpenCharset APPLICATION_ATOM_XML = akka.http.scaladsl.model.MediaTypes.application$divatom$plusxml();
public static final MediaType.WithOpenCharset APPLICATION_BASE64 = akka.http.scaladsl.model.MediaTypes.application$divbase64();
public static final MediaType.Binary APPLICATION_EXCEL = akka.http.scaladsl.model.MediaTypes.application$divexcel();
public static final MediaType.Binary APPLICATION_FONT_WOFF = akka.http.scaladsl.model.MediaTypes.application$divfont$minuswoff();
public static final MediaType.Binary APPLICATION_GNUTAR = akka.http.scaladsl.model.MediaTypes.application$divgnutar();
public static final MediaType.Binary APPLICATION_JAVA_ARCHIVE = akka.http.scaladsl.model.MediaTypes.application$divjava$minusarchive();
public static final MediaType.WithOpenCharset APPLICATION_JAVASCRIPT = akka.http.scaladsl.model.MediaTypes.application$divjavascript();
public static final MediaType.WithFixedCharset APPLICATION_JSON = akka.http.scaladsl.model.MediaTypes.application$divjson();
public static final MediaType.WithFixedCharset APPLICATION_JSON_PATCH_JSON = akka.http.scaladsl.model.MediaTypes.application$divjson$minuspatch$plusjson();
public static final MediaType.Binary APPLICATION_LHA = akka.http.scaladsl.model.MediaTypes.application$divlha();
public static final MediaType.Binary APPLICATION_LZX = akka.http.scaladsl.model.MediaTypes.application$divlzx();
public static final MediaType.Binary APPLICATION_MSPOWERPOINT = akka.http.scaladsl.model.MediaTypes.application$divmspowerpoint();
public static final MediaType.Binary APPLICATION_MSWORD = akka.http.scaladsl.model.MediaTypes.application$divmsword();
public static final MediaType.Binary APPLICATION_OCTET_STREAM = akka.http.scaladsl.model.MediaTypes.application$divoctet$minusstream();
public static final MediaType.Binary APPLICATION_PDF = akka.http.scaladsl.model.MediaTypes.application$divpdf();
public static final MediaType.Binary APPLICATION_POSTSCRIPT = akka.http.scaladsl.model.MediaTypes.application$divpostscript();
public static final MediaType.WithOpenCharset APPLICATION_RSS_XML = akka.http.scaladsl.model.MediaTypes.application$divrss$plusxml();
public static final MediaType.WithOpenCharset APPLICATION_SOAP_XML = akka.http.scaladsl.model.MediaTypes.application$divsoap$plusxml();
public static final MediaType.WithFixedCharset APPLICATION_VND_API_JSON = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eapi$plusjson();
public static final MediaType.WithOpenCharset APPLICATION_VND_GOOGLE_EARTH_KML_XML = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Egoogle$minusearth$u002Ekml$plusxml();
public static final MediaType.Binary APPLICATION_VND_GOOGLE_EARTH_KMZ = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Egoogle$minusearth$u002Ekmz();
public static final MediaType.Binary APPLICATION_VND_MS_FONTOBJECT = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Ems$minusfontobject();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_CHART = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Echart();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_DATABASE = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Edatabase();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_FORMULA = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Eformula();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_GRAPHICS = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Egraphics();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_IMAGE = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Eimage();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_PRESENTATION = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Epresentation();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_SPREADSHEET = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Espreadsheet();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_TEXT = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Etext();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_TEXT_MASTER = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Etext$minusmaster();
public static final MediaType.Binary APPLICATION_VND_OASIS_OPENDOCUMENT_TEXT_WEB = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eoasis$u002Eopendocument$u002Etext$minusweb();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_PRESENTATIONML_PRESENTATION = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Epresentationml$u002Epresentation();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_PRESENTATIONML_SLIDE = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Epresentationml$u002Eslide();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_PRESENTATIONML_SLIDESHOW = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Epresentationml$u002Eslideshow();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_PRESENTATIONML_TEMPLATE = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Epresentationml$u002Etemplate();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_SPREADSHEETML_SHEET = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Espreadsheetml$u002Esheet();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_SPREADSHEETML_TEMPLATE = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Espreadsheetml$u002Etemplate();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_WORDPROCESSINGML_DOCUMENT = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Ewordprocessingml$u002Edocument();
public static final MediaType.Binary APPLICATION_VND_OPENXMLFORMATS_OFFICEDOCUMENT_WORDPROCESSINGML_TEMPLATE = akka.http.scaladsl.model.MediaTypes.application$divvnd$u002Eopenxmlformats$minusofficedocument$u002Ewordprocessingml$u002Etemplate();
public static final MediaType.Binary APPLICATION_X_7Z_COMPRESSED = akka.http.scaladsl.model.MediaTypes.application$divx$minus7z$minuscompressed();
public static final MediaType.Binary APPLICATION_X_ACE_COMPRESSED = akka.http.scaladsl.model.MediaTypes.application$divx$minusace$minuscompressed();
public static final MediaType.Binary APPLICATION_X_APPLE_DISKIMAGE = akka.http.scaladsl.model.MediaTypes.application$divx$minusapple$minusdiskimage();
public static final MediaType.Binary APPLICATION_X_ARC_COMPRESSED = akka.http.scaladsl.model.MediaTypes.application$divx$minusarc$minuscompressed();
public static final MediaType.Binary APPLICATION_X_BZIP = akka.http.scaladsl.model.MediaTypes.application$divx$minusbzip();
public static final MediaType.Binary APPLICATION_X_BZIP2 = akka.http.scaladsl.model.MediaTypes.application$divx$minusbzip2();
public static final MediaType.Binary APPLICATION_X_CHROME_EXTENSION = akka.http.scaladsl.model.MediaTypes.application$divx$minuschrome$minusextension();
public static final MediaType.Binary APPLICATION_X_COMPRESS = akka.http.scaladsl.model.MediaTypes.application$divx$minuscompress();
public static final MediaType.Binary APPLICATION_X_COMPRESSED = akka.http.scaladsl.model.MediaTypes.application$divx$minuscompressed();
public static final MediaType.Binary APPLICATION_X_DEBIAN_PACKAGE = akka.http.scaladsl.model.MediaTypes.application$divx$minusdebian$minuspackage();
public static final MediaType.Binary APPLICATION_X_DVI = akka.http.scaladsl.model.MediaTypes.application$divx$minusdvi();
public static final MediaType.Binary APPLICATION_X_FONT_TRUETYPE = akka.http.scaladsl.model.MediaTypes.application$divx$minusfont$minustruetype();
public static final MediaType.Binary APPLICATION_X_FONT_OPENTYPE = akka.http.scaladsl.model.MediaTypes.application$divx$minusfont$minusopentype();
public static final MediaType.Binary APPLICATION_X_GTAR = akka.http.scaladsl.model.MediaTypes.application$divx$minusgtar();
public static final MediaType.Binary APPLICATION_X_GZIP = akka.http.scaladsl.model.MediaTypes.application$divx$minusgzip();
public static final MediaType.WithOpenCharset APPLICATION_X_LATEX = akka.http.scaladsl.model.MediaTypes.application$divx$minuslatex();
public static final MediaType.Binary APPLICATION_X_RAR_COMPRESSED = akka.http.scaladsl.model.MediaTypes.application$divx$minusrar$minuscompressed();
public static final MediaType.Binary APPLICATION_X_REDHAT_PACKAGE_MANAGER = akka.http.scaladsl.model.MediaTypes.application$divx$minusredhat$minuspackage$minusmanager();
public static final MediaType.Binary APPLICATION_X_SHOCKWAVE_FLASH = akka.http.scaladsl.model.MediaTypes.application$divx$minusshockwave$minusflash();
public static final MediaType.Binary APPLICATION_X_TAR = akka.http.scaladsl.model.MediaTypes.application$divx$minustar();
public static final MediaType.Binary APPLICATION_X_TEX = akka.http.scaladsl.model.MediaTypes.application$divx$minustex();
public static final MediaType.Binary APPLICATION_X_TEXINFO = akka.http.scaladsl.model.MediaTypes.application$divx$minustexinfo();
public static final MediaType.WithOpenCharset APPLICATION_X_VRML = akka.http.scaladsl.model.MediaTypes.application$divx$minusvrml();
public static final MediaType.WithOpenCharset APPLICATION_X_WWW_FORM_URLENCODED = akka.http.scaladsl.model.MediaTypes.application$divx$minuswww$minusform$minusurlencoded();
public static final MediaType.Binary APPLICATION_X_X509_CA_CERT = akka.http.scaladsl.model.MediaTypes.application$divx$minusx509$minusca$minuscert();
public static final MediaType.Binary APPLICATION_X_XPINSTALL = akka.http.scaladsl.model.MediaTypes.application$divx$minusxpinstall();
public static final MediaType.WithOpenCharset APPLICATION_XHTML_XML = akka.http.scaladsl.model.MediaTypes.application$divxhtml$plusxml();
public static final MediaType.WithOpenCharset APPLICATION_XML_DTD = akka.http.scaladsl.model.MediaTypes.application$divxml$minusdtd();
public static final MediaType.WithOpenCharset APPLICATION_XML = akka.http.scaladsl.model.MediaTypes.application$divxml();
public static final MediaType.Binary APPLICATION_ZIP = akka.http.scaladsl.model.MediaTypes.application$divzip();
public static final MediaType.Binary AUDIO_AIFF = akka.http.scaladsl.model.MediaTypes.audio$divaiff();
public static final MediaType.Binary AUDIO_BASIC = akka.http.scaladsl.model.MediaTypes.audio$divbasic();
public static final MediaType.Binary AUDIO_MIDI = akka.http.scaladsl.model.MediaTypes.audio$divmidi();
public static final MediaType.Binary AUDIO_MOD = akka.http.scaladsl.model.MediaTypes.audio$divmod();
public static final MediaType.Binary AUDIO_MPEG = akka.http.scaladsl.model.MediaTypes.audio$divmpeg();
public static final MediaType.Binary AUDIO_OGG = akka.http.scaladsl.model.MediaTypes.audio$divogg();
public static final MediaType.Binary AUDIO_VOC = akka.http.scaladsl.model.MediaTypes.audio$divvoc();
public static final MediaType.Binary AUDIO_VORBIS = akka.http.scaladsl.model.MediaTypes.audio$divvorbis();
public static final MediaType.Binary AUDIO_VOXWARE = akka.http.scaladsl.model.MediaTypes.audio$divvoxware();
public static final MediaType.Binary AUDIO_WAV = akka.http.scaladsl.model.MediaTypes.audio$divwav();
public static final MediaType.Binary AUDIO_X_REALAUDIO = akka.http.scaladsl.model.MediaTypes.audio$divx$minusrealaudio();
public static final MediaType.Binary AUDIO_X_PSID = akka.http.scaladsl.model.MediaTypes.audio$divx$minuspsid();
public static final MediaType.Binary AUDIO_XM = akka.http.scaladsl.model.MediaTypes.audio$divxm();
public static final MediaType.Binary AUDIO_WEBM = akka.http.scaladsl.model.MediaTypes.audio$divwebm();
public static final MediaType.Binary IMAGE_GIF = akka.http.scaladsl.model.MediaTypes.image$divgif();
public static final MediaType.Binary IMAGE_JPEG = akka.http.scaladsl.model.MediaTypes.image$divjpeg();
public static final MediaType.Binary IMAGE_PICT = akka.http.scaladsl.model.MediaTypes.image$divpict();
public static final MediaType.Binary IMAGE_PNG = akka.http.scaladsl.model.MediaTypes.image$divpng();
public static final MediaType.Binary IMAGE_SVG_XML = akka.http.scaladsl.model.MediaTypes.image$divsvg$plusxml();
public static final MediaType.Binary IMAGE_TIFF = akka.http.scaladsl.model.MediaTypes.image$divtiff();
public static final MediaType.Binary IMAGE_X_ICON = akka.http.scaladsl.model.MediaTypes.image$divx$minusicon();
public static final MediaType.Binary IMAGE_X_MS_BMP = akka.http.scaladsl.model.MediaTypes.image$divx$minusms$minusbmp();
public static final MediaType.Binary IMAGE_X_PCX = akka.http.scaladsl.model.MediaTypes.image$divx$minuspcx();
public static final MediaType.Binary IMAGE_X_PICT = akka.http.scaladsl.model.MediaTypes.image$divx$minuspict();
public static final MediaType.Binary IMAGE_X_QUICKTIME = akka.http.scaladsl.model.MediaTypes.image$divx$minusquicktime();
public static final MediaType.Binary IMAGE_X_RGB = akka.http.scaladsl.model.MediaTypes.image$divx$minusrgb();
public static final MediaType.Binary IMAGE_X_XBITMAP = akka.http.scaladsl.model.MediaTypes.image$divx$minusxbitmap();
public static final MediaType.Binary IMAGE_X_XPIXMAP = akka.http.scaladsl.model.MediaTypes.image$divx$minusxpixmap();
public static final MediaType.Binary IMAGE_WEBP = akka.http.scaladsl.model.MediaTypes.image$divwebp();
public static final MediaType.Binary MESSAGE_HTTP = akka.http.scaladsl.model.MediaTypes.message$divhttp();
public static final MediaType.Binary MESSAGE_DELIVERY_STATUS = akka.http.scaladsl.model.MediaTypes.message$divdelivery$minusstatus();
public static final MediaType.Binary MESSAGE_RFC822 = akka.http.scaladsl.model.MediaTypes.message$divrfc822();
public static final MediaType.WithOpenCharset MULTIPART_MIXED = akka.http.scaladsl.model.MediaTypes.multipart$divmixed();
public static final MediaType.WithOpenCharset MULTIPART_ALTERNATIVE = akka.http.scaladsl.model.MediaTypes.multipart$divalternative();
public static final MediaType.WithOpenCharset MULTIPART_RELATED = akka.http.scaladsl.model.MediaTypes.multipart$divrelated();
public static final MediaType.WithOpenCharset MULTIPART_FORM_DATA = akka.http.scaladsl.model.MediaTypes.multipart$divform$minusdata();
public static final MediaType.WithOpenCharset MULTIPART_SIGNED = akka.http.scaladsl.model.MediaTypes.multipart$divsigned();
public static final MediaType.WithOpenCharset MULTIPART_ENCRYPTED = akka.http.scaladsl.model.MediaTypes.multipart$divencrypted();
public static final MediaType.WithOpenCharset MULTIPART_BYTERANGES = akka.http.scaladsl.model.MediaTypes.multipart$divbyteranges();
public static final MediaType.WithOpenCharset TEXT_ASP = akka.http.scaladsl.model.MediaTypes.text$divasp();
public static final MediaType.WithOpenCharset TEXT_CACHE_MANIFEST = akka.http.scaladsl.model.MediaTypes.text$divcache$minusmanifest();
public static final MediaType.WithOpenCharset TEXT_CALENDAR = akka.http.scaladsl.model.MediaTypes.text$divcalendar();
public static final MediaType.WithOpenCharset TEXT_CSS = akka.http.scaladsl.model.MediaTypes.text$divcss();
public static final MediaType.WithOpenCharset TEXT_CSV = akka.http.scaladsl.model.MediaTypes.text$divcsv();
public static final MediaType.WithOpenCharset TEXT_HTML = akka.http.scaladsl.model.MediaTypes.text$divhtml();
public static final MediaType.WithOpenCharset TEXT_MCF = akka.http.scaladsl.model.MediaTypes.text$divmcf();
public static final MediaType.WithOpenCharset TEXT_PLAIN = akka.http.scaladsl.model.MediaTypes.text$divplain();
public static final MediaType.WithOpenCharset TEXT_RICHTEXT = akka.http.scaladsl.model.MediaTypes.text$divrichtext();
public static final MediaType.WithOpenCharset TEXT_TAB_SEPARATED_VALUES = akka.http.scaladsl.model.MediaTypes.text$divtab$minusseparated$minusvalues();
public static final MediaType.WithOpenCharset TEXT_URI_LIST = akka.http.scaladsl.model.MediaTypes.text$divuri$minuslist();
public static final MediaType.WithOpenCharset TEXT_VND_WAP_WML = akka.http.scaladsl.model.MediaTypes.text$divvnd$u002Ewap$u002Ewml();
public static final MediaType.WithOpenCharset TEXT_VND_WAP_WMLSCRIPT = akka.http.scaladsl.model.MediaTypes.text$divvnd$u002Ewap$u002Ewmlscript();
public static final MediaType.WithOpenCharset TEXT_X_ASM = akka.http.scaladsl.model.MediaTypes.text$divx$minusasm();
public static final MediaType.WithOpenCharset TEXT_X_C = akka.http.scaladsl.model.MediaTypes.text$divx$minusc();
public static final MediaType.WithOpenCharset TEXT_X_COMPONENT = akka.http.scaladsl.model.MediaTypes.text$divx$minuscomponent();
public static final MediaType.WithOpenCharset TEXT_X_H = akka.http.scaladsl.model.MediaTypes.text$divx$minush();
public static final MediaType.WithOpenCharset TEXT_X_JAVA_SOURCE = akka.http.scaladsl.model.MediaTypes.text$divx$minusjava$minussource();
public static final MediaType.WithOpenCharset TEXT_X_PASCAL = akka.http.scaladsl.model.MediaTypes.text$divx$minuspascal();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPT = akka.http.scaladsl.model.MediaTypes.text$divx$minusscript();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTCSH = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptcsh();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTELISP = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptelisp();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTKSH = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptksh();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTLISP = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptlisp();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTPERL = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptperl();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTPERL_MODULE = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptperl$minusmodule();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTPHYTON = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptphyton();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTREXX = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptrexx();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTSCHEME = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptscheme();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTSH = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptsh();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTTCL = akka.http.scaladsl.model.MediaTypes.text$divx$minusscripttcl();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTTCSH = akka.http.scaladsl.model.MediaTypes.text$divx$minusscripttcsh();
public static final MediaType.WithOpenCharset TEXT_X_SCRIPTZSH = akka.http.scaladsl.model.MediaTypes.text$divx$minusscriptzsh();
public static final MediaType.WithOpenCharset TEXT_X_SERVER_PARSED_HTML = akka.http.scaladsl.model.MediaTypes.text$divx$minusserver$minusparsed$minushtml();
public static final MediaType.WithOpenCharset TEXT_X_SETEXT = akka.http.scaladsl.model.MediaTypes.text$divx$minussetext();
public static final MediaType.WithOpenCharset TEXT_X_SGML = akka.http.scaladsl.model.MediaTypes.text$divx$minussgml();
public static final MediaType.WithOpenCharset TEXT_X_SPEECH = akka.http.scaladsl.model.MediaTypes.text$divx$minusspeech();
public static final MediaType.WithOpenCharset TEXT_X_UUENCODE = akka.http.scaladsl.model.MediaTypes.text$divx$minusuuencode();
public static final MediaType.WithOpenCharset TEXT_X_VCALENDAR = akka.http.scaladsl.model.MediaTypes.text$divx$minusvcalendar();
public static final MediaType.WithOpenCharset TEXT_X_VCARD = akka.http.scaladsl.model.MediaTypes.text$divx$minusvcard();
public static final MediaType.WithOpenCharset TEXT_XML = akka.http.scaladsl.model.MediaTypes.text$divxml();
public static final MediaType.Binary VIDEO_AVS_VIDEO = akka.http.scaladsl.model.MediaTypes.video$divavs$minusvideo();
public static final MediaType.Binary VIDEO_DIVX = akka.http.scaladsl.model.MediaTypes.video$divdivx();
public static final MediaType.Binary VIDEO_GL = akka.http.scaladsl.model.MediaTypes.video$divgl();
public static final MediaType.Binary VIDEO_MP4 = akka.http.scaladsl.model.MediaTypes.video$divmp4();
public static final MediaType.Binary VIDEO_MPEG = akka.http.scaladsl.model.MediaTypes.video$divmpeg();
public static final MediaType.Binary VIDEO_OGG = akka.http.scaladsl.model.MediaTypes.video$divogg();
public static final MediaType.Binary VIDEO_QUICKTIME = akka.http.scaladsl.model.MediaTypes.video$divquicktime();
public static final MediaType.Binary VIDEO_X_DV = akka.http.scaladsl.model.MediaTypes.video$divx$minusdv();
public static final MediaType.Binary VIDEO_X_FLV = akka.http.scaladsl.model.MediaTypes.video$divx$minusflv();
public static final MediaType.Binary VIDEO_X_MOTION_JPEG = akka.http.scaladsl.model.MediaTypes.video$divx$minusmotion$minusjpeg();
public static final MediaType.Binary VIDEO_X_MS_ASF = akka.http.scaladsl.model.MediaTypes.video$divx$minusms$minusasf();
public static final MediaType.Binary VIDEO_X_MSVIDEO = akka.http.scaladsl.model.MediaTypes.video$divx$minusmsvideo();
public static final MediaType.Binary VIDEO_X_SGI_MOVIE = akka.http.scaladsl.model.MediaTypes.video$divx$minussgi$minusmovie();
public static final MediaType.Binary VIDEO_WEBM = akka.http.scaladsl.model.MediaTypes.video$divwebm();
/**
* Creates a custom media type.
*/
public static MediaType custom(String value, boolean binary, boolean compressible) {
akka.http.scaladsl.model.MediaType.Compressibility comp = compressible ?
akka.http.scaladsl.model.MediaType.Compressible$.MODULE$ : akka.http.scaladsl.model.MediaType.NotCompressible$.MODULE$;
        return akka.http.scaladsl.model.MediaType.custom(value, binary, comp,
akka.http.scaladsl.model.MediaType.custom$default$4());
}
/**
* Looks up a media-type with the given main-type and sub-type.
*/
public static Optional<MediaType> lookup(String mainType, String subType) {
return Util.<scala.Tuple2<String, String>, MediaType, akka.http.scaladsl.model.MediaType>lookupInRegistry(MediaTypes$.MODULE$, new scala.Tuple2<String, String>(mainType, subType));
}
}
|
import java.util.TreeMap;
public class HashTable<K extends Comparable<K>, V> {
private final int[] capacity
= {53, 97, 193, 389, 769, 1543, 3079, 6151, 12289, 24593,
49157, 98317, 196613, 393241, 786433, 1572869, 3145739, 6291469,
12582917, 25165843, 50331653, 100663319, 201326611, 402653189, 805306457, 1610612741};
private static final int upperTol = 10;
private static final int lowerTol = 2;
private int capacityIndex = 0;
private TreeMap<K, V>[] hashtable;
private int size;
private int M;
public HashTable(){
this.M = capacity[capacityIndex];
size = 0;
hashtable = new TreeMap[M];
for(int i = 0 ; i < M ; i ++)
hashtable[i] = new TreeMap<>();
}
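    // Buckets are indexed by (hashCode & 0x7fffffff) % M: the mask clears the
    // sign bit so negative hash codes cannot yield a negative index, and M is
    // always one of the prime capacities above.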
private int hash(K key){
return (key.hashCode() & 0x7fffffff) % M;
}
public int getSize(){
return size;
}
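    // Adds or updates a mapping; grows the table to the next prime capacity
    // once the average bucket holds more than upperTol entries.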
public void add(K key, V value){
TreeMap<K, V> map = hashtable[hash(key)];
if(map.containsKey(key))
map.put(key, value);
else{
map.put(key, value);
size ++;
if(size >= upperTol * M && capacityIndex + 1 < capacity.length){
capacityIndex ++;
resize(capacity[capacityIndex]);
}
}
}
public V remove(K key){
V ret = null;
TreeMap<K, V> map = hashtable[hash(key)];
if(map.containsKey(key)){
ret = map.remove(key);
size --;
if(size < lowerTol * M && capacityIndex - 1 >= 0){
capacityIndex --;
resize(capacity[capacityIndex]);
}
}
return ret;
}
public void set(K key, V value){
TreeMap<K, V> map = hashtable[hash(key)];
if(!map.containsKey(key))
throw new IllegalArgumentException(key + " doesn't exist!");
map.put(key, value);
}
public boolean contains(K key){
return hashtable[hash(key)].containsKey(key);
}
public V get(K key){
return hashtable[hash(key)].get(key);
}
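    // Rehashes every entry into a table of the new capacity. M is updated
    // before rehashing so hash(key) distributes keys over the new bucket count.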
private void resize(int newM){
TreeMap<K, V>[] newHashTable = new TreeMap[newM];
for(int i = 0 ; i < newM ; i ++)
newHashTable[i] = new TreeMap<>();
int oldM = M;
this.M = newM;
for(int i = 0 ; i < oldM ; i ++){
TreeMap<K, V> map = hashtable[i];
for(K key: map.keySet())
newHashTable[hash(key)].put(key, map.get(key));
}
this.hashtable = newHashTable;
}
}
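// Illustrative usage sketch (not part of the original class):
// HashTable<String, Integer> table = new HashTable<>();
// table.add("answer", 42);
// System.out.println(table.contains("answer")); // true
// System.out.println(table.get("answer"));      // 42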
|
package com.shixinke.utils.web.config;
import com.alibaba.csp.sentinel.annotation.aspectj.SentinelResourceAspect;
import com.alibaba.csp.sentinel.config.SentinelConfig;
import com.alibaba.csp.sentinel.datasource.ReadableDataSource;
import com.alibaba.csp.sentinel.datasource.apollo.ApolloDataSource;
import com.alibaba.csp.sentinel.log.LogBase;
import com.alibaba.csp.sentinel.slots.block.authority.AuthorityRule;
import com.alibaba.csp.sentinel.slots.block.authority.AuthorityRuleManager;
import com.alibaba.csp.sentinel.slots.block.degrade.DegradeRule;
import com.alibaba.csp.sentinel.slots.block.degrade.DegradeRuleManager;
import com.alibaba.csp.sentinel.slots.block.flow.FlowRule;
import com.alibaba.csp.sentinel.slots.block.flow.FlowRuleManager;
import com.alibaba.csp.sentinel.slots.block.flow.param.ParamFlowRule;
import com.alibaba.csp.sentinel.slots.block.flow.param.ParamFlowRuleManager;
import com.alibaba.csp.sentinel.slots.system.SystemRule;
import com.alibaba.csp.sentinel.slots.system.SystemRuleManager;
import com.alibaba.csp.sentinel.transport.config.TransportConfig;
import com.alibaba.csp.sentinel.util.AppNameUtil;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.TypeReference;
import com.ctrip.framework.apollo.Config;
import com.ctrip.framework.apollo.spring.annotation.ApolloConfig;
import lombok.Data;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.util.CollectionUtils;
import org.springframework.util.StringUtils;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* @author shixinke
 * created 19-4-9 4:07 PM
* @version 1.0
*/
@Configuration
@Data
@Slf4j
public class SentinelRuleConfig {
@Autowired
private Environment env;
private List<String> flowRuleKeys = new ArrayList<>(0);
private List<String> degradeRuleKeys = new ArrayList<>(0);
private List<String> authorityRuleKeys = new ArrayList<>(0);
private List<String> systemRuleKeys = new ArrayList<>(0);
private List<String> paramFlowRuleKeys = new ArrayList<>(0);
private String flowRulePrefix;
private String degradeRulePrefix;
private String systemRulePrefix;
private String authorityRulePrefix;
private String paramFlowRulePrefix;
private String apolloNamespace;
private String sentinelLogDir;
private String dashboardServer;
private String projectName;
private static final String FLOW_RULE_PREFIX_KEY = "sentinel.flow.rules.prefix";
private static final String DEGRADE_RULE_PREFIX_KEY = "sentinel.degrade.rules.prefix";
private static final String AUTHORITY_RULE_PREFIX_KEY = "sentinel.authority.rules.prefix";
private static final String SYSTEM_RULE_PREFIX_KEY = "sentinel.system.rules.prefix";
private static final String PARAM_FLOW_RULE_PREFIX_KEY = "sentinel.paramFlow.rules.prefix";
private static final String SENTINEL_APOLLO_RULE_NAMESPACE_KEY = "apollo.sentinel.rules.namespace";
private static final String SENTINEL_LOG_DIR_KEY = "csp.sentinel.log.dir";
private static final String SENTINEL_SERVER_KEY = "csp.sentinel.dashboard.server";
private static final String PROJECT_NAME_KEY = "project.name";
@ApolloConfig
private Config config;
@Bean
public SentinelResourceAspect sentinelResourceAspect() {
initConfig();
initRuleKeys();
String defaultRules = "[]";
loadFlowRules(defaultRules);
loadDegradeRules(defaultRules);
loadAuthorityRules(defaultRules);
loadSystemRules(defaultRules);
loadParamFlowRules(defaultRules);
return new SentinelResourceAspect();
}
private void loadFlowRules(String defaultRules) {
if (!CollectionUtils.isEmpty(flowRuleKeys)) {
for (String flowRuleKey : flowRuleKeys) {
ReadableDataSource<String, List<FlowRule>> flowRuleDataSource = new ApolloDataSource<>(apolloNamespace, flowRuleKey, defaultRules, source -> {
    log.info("flow rules source:{}", source);
    return JSON.parseObject(source, new TypeReference<List<FlowRule>>() {});
});
FlowRuleManager.register2Property(flowRuleDataSource.getProperty());
}
log.info("flow rules:{}", FlowRuleManager.getRules());
}
}
private void loadDegradeRules(String defaultRules) {
if (!CollectionUtils.isEmpty(degradeRuleKeys)) {
for (String degradeRuleKey : degradeRuleKeys) {
ReadableDataSource<String, List<DegradeRule>> degradeRuleDataSource = new ApolloDataSource<>(apolloNamespace, degradeRuleKey, defaultRules, source -> {
    log.info("degrade rules source:{}", source);
    return JSON.parseObject(source, new TypeReference<List<DegradeRule>>() {});
});
DegradeRuleManager.register2Property(degradeRuleDataSource.getProperty());
}
log.info("degrade rules:{}", DegradeRuleManager.getRules());
}
}
private void loadAuthorityRules(String defaultRules) {
if (!CollectionUtils.isEmpty(authorityRuleKeys)) {
for (String authorityRuleKey : authorityRuleKeys) {
ReadableDataSource<String, List<AuthorityRule>> authorityRuleDataSource = new ApolloDataSource<>(apolloNamespace, authorityRuleKey, defaultRules, source -> {
    log.info("authority rules source:{}", source);
    return JSON.parseObject(source, new TypeReference<List<AuthorityRule>>() {});
});
AuthorityRuleManager.register2Property(authorityRuleDataSource.getProperty());
}
log.info("authority rules:{}", AuthorityRuleManager.getRules());
}
}
private void loadSystemRules(String defaultRules) {
if (!CollectionUtils.isEmpty(systemRuleKeys)) {
for (String systemRuleKey : systemRuleKeys) {
ReadableDataSource<String, List<SystemRule>> systemRuleDataSource = new ApolloDataSource<>(apolloNamespace, systemRuleKey, defaultRules, source -> {
    log.info("system rules source:{}", source);
    return JSON.parseObject(source, new TypeReference<List<SystemRule>>() {});
});
SystemRuleManager.register2Property(systemRuleDataSource.getProperty());
}
log.info("system rules:{}", SystemRuleManager.getRules());
}
}
private void loadParamFlowRules(String defaultRules) {
if (!CollectionUtils.isEmpty(paramFlowRuleKeys)) {
for (String paramFlowRuleKey : paramFlowRuleKeys) {
ReadableDataSource<String, List<ParamFlowRule>> paramFlowRuleDataSource = new ApolloDataSource<>(apolloNamespace, paramFlowRuleKey, defaultRules, source -> {
    log.info("param flow rules source:{}", source);
    return JSON.parseObject(source, new TypeReference<List<ParamFlowRule>>() {});
});
ParamFlowRuleManager.register2Property(paramFlowRuleDataSource.getProperty());
}
log.info("param flow rules:{}", ParamFlowRuleManager.getRules());
}
}
private void initRuleKeys() {
Set<String> properties = new HashSet<>(5);
if (config != null) {
properties = config.getPropertyNames();
} else {
log.debug("not enabled apollo");
}
if (!CollectionUtils.isEmpty(properties)) {
for (String key : properties) {
if (!StringUtils.isEmpty(flowRulePrefix) && key.startsWith(flowRulePrefix) && !key.equals(flowRulePrefix)) {
flowRuleKeys.add(key);
} else if (!StringUtils.isEmpty(degradeRulePrefix) && key.startsWith(degradeRulePrefix) && !key.equals(degradeRulePrefix)) {
degradeRuleKeys.add(key);
} else if (!StringUtils.isEmpty(systemRulePrefix) && key.startsWith(systemRulePrefix) && !key.equals(systemRulePrefix)) {
systemRuleKeys.add(key);
} else if (!StringUtils.isEmpty(authorityRulePrefix) && key.startsWith(authorityRulePrefix) && !key.equals(authorityRulePrefix)) {
authorityRuleKeys.add(key);
} else if (!StringUtils.isEmpty(paramFlowRulePrefix) && key.startsWith(paramFlowRulePrefix) && !key.equals(paramFlowRulePrefix)) {
paramFlowRuleKeys.add(key);
}
}
}
}
private void initConfig() {
flowRulePrefix = env.getProperty(FLOW_RULE_PREFIX_KEY);
degradeRulePrefix = env.getProperty(DEGRADE_RULE_PREFIX_KEY);
systemRulePrefix = env.getProperty(SYSTEM_RULE_PREFIX_KEY);
authorityRulePrefix = env.getProperty(AUTHORITY_RULE_PREFIX_KEY);
paramFlowRulePrefix = env.getProperty(PARAM_FLOW_RULE_PREFIX_KEY);
apolloNamespace = env.getProperty(SENTINEL_APOLLO_RULE_NAMESPACE_KEY);
sentinelLogDir = env.getProperty(SENTINEL_LOG_DIR_KEY);
dashboardServer = env.getProperty(SENTINEL_SERVER_KEY);
projectName = env.getProperty(PROJECT_NAME_KEY);
if (!StringUtils.isEmpty(sentinelLogDir)) {
System.setProperty(LogBase.LOG_DIR, sentinelLogDir);
}
if (!StringUtils.isEmpty(projectName)) {
System.setProperty(AppNameUtil.APP_NAME, projectName);
}
if (!StringUtils.isEmpty(dashboardServer)) {
SentinelConfig.setConfig(TransportConfig.CONSOLE_SERVER, dashboardServer);
}
}
}
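// Editor's note: an illustrative property set for the keys read in initConfig()
// above (all values are examples, not library defaults):
//
//   sentinel.flow.rules.prefix      = sentinel.flow.rules
//   sentinel.degrade.rules.prefix   = sentinel.degrade.rules
//   apollo.sentinel.rules.namespace = application
//   csp.sentinel.log.dir            = /data/logs/sentinel
//   csp.sentinel.dashboard.server   = localhost:8080
//   project.name                    = demo-service
//   # each concrete rule key then starts with its prefix, e.g. an assumed flow rule:
//   sentinel.flow.rules.order-api   = [{"resource":"orderApi","grade":1,"count":100}]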
|
package io.morethan.javabenchmarks.experimental;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import com.google.common.collect.ImmutableList;
/**
 * Benchmarking different approaches to accessing primitive values (flexible schema).
*/
@Fork(value = 2)
@Warmup(iterations = 10)
@Measurement(iterations = 5)
@BenchmarkMode(Mode.Throughput)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class PrimitivWrapperBenchmark {
private ValueBase _valueBase = new ValueBase(ImmutableList.<PrimitivHolder>builder()
.add(new PrimitivStringHolder())
.add(new PrimitivLongHolder())
.add(new PrimitivBooleanHolder())
.add(new PrimitivStringHolder())
.add(new PrimitivLongHolder())
.add(new PrimitivBooleanHolder())
.build());
private long _currentRow = 0;
@Benchmark
public void cast1(Blackhole blackhole) {
blackhole.consume(_valueBase.getString_Cast1(0, _currentRow));
blackhole.consume(_valueBase.getLong_Cast1(1, _currentRow));
blackhole.consume(_valueBase.getBoolean_Cast1(2, _currentRow));
blackhole.consume(_valueBase.getString_Cast1(3, _currentRow));
blackhole.consume(_valueBase.getLong_Cast1(4, _currentRow));
blackhole.consume(_valueBase.getBoolean_Cast1(5, _currentRow));
_currentRow++;
}
@Benchmark
public void cast2(Blackhole blackhole) {
blackhole.consume(_valueBase.getString_Cast2(0, _currentRow));
blackhole.consume(_valueBase.getLong_Cast2(1, _currentRow));
blackhole.consume(_valueBase.getBoolean_Cast2(2, _currentRow));
blackhole.consume(_valueBase.getString_Cast2(3, _currentRow));
blackhole.consume(_valueBase.getLong_Cast2(4, _currentRow));
blackhole.consume(_valueBase.getBoolean_Cast2(5, _currentRow));
_currentRow++;
}
@Benchmark
public void cast3(Blackhole blackhole) {
blackhole.consume(_valueBase.getString_Cast3(0, _currentRow));
blackhole.consume(_valueBase.getLong_Cast3(1, _currentRow));
blackhole.consume(_valueBase.getBoolean_Cast3(2, _currentRow));
blackhole.consume(_valueBase.getString_Cast3(3, _currentRow));
blackhole.consume(_valueBase.getLong_Cast3(4, _currentRow));
blackhole.consume(_valueBase.getBoolean_Cast3(5, _currentRow));
_currentRow++;
}
@Benchmark
public void extractor(Blackhole blackhole) {
blackhole.consume(_valueBase.getString_Extract(0, _currentRow));
blackhole.consume(_valueBase.getLong_Extract(1, _currentRow));
blackhole.consume(_valueBase.getBoolean_Extract(2, _currentRow));
blackhole.consume(_valueBase.getString_Extract(3, _currentRow));
blackhole.consume(_valueBase.getLong_Extract(4, _currentRow));
blackhole.consume(_valueBase.getBoolean_Extract(5, _currentRow));
_currentRow++;
}
private static class ValueBase {
private final List<PrimitivHolder> _primitivHolders;
private final PrimitivExtractor[] _primitivExtractors;
public ValueBase(List<PrimitivHolder> primitivHolders) {
_primitivHolders = primitivHolders;
_primitivExtractors = new PrimitivExtractor[primitivHolders.size()];
for (int i = 0; i < _primitivExtractors.length; i++) {
_primitivExtractors[i] = PrimitivExtractor.create(primitivHolders.get(i), i);
}
}
public String getString_Cast1(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
if (primitivHolder instanceof PrimitivStringHolder) {
return ((PrimitivStringHolder) primitivHolder).getString(row);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
public long getLong_Cast1(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
if (primitivHolder instanceof PrimitivLongHolder) {
return ((PrimitivLongHolder) primitivHolder).getLong(row);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
public boolean getBoolean_Cast1(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
if (primitivHolder instanceof PrimitivBooleanHolder) {
return ((PrimitivBooleanHolder) primitivHolder).getBoolean(row);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
public String getString_Cast2(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
if (PrimitivStringHolder.class.isInstance(primitivHolder)) {
return PrimitivStringHolder.class.cast(primitivHolder).getString(row);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
public long getLong_Cast2(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
if (PrimitivLongHolder.class.isInstance(primitivHolder)) {
return PrimitivLongHolder.class.cast(primitivHolder).getLong(row);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
public boolean getBoolean_Cast2(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
if (PrimitivBooleanHolder.class.isInstance(primitivHolder)) {
return PrimitivBooleanHolder.class.cast(primitivHolder).getBoolean(row);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
public String getString_Cast3(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
try {
return PrimitivStringHolder.class.cast(primitivHolder).getString(row);
} catch (ClassCastException e) {
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
}
public long getLong_Cast3(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
try {
return PrimitivLongHolder.class.cast(primitivHolder).getLong(row);
} catch (ClassCastException e) {
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
}
public boolean getBoolean_Cast3(int columnIndex, long row) {
PrimitivHolder primitivHolder = _primitivHolders.get(columnIndex);
try {
return PrimitivBooleanHolder.class.cast(primitivHolder).getBoolean(row);
} catch (ClassCastException e) {
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
}
public String getString_Extract(int columnIndex, long row) {
return _primitivExtractors[columnIndex].getString(row);
}
public long getLong_Extract(int columnIndex, long row) {
return _primitivExtractors[columnIndex].getLong(row);
}
public boolean getBoolean_Extract(int columnIndex, long row) {
return _primitivExtractors[columnIndex].getBoolean(row);
}
}
private static interface PrimitivHolder {
// marker interface
}
private static class PrimitivStringHolder implements PrimitivHolder {
public String getString(long row) {
return "string" + row;
}
}
private static class PrimitivLongHolder implements PrimitivHolder {
public long getLong(long row) {
return row;
}
}
private static class PrimitivBooleanHolder implements PrimitivHolder {
public boolean getBoolean(long row) {
return row % 2 == 0;
}
}
private static class PrimitivExtractor {
private final PrimitivHolder _primitivHolder;
private final int _columnIndex;
public PrimitivExtractor(PrimitivHolder primitivHolder, int columnIndex) {
_primitivHolder = primitivHolder;
_columnIndex = columnIndex;
}
public String getString(long row) {
throw new UnsupportedOperationException(_columnIndex + ": " + _primitivHolder.getClass().getSimpleName());
}
public long getLong(long row) {
throw new UnsupportedOperationException(_columnIndex + ": " + _primitivHolder.getClass().getSimpleName());
}
public boolean getBoolean(long row) {
throw new UnsupportedOperationException(_columnIndex + ": " + _primitivHolder.getClass().getSimpleName());
}
public static PrimitivExtractor create(PrimitivHolder primitivHolder, int columnIndex) {
if (primitivHolder instanceof PrimitivStringHolder) {
return new PrimitivStringExtractor((PrimitivStringHolder) primitivHolder, columnIndex);
} else if (primitivHolder instanceof PrimitivLongHolder) {
return new PrimitivLongExtractor((PrimitivLongHolder) primitivHolder, columnIndex);
} else if (primitivHolder instanceof PrimitivBooleanHolder) {
return new PrimitivBooleanExtractor((PrimitivBooleanHolder) primitivHolder, columnIndex);
}
throw new UnsupportedOperationException(columnIndex + ": " + primitivHolder.getClass().getSimpleName());
}
}
private static class PrimitivStringExtractor extends PrimitivExtractor {
private PrimitivStringHolder _primitivHolder;
public PrimitivStringExtractor(PrimitivStringHolder primitivHolder, int columnIndex) {
super(primitivHolder, columnIndex);
_primitivHolder = primitivHolder;
}
@Override
public String getString(long row) {
return _primitivHolder.getString(row);
}
}
private static class PrimitivLongExtractor extends PrimitivExtractor {
private PrimitivLongHolder _primitivHolder;
public PrimitivLongExtractor(PrimitivLongHolder primitivHolder, int columnIndex) {
super(primitivHolder, columnIndex);
_primitivHolder = primitivHolder;
}
@Override
public long getLong(long row) {
return _primitivHolder.getLong(row);
}
}
private static class PrimitivBooleanExtractor extends PrimitivExtractor {
private PrimitivBooleanHolder _primitivHolder;
public PrimitivBooleanExtractor(PrimitivBooleanHolder primitivHolder, int columnIndex) {
super(primitivHolder, columnIndex);
_primitivHolder = primitivHolder;
}
@Override
public boolean getBoolean(long row) {
return _primitivHolder.getBoolean(row);
}
}
}
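// Editor's note: a minimal, hypothetical launcher for the benchmark above, using
// the standard JMH runner API (the runner class itself is not part of the
// original file):
class PrimitivWrapperBenchmarkRunner {
    public static void main(String[] args) throws org.openjdk.jmh.runner.RunnerException {
        org.openjdk.jmh.runner.options.Options options =
                new org.openjdk.jmh.runner.options.OptionsBuilder()
                        .include(PrimitivWrapperBenchmark.class.getSimpleName())
                        .build();
        new org.openjdk.jmh.runner.Runner(options).run();
    }
}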
|
package com.skeleton.mvp.fragment;
/**
* Created by rajatdhamija
* 19/04/18.
*/
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Build;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import android.widget.RadioButton;
import android.widget.RadioGroup;
import androidx.appcompat.widget.AppCompatTextView;
import com.google.android.material.bottomsheet.BottomSheetDialogFragment;
import com.skeleton.mvp.R;
import com.skeleton.mvp.data.db.CommonData;
import com.skeleton.mvp.data.model.fcCommon.NotificationSnoozeTime;
import com.skeleton.mvp.data.model.notifications.NotificationSettingsActivity;
import com.skeleton.mvp.util.Log;
import com.skeleton.mvp.util.Utils;
import com.skeleton.mvp.utils.DateUtils;
import java.util.ArrayList;
public class SnoozeBottomSheetFragment extends BottomSheetDialogFragment implements View.OnClickListener {
private Context context;
private LinearLayout llRadioButtons;
private ArrayList<NotificationSnoozeTime> notificationSnoozeTimes = new ArrayList<>();
private Boolean isExpired = false;
private LinearLayout llSnoozeOptions, llSnoozeView;
private AppCompatTextView tvSnoozeTime, tvEndSnooze;
private String snoozeTime;
public SnoozeBottomSheetFragment() {
}
public static SnoozeBottomSheetFragment newInstance(int arg, Context context, Boolean isExpired, String snoozeTime) {
SnoozeBottomSheetFragment frag = new SnoozeBottomSheetFragment();
Bundle args = new Bundle();
frag.setArguments(args);
frag.setContext(context);
frag.setIsExpired(isExpired);
frag.setSnoozeTime(snoozeTime);
return frag;
}
private void setSnoozeTime(String snoozeTime) {
this.snoozeTime = snoozeTime;
}
private void setIsExpired(Boolean isExpired) {
this.isExpired = isExpired;
}
private void setContext(Context context) {
this.context = context;
}
private void createRadioButton() {
final RadioButton[] rb = new RadioButton[notificationSnoozeTimes.size()];
RadioGroup rg = new RadioGroup(context); // create the RadioGroup programmatically
rg.setOrientation(RadioGroup.VERTICAL); // or RadioGroup.HORIZONTAL
for (int i = 0; i < notificationSnoozeTimes.size() - 1; i++) { // note: renders all but the last snooze option
rb[i] = new RadioButton(context);
rb[i].setText(notificationSnoozeTimes.get(i).getDescription());
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
rb[i].setTextAppearance(R.style.CustomTextAppearance_TitilliumWeb);
}
rb[i].setTextSize(17f);
rb[i].setPadding(0, Utils.dpToPx(context, 20), 0, Utils.dpToPx(context, 20));
rb[i].setId(i);
rg.addView(rb[i]);
}
rg.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(RadioGroup group, int checkedId) {
Log.e("Position:", checkedId + "");
((NotificationSettingsActivity) context).snoozeNotifications(notificationSnoozeTimes.get(checkedId).getTime_slot());
dismiss();
}
});
llRadioButtons.addView(rg);//you add the whole RadioGroup to the layout
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@SuppressLint("SetTextI18n")
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.snooze_bottom_sheet, container, false);
llRadioButtons = view.findViewById(R.id.llRadioButtons);
llSnoozeOptions = view.findViewById(R.id.llSnoozeOptions);
llSnoozeView = view.findViewById(R.id.llSnoozeView);
tvSnoozeTime = view.findViewById(R.id.tvSnoozeTime);
tvEndSnooze = view.findViewById(R.id.tvEndSnooze);
notificationSnoozeTimes = (ArrayList<NotificationSnoozeTime>) CommonData.getCommonResponse().getData().getUserInfo().getNotificationSnoozeTime();
createRadioButton();
if (isExpired) {
llSnoozeOptions.setVisibility(View.VISIBLE);
llSnoozeView.setVisibility(View.GONE);
} else {
llSnoozeOptions.setVisibility(View.GONE);
llSnoozeView.setVisibility(View.VISIBLE);
}
tvSnoozeTime.setText("Scheduled until " + new DateUtils().getDate(new DateUtils().convertToLocal(snoozeTime)) + ", " + new DateUtils().getTime(new DateUtils().convertToLocal(snoozeTime)));
tvEndSnooze.setOnClickListener(v -> {
((NotificationSettingsActivity) context).endSnooze();
dismiss();
});
return view;
}
@Override
public void onClick(View v) {
    // No per-view handling yet; any click simply dismisses the sheet.
    dismiss();
}
@Override
public void onCancel(DialogInterface dialog) {
super.onCancel(dialog);
}
}
|
/**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
*/
package com.microsoft.azure.management.storageimportexport.v2016_11_01;
import com.microsoft.azure.arm.collection.SupportsCreating;
import com.microsoft.azure.arm.resources.collection.SupportsDeletingByResourceGroup;
import com.microsoft.azure.arm.resources.collection.SupportsBatchDeletion;
import com.microsoft.azure.arm.resources.collection.SupportsGettingByResourceGroup;
import rx.Observable;
import com.microsoft.azure.arm.resources.collection.SupportsListingByResourceGroup;
import com.microsoft.azure.arm.collection.SupportsListing;
import com.microsoft.azure.management.storageimportexport.v2016_11_01.implementation.JobsInner;
import com.microsoft.azure.arm.model.HasInner;
/**
* Type representing Jobs.
*/
public interface Jobs extends
        SupportsCreating<JobResponse.DefinitionStages.Blank>,
        SupportsDeletingByResourceGroup,
        SupportsBatchDeletion,
        SupportsGettingByResourceGroup<JobResponse>,
        SupportsListingByResourceGroup<JobResponse>,
        SupportsListing<JobResponse>,
        HasInner<JobsInner> {
}
|
/*
* Copyright (C) 2017 microG Project Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.microg.gms;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.database.Cursor;
import android.database.MatrixCursor;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.Log;
public class ChimeraSpoofProvider extends ContentProvider {
private static final String TAG = "GmsChimeraSpoof";
private static final String[] COLUMNS = new String[]{"version", "apkPath", "loaderPath", "apkDescStr"};
@Override
public boolean onCreate() {
return true;
}
@Nullable
@Override
public Cursor query(@NonNull Uri uri, String[] projection, String selection, String[] selectionArgs, String sortOrder) {
MatrixCursor cursor = new MatrixCursor(COLUMNS);
Log.d(TAG, "query: " + uri);
return cursor;
}
@Nullable
@Override
public String getType(@NonNull Uri uri) {
return "vnd.android.cursor.item/com.google.android.gms.chimera";
}
@Nullable
@Override
public Uri insert(@NonNull Uri uri, ContentValues values) {
return null;
}
@Override
public int delete(@NonNull Uri uri, String selection, String[] selectionArgs) {
return 0;
}
@Override
public int update(@NonNull Uri uri, ContentValues values, String selection, String[] selectionArgs) {
return 0;
}
}
|
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.exporter.logging;
import static io.opentelemetry.api.common.AttributeKey.booleanKey;
import static io.opentelemetry.api.common.AttributeKey.longKey;
import static io.opentelemetry.api.common.AttributeKey.stringKey;
import static java.util.Collections.singletonList;
import static org.assertj.core.api.Assertions.assertThat;
import io.github.netmikey.logunit.api.LogCapturer;
import io.opentelemetry.api.common.Attributes;
import io.opentelemetry.api.trace.SpanContext;
import io.opentelemetry.api.trace.SpanKind;
import io.opentelemetry.api.trace.TraceFlags;
import io.opentelemetry.api.trace.TraceState;
import io.opentelemetry.sdk.common.CompletableResultCode;
import io.opentelemetry.sdk.common.InstrumentationLibraryInfo;
import io.opentelemetry.sdk.testing.trace.TestSpanData;
import io.opentelemetry.sdk.trace.data.EventData;
import io.opentelemetry.sdk.trace.data.SpanData;
import io.opentelemetry.sdk.trace.data.StatusData;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.logging.StreamHandler;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
import org.slf4j.event.Level;
/** Tests for the {@link LoggingSpanExporter}. */
class LoggingSpanExporterTest {
private static final SpanData SPAN1 =
TestSpanData.builder()
.setHasEnded(true)
.setSpanContext(
SpanContext.create(
"12345678876543211234567887654321",
"8765432112345678",
TraceFlags.getSampled(),
TraceState.getDefault()))
.setStartEpochNanos(100)
.setEndEpochNanos(100 + 1000)
.setStatus(StatusData.ok())
.setName("testSpan1")
.setKind(SpanKind.INTERNAL)
.setAttributes(Attributes.of(stringKey("animal"), "cat", longKey("lives"), 9L))
.setEvents(
Collections.singletonList(
EventData.create(
100 + 500,
"somethingHappenedHere",
Attributes.of(booleanKey("important"), true))))
.setTotalRecordedEvents(1)
.setTotalRecordedLinks(0)
.setInstrumentationLibraryInfo(InstrumentationLibraryInfo.create("tracer1", null))
.build();
private static final SpanData SPAN2 =
TestSpanData.builder()
.setHasEnded(false)
.setSpanContext(
SpanContext.create(
"12340000000043211234000000004321",
"8765000000005678",
TraceFlags.getSampled(),
TraceState.getDefault()))
.setStartEpochNanos(500)
.setEndEpochNanos(500 + 1001)
.setStatus(StatusData.error())
.setName("testSpan2")
.setKind(SpanKind.CLIENT)
.setInstrumentationLibraryInfo(InstrumentationLibraryInfo.create("tracer2", "1.0"))
.build();
@RegisterExtension
LogCapturer logs = LogCapturer.create().captureForType(LoggingSpanExporter.class);
LoggingSpanExporter exporter;
@BeforeEach
void setUp() {
exporter = new LoggingSpanExporter();
}
@AfterEach
void tearDown() {
exporter.close();
}
@Test
void log() {
exporter.export(Arrays.asList(SPAN1, SPAN2));
assertThat(logs.getEvents())
.hasSize(2)
.allSatisfy(log -> assertThat(log.getLevel()).isEqualTo(Level.INFO));
assertThat(logs.getEvents().get(0).getMessage())
.isEqualTo(
"'testSpan1' : 12345678876543211234567887654321 8765432112345678 "
+ "INTERNAL [tracer: tracer1:] "
+ "{animal=\"cat\", lives=9}");
assertThat(logs.getEvents().get(1).getMessage())
.isEqualTo(
"'testSpan2' : 12340000000043211234000000004321 8765000000005678 "
+ "CLIENT [tracer: tracer2:1.0] {}");
}
@Test
void returnCode() {
long epochNanos = TimeUnit.MILLISECONDS.toNanos(System.currentTimeMillis());
SpanData spanData =
TestSpanData.builder()
.setHasEnded(true)
.setSpanContext(
SpanContext.create(
"12345678876543211234567887654321",
"8765432112345678",
TraceFlags.getSampled(),
TraceState.getDefault()))
.setStartEpochNanos(epochNanos)
.setEndEpochNanos(epochNanos + 1000)
.setStatus(StatusData.ok())
.setName("testSpan")
.setKind(SpanKind.INTERNAL)
.setEvents(
Collections.singletonList(
EventData.create(
epochNanos + 500,
"somethingHappenedHere",
Attributes.of(booleanKey("important"), true))))
.setTotalRecordedEvents(1)
.setTotalRecordedLinks(0)
.build();
CompletableResultCode resultCode = exporter.export(singletonList(spanData));
assertThat(resultCode.isSuccess()).isTrue();
}
@Test
void testFlush() {
final AtomicBoolean flushed = new AtomicBoolean(false);
Logger.getLogger(LoggingSpanExporter.class.getName())
.addHandler(
new StreamHandler(new PrintStream(new ByteArrayOutputStream()), new SimpleFormatter()) {
@Override
public synchronized void flush() {
flushed.set(true);
}
});
exporter.flush();
assertThat(flushed.get()).isTrue();
}
}
|
package lsh.ext.gson.ext.com.google.common.collect;
import java.io.IOException;
import java.util.Map;
import com.google.common.base.Converter;
import com.google.common.base.Supplier;
import com.google.common.collect.HashBasedTable;
import com.google.common.collect.Table;
import com.google.gson.TypeAdapter;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
/**
* Represents a type adapter for {@link Table} from Google Guava.
*
* @author Lyubomyr Shaydariv
* @see TableTypeAdapterFactory
* @since 0-SNAPSHOT
*/
public final class TableTypeAdapter<R, C, V>
extends TypeAdapter<Table<R, C, V>> {
private static final Supplier<? extends Table<?, ?, ?>> defaultNewTableFactory = HashBasedTable::create;
private static final Converter<?, ?> defaultKeyConverter = Converter.identity();
private final TypeAdapter<V> valueTypeAdapter;
private final Supplier<? extends Table<R, C, V>> newTableFactory;
private final Converter<R, String> rowKeyConverter;           // R -> JSON property name
private final Converter<String, R> reverseRowKeyConverter;    // JSON property name -> R
private final Converter<C, String> forwardColumnKeyConverter; // C -> JSON property name
private final Converter<String, C> columnKeyConverter;        // JSON property name -> C (reverse direction)
private TableTypeAdapter(final TypeAdapter<V> valueTypeAdapter, final Supplier<? extends Table<R, C, V>> newTableFactory,
final Converter<R, String> rowKeyConverter, final Converter<C, String> forwardColumnKeyConverter) {
this.valueTypeAdapter = valueTypeAdapter;
this.newTableFactory = newTableFactory;
this.rowKeyConverter = rowKeyConverter;
reverseRowKeyConverter = rowKeyConverter.reverse();
this.forwardColumnKeyConverter = forwardColumnKeyConverter;
columnKeyConverter = forwardColumnKeyConverter.reverse();
}
/**
* @param valueTypeAdapter Table value type adapter
* @param <V> Table value type
*
* @return A {@link TableTypeAdapter} instance.
*
* @since 0-SNAPSHOT
*/
public static <V> TypeAdapter<Table<String, String, V>> create(final TypeAdapter<V> valueTypeAdapter) {
@SuppressWarnings("unchecked")
final Supplier<? extends Table<String, String, V>> newTableFactory = (Supplier<? extends Table<String, String, V>>) defaultNewTableFactory;
@SuppressWarnings("unchecked")
final Converter<String, String> rowKeyConverter = (Converter<String, String>) defaultKeyConverter;
@SuppressWarnings("unchecked")
final Converter<String, String> columnKeyConverter = (Converter<String, String>) defaultKeyConverter;
return create(valueTypeAdapter, newTableFactory, rowKeyConverter, columnKeyConverter);
}
/**
* @param valueTypeAdapter Table value type adapter
* @param newTableFactory A {@link Table} factory to create instance used while deserialization
* @param <V> Table value type
*
* @return A {@link TableTypeAdapter} instance.
*
* @since 0-SNAPSHOT
*/
public static <V> TypeAdapter<Table<String, String, V>> create(final TypeAdapter<V> valueTypeAdapter,
final Supplier<? extends Table<String, String, V>> newTableFactory) {
@SuppressWarnings("unchecked")
final Converter<String, String> rowKeyConverter = (Converter<String, String>) defaultKeyConverter;
@SuppressWarnings("unchecked")
final Converter<String, String> columnKeyConverter = (Converter<String, String>) defaultKeyConverter;
return create(valueTypeAdapter, newTableFactory, rowKeyConverter, columnKeyConverter);
}
/**
* @param valueTypeAdapter Table value type adapter
* @param rowKeyConverter A converter to convert row key to JSON object property names
* @param columnKeyConverter A converter to convert column key to JSON object property names
* @param <R> Table row type
* @param <C> Table column type
* @param <V> Table value type
*
* @return A {@link TableTypeAdapter} instance.
*
* @since 0-SNAPSHOT
*/
public static <R, C, V> TypeAdapter<Table<R, C, V>> create(final TypeAdapter<V> valueTypeAdapter, final Converter<R, String> rowKeyConverter,
final Converter<C, String> columnKeyConverter) {
@SuppressWarnings("unchecked")
final Supplier<? extends Table<R, C, V>> newTableFactory = (Supplier<? extends Table<R, C, V>>) defaultNewTableFactory;
return create(valueTypeAdapter, newTableFactory, rowKeyConverter, columnKeyConverter);
}
/**
* @param valueTypeAdapter Table value type adapter
* @param newTableFactory A {@link Table} factory to create instance used while deserialization
* @param rowKeyConverter A converter to convert row key to JSON object property names
* @param columnKeyConverter A converter to convert column key to JSON object property names
* @param <R> Table row type
* @param <C> Table column type
* @param <V> Table value type
*
* @return A {@link TableTypeAdapter} instance.
*
* @since 0-SNAPSHOT
*/
public static <R, C, V> TypeAdapter<Table<R, C, V>> create(final TypeAdapter<V> valueTypeAdapter, final Supplier<? extends Table<R, C, V>> newTableFactory,
final Converter<R, String> rowKeyConverter, final Converter<C, String> columnKeyConverter) {
return new TableTypeAdapter<>(valueTypeAdapter, newTableFactory, rowKeyConverter, columnKeyConverter)
.nullSafe();
}
@Override
public void write(final JsonWriter out, final Table<R, C, V> table)
throws IOException {
out.beginObject();
final Map<R, Map<C, V>> rowMap = table.rowMap();
for ( final Map.Entry<R, Map<C, V>> rowEntry : rowMap.entrySet() ) {
final String rowKey = rowKeyConverter.convert(rowEntry.getKey());
out.name(rowKey);
out.beginObject();
final Map<C, V> columnMap = rowEntry.getValue();
for ( final Map.Entry<C, V> columnEntry : columnMap.entrySet() ) {
final String columnKey = forwardColumnKeyConverter.convert(columnEntry.getKey());
final V value = columnEntry.getValue();
out.name(columnKey);
valueTypeAdapter.write(out, value);
}
out.endObject();
}
out.endObject();
}
@Override
public Table<R, C, V> read(final JsonReader in)
throws IOException {
final Table<R, C, V> table = newTableFactory.get();
in.beginObject();
while ( in.hasNext() ) {
final R rowKey = reverseRowKeyConverter.convert(in.nextName());
in.beginObject();
while ( in.hasNext() ) {
final C columnKey = columnKeyConverter.convert(in.nextName());
final V value = valueTypeAdapter.read(in);
table.put(rowKey, columnKey, value);
}
in.endObject();
}
in.endObject();
return table;
}
}
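// Editor's note: a minimal, hypothetical round-trip sketch for the adapter above
// (the demo class and its literals are illustrative, not part of this library):
class TableTypeAdapterDemo {
    public static void main(final String[] args) throws IOException {
        final TypeAdapter<Table<String, String, String>> adapter =
                TableTypeAdapter.create(new com.google.gson.Gson().getAdapter(String.class));
        final Table<String, String, String> table = HashBasedTable.create();
        table.put("row", "col", "value");
        final String json = adapter.toJson(table);   // {"row":{"col":"value"}}
        System.out.println(adapter.fromJson(json));  // parses back into an equal table
    }
}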
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.lang.sqlpp.util;
import java.io.ByteArrayOutputStream;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import org.apache.asterix.common.exceptions.AsterixException;
import org.apache.asterix.lang.common.base.IAstPrintVisitorFactory;
import org.apache.asterix.lang.common.base.ILangExpression;
import org.apache.asterix.lang.common.base.Statement;
import org.apache.asterix.lang.common.visitor.QueryPrintVisitor;
import org.apache.asterix.lang.sqlpp.visitor.SqlppAstPrintVisitorFactory;
public class SqlppAstPrintUtil {
private static final IAstPrintVisitorFactory astPrintVisitorFactory = new SqlppAstPrintVisitorFactory();
private SqlppAstPrintUtil() {
}
/**
* Prints the AST (abstract syntax tree) of an ILangExpression.
*
* @param expr
* the language expression.
* @param output
* a writer for printing strings.
* @throws AsterixException
*/
public static void print(ILangExpression expr, PrintWriter output) throws AsterixException {
QueryPrintVisitor visitor = astPrintVisitorFactory.createLangVisitor(output);
expr.accept(visitor, 0);
output.flush();
}
/**
* Prints the AST of a list of top-level language statements.
*
* @param statements
* a list of statements of a query
* @param output
* a writer for printing strings.
* @throws AsterixException
*/
public static void print(List<Statement> statements, PrintWriter output) throws AsterixException {
QueryPrintVisitor visitor = astPrintVisitorFactory.createLangVisitor(output);
for (Statement statement : statements) {
statement.accept(visitor, 0);
}
output.flush();
}
/**
* @param expr
* a language expression.
* @return the AST of a language expression.
* @throws AsterixException
*/
public static String toString(ILangExpression expr) throws AsterixException {
List<ILangExpression> exprs = new ArrayList<>();
exprs.add(expr);
return toString(exprs);
}
/**
* @param exprs
 * a list of language expressions.
* @return an AST of the input language expressions.
* @throws AsterixException
*/
public static String toString(List<ILangExpression> exprs) throws AsterixException {
ByteArrayOutputStream bos = new ByteArrayOutputStream();
PrintWriter output = new PrintWriter(bos);
QueryPrintVisitor visitor = astPrintVisitorFactory.createLangVisitor(output);
for (ILangExpression expr : exprs) {
expr.accept(visitor, 0);
}
output.close();
return new String(bos.toByteArray());
}
}
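// Editor's note: a minimal usage sketch, assuming an expression already parsed
// by the SQL++ parser (obtaining one is outside the scope of this utility):
//
//   ILangExpression expr = ...;                      // from the SQL++ parser
//   String ast = SqlppAstPrintUtil.toString(expr);   // AST rendered as a string
//   SqlppAstPrintUtil.print(expr, new PrintWriter(System.out));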
|
package org.itjing.lambda;
import java.util.function.Predicate;
/**
* @author lijing
 * @date 2021-12-09 16:15
* @description
*/
public class LambdaDemo11 {
public static void main(String[] args) {
// Use a lambda expression to check that a string contains both W and H
testAnd(s1 -> s1.contains("W"), s2 -> s2.contains("H"), "Hello World");
// Use a lambda expression to check that a string contains W or H
testOr(s1 -> s1.contains("W"), s2 -> s2.contains("H"), "Hello World");
// Use a lambda expression to check that a string does not contain W
testNegate(s1 -> s1.contains("W"), "Hello world");
}
public static void testAnd(Predicate<String> p1, Predicate<String> p2, String str) {
boolean test = p1.and(p2).test(str);
if (test) {
System.out.println(str + "既包含W也包含H");
}
}
public static void testOr(Predicate<String> p1, Predicate<String> p2, String str) {
boolean test = p1.or(p2).test(str);
if (test) {
System.out.println(str + "包含W或包含H");
}
}
public static void testNegate(Predicate<String> p1, String str) {
boolean test = p1.negate().test(str);
if (test) {
System.out.println(str + "不包含W");
}
}
}
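// Editor's note: expected console output for main above, given the translated
// messages: "Hello World contains both W and H", "Hello World contains W or H",
// and "Hello world does not contain W".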
|
package org.batfish.dataplane.protocols;
import static org.batfish.datamodel.ResolutionRestriction.alwaysTrue;
import static org.batfish.dataplane.ibdp.TestUtils.annotateRoute;
import static org.batfish.dataplane.protocols.StaticRouteHelper.shouldActivateNextHopIpRoute;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import org.batfish.datamodel.ConnectedRoute;
import org.batfish.datamodel.Ip;
import org.batfish.datamodel.Prefix;
import org.batfish.datamodel.StaticRoute;
import org.batfish.dataplane.rib.Rib;
import org.junit.Before;
import org.junit.Test;
/** Tests for {@link StaticRouteHelper} */
public final class StaticRouteHelperTest {
private Rib _rib;
@Before
public void setup() {
// Empty rib before each test
_rib = new Rib();
}
/** Check no static routes are activated if RIB is empty */
@Test
public void testShouldActivateEmptyRib() {
Ip nextHop = Ip.parse("1.1.1.1");
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(nextHop)
.setAdministrativeCost(1)
.build();
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(false));
}
/** Do not activate if no match for nextHop IP exists */
@Test
public void testShouldActivateNoMatch() {
_rib.mergeRoute(annotateRoute(new ConnectedRoute(Prefix.parse("1.1.1.0/24"), "Eth0")));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("2.2.2.2"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(false));
}
/** Activate if next hop IP matches a route */
@Test
public void testShouldActivateMatch() {
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface("Eth0")
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(true));
}
/** Do not activate if the route to the next hop IP has the same prefix as the route in question. */
@Test
public void testShouldActivateSelfReferential() {
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.2"))
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("9.9.9.9"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(false));
}
/**
* Activate a route matching its own next-hop IP if there is a more specific matching route
* already in the RIB
*/
@Test
public void testShouldActivateIfExists() {
StaticRoute matching =
StaticRoute.testBuilder().setNetwork(Prefix.strict("1.1.1.1/32")).build();
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.1.1.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.build();
_rib.mergeRoute(annotateRoute(matching));
// Test & Assert
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(true));
}
/** Activate if route exists for the same prefix but next hop is different */
@Test
public void testShouldActivateWithDiffNextHops() {
// base route
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface("Eth0")
.setAdministrativeCost(1)
.build()));
// Static route 1, same network as sr, but different next hop ip
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.2"))
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(true));
}
/** Allow activation in the RIB even if there would be a FIB resolution loop. */
@Test
public void testShouldActivateWithLoop() {
/*
* Route dependency graph
* 9.9.9.0/24 (nh: 1.1.1.1) --> 1.1.1.0/24 (nh=2.2.2.2) -> 2.2.2.0/24 (nh=9.9.9.9) -> 9.9.9.0/24
*/
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.1.1.0/24"))
.setNextHopIp(Ip.parse("2.2.2.2"))
.setAdministrativeCost(1)
.build()));
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("2.2.2.0/24"))
.setNextHopIp(Ip.parse("9.9.9.9"))
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertThat(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()), equalTo(true));
}
/**
* Do not allow installation of a route that would become longest prefix match for its own next
* hop IP.
*/
@Test
public void testShouldActivateIfCovered() {
_rib.mergeRoute(annotateRoute(new ConnectedRoute(Prefix.parse("9.9.0.0/16"), "Eth0")));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("9.9.9.9"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertFalse(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()));
}
/**
 * Activate if the route is recursive and the next hop IP matches a route that is permitted by the restriction
*/
@Test
public void testShouldActivateRecursiveRestrictionPermits() {
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface("Eth0")
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertTrue(shouldActivateNextHopIpRoute(sr, _rib, r -> r.getNetwork().getPrefixLength() == 8));
}
/**
 * Do not activate if the route is recursive but the next hop IP matches no route that is
 * permitted by the restriction
*/
@Test
public void testShouldActivateRecursiveRestrictionDenies() {
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface("Eth0")
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.build();
// Test & Assert
assertFalse(
shouldActivateNextHopIpRoute(sr, _rib, r -> r.getNetwork().getPrefixLength() == 16));
}
/**
 * Do not activate if the route is non-recursive and the only routes matching the next hop IP
 * are non-connected
*/
@Test
public void testShouldActivateNonRecursiveNoConnected() {
_rib.mergeRoute(
annotateRoute(
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("1.0.0.0/8"))
.setNextHopInterface("Eth0")
.setAdministrativeCost(1)
.build()));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.setRecursive(false)
.build();
// Test & Assert
assertFalse(shouldActivateNextHopIpRoute(sr, _rib, alwaysTrue()));
}
/**
* Activate if route is non-recursive and the next hop IP matches a connected route, even if the
* connected route does not match the restriction.
*/
@Test
public void testShouldActivateNonRecursiveConnected() {
_rib.mergeRoute(annotateRoute(new ConnectedRoute(Prefix.parse("1.0.0.0/8"), "Eth0")));
// Route in question
StaticRoute sr =
StaticRoute.testBuilder()
.setNetwork(Prefix.parse("9.9.9.0/24"))
.setNextHopIp(Ip.parse("1.1.1.1"))
.setAdministrativeCost(1)
.setRecursive(false)
.build();
// Test & Assert
assertTrue(shouldActivateNextHopIpRoute(sr, _rib, r -> false));
}
}
|
package invoker;
import java.util.ArrayList;
import java.util.List;
import command.Command;
import command.ExitCommand;
import command.OpenCommand;
import receiver.Receiver;
public class Invoker {
private List<Command> commands;
private Command command;
public Invoker(Receiver receiver) {
commands = new ArrayList<Command>();
commands.add(new OpenCommand(receiver));
commands.add(new ExitCommand(receiver));
}
public void setCommand(TypeCommand typeCommand) {
switch (typeCommand) {
case OPEN:
command = commands.get(TypeCommand.OPEN.ordinal());
break;
case EXIT:
command = commands.get(TypeCommand.EXIT.ordinal());
break;
default:
break;
}
}
public void execute() {
command.execute();
}
}
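// Editor's note: a minimal, hypothetical usage sketch for the command pattern
// above (assumes Receiver has a no-arg constructor; this demo class is not part
// of the original file):
class InvokerDemo {
    public static void main(String[] args) {
        Invoker invoker = new Invoker(new Receiver());
        invoker.setCommand(TypeCommand.OPEN);
        invoker.execute(); // delegates to OpenCommand.execute()
    }
}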
|
package gregtech.common.tileentities.storage;
import gregtech.api.enums.Textures;
import gregtech.api.interfaces.ITexture;
import gregtech.api.interfaces.tileentity.IGregTechTileEntity;
import gregtech.api.metatileentity.MetaTileEntity;
import gregtech.api.metatileentity.implementations.GT_MetaTileEntity_StorageTank;
import gregtech.api.render.TextureFactory;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumChatFormatting;
import net.minecraftforge.common.util.ForgeDirection;
import net.minecraftforge.fluids.FluidStack;
import net.minecraftforge.fluids.IFluidHandler;
import java.text.NumberFormat;
import static gregtech.api.enums.Textures.BlockIcons.*;
public class GT_MetaTileEntity_SuperTank extends GT_MetaTileEntity_StorageTank {
public GT_MetaTileEntity_SuperTank(int aID, String aName, String aNameRegional, int aTier) {
super(aID, aName, aNameRegional, aTier, 3, "Stores " + NumberFormat.getNumberInstance().format(CommonSizeCompute(aTier)) + "L of fluid");
}
public GT_MetaTileEntity_SuperTank(String aName, int aTier, String aDescription, ITexture[][][] aTextures) {
super(aName, aTier, 3, aDescription, aTextures);
}
public GT_MetaTileEntity_SuperTank(String aName, int aTier, String[] aDescription, ITexture[][][] aTextures) {
super(aName, aTier, 3, aDescription, aTextures);
}
@Override
public MetaTileEntity newMetaEntity(IGregTechTileEntity aTileEntity) {
return new GT_MetaTileEntity_SuperTank(mName, mTier, mDescription, mTextures);
}
@Override
protected Textures.BlockIcons textureGlowOverlay() {
return OVERLAY_STANK_GLOW;
}
@Override
protected Textures.BlockIcons textureOverlay() {
return OVERLAY_STANK;
}
public String[] getDescription() {
return new String[] {this.mDescription};
}
@Override
public boolean onRightclick(IGregTechTileEntity aBaseMetaTileEntity, EntityPlayer aPlayer) {
if (aBaseMetaTileEntity.isClientSide()) return true;
aBaseMetaTileEntity.openGUI(aPlayer);
return true;
}
@Override
public boolean isSimpleMachine() {
return true;
}
@Override
public boolean isFacingValid(byte aFacing) {
return true;
}
@Override
public boolean isAccessAllowed(EntityPlayer aPlayer) {
return true;
}
@Override
public final byte getUpdateData() {
return 0x00;
}
@Override
public boolean doesFillContainers() {
return true;
}
@Override
public boolean doesEmptyContainers() {
return true;
}
@Override
public boolean canTankBeFilled() {
return true;
}
@Override
public boolean canTankBeEmptied() {
return true;
}
@Override
public boolean displaysItemStack() {
return true;
}
@Override
public boolean displaysStackSize() {
return false;
}
@Override
public String[] getInfoData() {
if (mFluid == null) {
return new String[]{
EnumChatFormatting.BLUE + "Super Tank"+ EnumChatFormatting.RESET,
"Stored Fluid:",
EnumChatFormatting.GOLD + "No Fluid"+ EnumChatFormatting.RESET,
EnumChatFormatting.GREEN + Integer.toString(0) + " L"+ EnumChatFormatting.RESET+" "+
EnumChatFormatting.YELLOW + NumberFormat.getNumberInstance().format(getCapacity()) + " L"+ EnumChatFormatting.RESET
};
}
return new String[]{
EnumChatFormatting.BLUE + "Super Tank"+ EnumChatFormatting.RESET,
"Stored Fluid:",
EnumChatFormatting.GOLD + mFluid.getLocalizedName()+ EnumChatFormatting.RESET,
EnumChatFormatting.GREEN + NumberFormat.getNumberInstance().format(mFluid.amount) + " L"+ EnumChatFormatting.RESET+" "+
EnumChatFormatting.YELLOW+ NumberFormat.getNumberInstance().format(getCapacity()) + " L"+ EnumChatFormatting.RESET
};
}
@Override
public boolean isGivingInformation() {
return true;
}
private static int CommonSizeCompute(int tier){
switch(tier){
case 0:
return 1000000;
case 1:
return 4000000;
case 2:
return 8000000;
case 3:
return 16000000;
case 4:
return 32000000;
case 5:
return 64000000;
default:
return 0;
}
}
@Override
public int getCapacity() {
return CommonSizeCompute(mTier);
}
@Override
public int getTankPressure() {
return 100;
}
}
|
/*
* Copyright (c) 2005-2011 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.accounts.productsmix.util;
import java.util.Date;
import org.mifos.accounts.productdefinition.business.SavingsOfferingBO;
import org.mifos.accounts.productdefinition.util.helpers.ApplicableTo;
import org.mifos.accounts.productdefinition.util.helpers.InterestCalcType;
import org.mifos.accounts.productdefinition.util.helpers.PrdStatus;
import org.mifos.accounts.productdefinition.util.helpers.RecommendedAmountUnit;
import org.mifos.accounts.productdefinition.util.helpers.SavingsType;
import org.mifos.application.meeting.business.MeetingBO;
import org.mifos.framework.exceptions.PersistenceException;
import org.mifos.framework.util.helpers.TestObjectFactory;
public class ProductMixTestHelper {
public static SavingsOfferingBO createSavingOffering(String name, String shortName, MeetingBO meeting1,
MeetingBO meeting2) throws PersistenceException {
return TestObjectFactory.createSavingsProduct(name, shortName, ApplicableTo.CLIENTS, new Date(System
.currentTimeMillis()), PrdStatus.SAVINGS_ACTIVE, 300.0, RecommendedAmountUnit.PER_INDIVIDUAL, 1.2,
200.0, 200.0, SavingsType.VOLUNTARY, InterestCalcType.MINIMUM_BALANCE, meeting1, meeting2);
}
}
|
package com.company.professor;
import com.company.professor.DaoSession;
import de.greenrobot.dao.DaoException;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT. Enable "keep" sections if you want to edit.
/**
* Entity mapped to table "stat_names".
*/
public class StatNames {
private long StatId;
private long LocalLanguageId;
/** Not-null value. */
private String Name;
/** Used to resolve relations */
private transient DaoSession daoSession;
/** Used for active entity operations. */
private transient StatNamesDao myDao;
private Stats Stats;
private Long Stats__resolvedKey;
private Languages Languages;
private Long Languages__resolvedKey;
public StatNames() {
}
public StatNames(long StatId) {
this.StatId = StatId;
}
public StatNames(long StatId, long LocalLanguageId, String Name) {
this.StatId = StatId;
this.LocalLanguageId = LocalLanguageId;
this.Name = Name;
}
/** called by internal mechanisms, do not call yourself. */
public void __setDaoSession(DaoSession daoSession) {
this.daoSession = daoSession;
myDao = daoSession != null ? daoSession.getStatNamesDao() : null;
}
public long getStatId() {
return StatId;
}
public void setStatId(long StatId) {
this.StatId = StatId;
}
public long getLocalLanguageId() {
return LocalLanguageId;
}
public void setLocalLanguageId(long LocalLanguageId) {
this.LocalLanguageId = LocalLanguageId;
}
/** Not-null value. */
public String getName() {
return Name;
}
/** Not-null value; ensure this value is available before it is saved to the database. */
public void setName(String Name) {
this.Name = Name;
}
/** To-one relationship, resolved on first access. */
public Stats getStats() {
long __key = this.StatId;
if (Stats__resolvedKey == null || !Stats__resolvedKey.equals(__key)) {
if (daoSession == null) {
throw new DaoException("Entity is detached from DAO context");
}
StatsDao targetDao = daoSession.getStatsDao();
Stats StatsNew = targetDao.load(__key);
synchronized (this) {
Stats = StatsNew;
Stats__resolvedKey = __key;
}
}
return Stats;
}
public void setStats(Stats Stats) {
if (Stats == null) {
throw new DaoException("To-one property 'StatId' has not-null constraint; cannot set to-one to null");
}
synchronized (this) {
this.Stats = Stats;
StatId = Stats.getId();
Stats__resolvedKey = StatId;
}
}
/** To-one relationship, resolved on first access. */
public Languages getLanguages() {
long __key = this.LocalLanguageId;
if (Languages__resolvedKey == null || !Languages__resolvedKey.equals(__key)) {
if (daoSession == null) {
throw new DaoException("Entity is detached from DAO context");
}
LanguagesDao targetDao = daoSession.getLanguagesDao();
Languages LanguagesNew = targetDao.load(__key);
synchronized (this) {
Languages = LanguagesNew;
Languages__resolvedKey = __key;
}
}
return Languages;
}
public void setLanguages(Languages Languages) {
if (Languages == null) {
throw new DaoException("To-one property 'LocalLanguageId' has not-null constraint; cannot set to-one to null");
}
synchronized (this) {
this.Languages = Languages;
LocalLanguageId = Languages.getId();
Languages__resolvedKey = LocalLanguageId;
}
}
/** Convenient call for {@link AbstractDao#delete(Object)}. Entity must be attached to an entity context. */
public void delete() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.delete(this);
}
/** Convenient call for {@link AbstractDao#update(Object)}. Entity must be attached to an entity context. */
public void update() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.update(this);
}
/** Convenient call for {@link AbstractDao#refresh(Object)}. Entity must be attached to an entity context. */
public void refresh() {
if (myDao == null) {
throw new DaoException("Entity is detached from DAO context");
}
myDao.refresh(this);
}
}
|
package com.ecorp.gorillamail.entities;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import org.hibernate.annotations.Fetch;
import org.hibernate.annotations.FetchMode;
@NoArgsConstructor
@Entity
@Table( name = "ExternalResources" )
@NamedQueries({
@NamedQuery(name = ExternalResource.QUERY_BY_SHORT_URL, query = "SELECT e FROM ExternalResource e WHERE e.shortenedUrl = :url"),
@NamedQuery(name = ExternalResource.QUERY_BY_ORIGINAL_URL, query = "SELECT e FROM ExternalResource e WHERE e.originalUrl = :url"),
})
public class ExternalResource extends AbstractLongEntity {
public static final String QUERY_BY_SHORT_URL = "query_by_short_url",
QUERY_BY_ORIGINAL_URL = "query_by_original_url";
private static final long serialVersionUID = 0L;
@Column( nullable = false )
@Getter
@Setter
private String originalUrl;
@Column( nullable = false )
@Getter
@Setter
private String shortenedUrl;
@ManyToOne
@JoinColumn( name = "template" )
@Getter
@Setter
private Template template = null;
@Fetch( FetchMode.SELECT )
@OneToMany( mappedBy = "resource", fetch = FetchType.EAGER, cascade = CascadeType.ALL, orphanRemoval = true )
@Getter
private Set<VisitorInformation> visitors = new HashSet<>();
public ExternalResource(String originalUrl, String shortenedUrl) {
setOriginalUrl(originalUrl);
setShortenedUrl(shortenedUrl);
}
}
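// Hedged usage sketch (assumes a JPA EntityManager `em` and a `shortUrl` variable are in scope;
// neither is part of the original entity):
//
//   ExternalResource resource = em
//       .createNamedQuery(ExternalResource.QUERY_BY_SHORT_URL, ExternalResource.class)
//       .setParameter("url", shortUrl)
//       .getSingleResult();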
|
package com.codeosseum.ares.eventbus.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class MapperConfig {
@Bean
public ObjectMapper eventMapperObjectMapper() {
return new ObjectMapper();
}
}
|
package net.ontrack.core;
public interface Patterns {
String NAME_PATTERN = "[A-Za-z0-9_\\.\\-]*";
String ACCOUNT_NAME_PATTERN = "[A-Za-z0-9_]*";
}
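// Hedged usage sketch (not part of the original interface): validating input with java.util.regex.
//
//   boolean validName = java.util.regex.Pattern.matches(Patterns.NAME_PATTERN, "my-project_1.0");     // true
//   boolean validAccount = java.util.regex.Pattern.matches(Patterns.ACCOUNT_NAME_PATTERN, "jdoe_42"); // true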
|
class Solution {
    // Returns the length of the last word in the string, e.g. "Hello World " -> 5.
    public int XXX(String s) {
        s = s.trim();
        int last = s.lastIndexOf(' ');
        return s.length() - (last + 1);
    }
}
|
package ncu.cc.digger.repositories;
import ncu.cc.digger.entities.SceneReportViewEntity;
import ncu.cc.digger.entities.UnivReportViewEntity;
import ncu.cc.digger.models.statistics.*;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import java.util.List;
public interface UnivReportViewRepository extends JpaRepository<UnivReportViewEntity,String> {
Page<UnivReportViewEntity> findAllByOrderByRank(Pageable pageable);
Page<UnivReportViewEntity> findAllByCountryCodeOrderByRank(String countryCode, Pageable pageable);
Page<UnivReportViewEntity> findAllByOrderBySeverityUrgentAscSeverityHighAscSeverityMediumAscSeverityLowAscSeverityInfoAscZoneIdAsc(Pageable pageable);
Page<UnivReportViewEntity> findByCountryCodeOrderBySeverityUrgentAscSeverityHighAscSeverityMediumAscSeverityLowAscSeverityInfoAscZoneIdAsc(String countryCode, Pageable pageable);
@Query("SELECT new ncu.cc.digger.models.statistics.SeverityLevelStatistics(e.severityLevel, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.severityLevel")
List<SeverityLevelStatistics> findSeverityLevelStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.NonCompliantEdnsStatistics(e.nonCompliantEdns, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.nonCompliantEdns")
List<NonCompliantEdnsStatistics> findNonCompliantEdnsStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.IPv6AvailableStatistics(e.ipv6Available, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.ipv6Available")
List<IPv6AvailableStatistics> findIPv6AvailableStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.DnssecEnabledStatistics(e.dnssecEnabled, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.dnssecEnabled")
List<DnssecEnabledStatistics> findDnssecEnabledStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.NumberOfProblemsStatistics(e.numberOfProblems, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.numberOfProblems")
List<NumberOfProblemsStatistics> findNumberOfProblemsStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.NumberOfServersStatistics(e.numberOfServers, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.numberOfServers")
List<NumberOfServersStatistics> findNumberOfServersStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.OpenAxfrStatistics(e.openAxfr, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.openAxfr")
List<OpenAxfrStatistics> findOpenAxfrStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.OpenRecursiveStatistics(e.openRecursive, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.openRecursive")
List<OpenRecursiveStatistics> findOpenRecursiveStatistics();
@Query("SELECT new ncu.cc.digger.models.statistics.RrsetInconsistencyStatistics(e.rrsetInconsistency, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.updatedAt IS NOT NULL " +
"GROUP BY e.rrsetInconsistency")
List<RrsetInconsistencyStatistics> findRrsetInconsistencyStatistics();
////////////// With Country Code
@Query("SELECT new ncu.cc.digger.models.statistics.SeverityLevelStatistics(e.severityLevel, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.severityLevel")
List<SeverityLevelStatistics> findSeverityLevelStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.NonCompliantEdnsStatistics(e.nonCompliantEdns, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.nonCompliantEdns")
List<NonCompliantEdnsStatistics> findNonCompliantEdnsStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.IPv6AvailableStatistics(e.ipv6Available, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.ipv6Available")
List<IPv6AvailableStatistics> findIPv6AvailableStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.DnssecEnabledStatistics(e.dnssecEnabled, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.dnssecEnabled")
List<DnssecEnabledStatistics> findDnssecEnabledStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.NumberOfProblemsStatistics(e.numberOfProblems, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.numberOfProblems")
List<NumberOfProblemsStatistics> findNumberOfProblemsStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.NumberOfServersStatistics(e.numberOfServers, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.numberOfServers")
List<NumberOfServersStatistics> findNumberOfServersStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.OpenAxfrStatistics(e.openAxfr, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.openAxfr")
List<OpenAxfrStatistics> findOpenAxfrStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.OpenRecursiveStatistics(e.openRecursive, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.openRecursive")
List<OpenRecursiveStatistics> findOpenRecursiveStatistics(String countryCode);
@Query("SELECT new ncu.cc.digger.models.statistics.RrsetInconsistencyStatistics(e.rrsetInconsistency, COUNT(e)) " +
"FROM #{#entityName} e " +
"WHERE e.countryCode = ?1 AND e.updatedAt IS NOT NULL " +
"GROUP BY e.rrsetInconsistency")
List<RrsetInconsistencyStatistics> findRrsetInconsistencyStatistics(String countryCode);
}
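// Hedged usage sketch (assumes an injected repository instance; not part of the original interface):
//
//   Page<UnivReportViewEntity> firstPage =
//       univReportViewRepository.findAllByOrderByRank(PageRequest.of(0, 20));
//   List<SeverityLevelStatistics> byCountry =
//       univReportViewRepository.findSeverityLevelStatistics("TW");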
|
package com.chenjie.sbootscheduling.task;
import lombok.extern.slf4j.Slf4j;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
* @Author chenjie
* @Date 2018/10/10 23:18
* @Description:
*/
@Component
@Slf4j
public class ScheduledTask {
private static final SimpleDateFormat dateFormat = new SimpleDateFormat("HH:mm:ss");
/**
 * Adding the @Scheduled annotation to a method marks it as a scheduled task.
 * @Scheduled(fixedRate = 5000): runs 5 seconds after the previous execution started
 * @Scheduled(fixedDelay = 5000): runs 5 seconds after the previous execution finished
 * @Scheduled(initialDelay = 1000, fixedRate = 5000): first run after a 1-second delay, then every 5 seconds per fixedRate
 * @Scheduled(cron = "0/5 * * * * *"): rule defined by a cron expression (this one fires every 5 seconds)
 */
@Scheduled(fixedRate = 10000)
public void reportCurrentTime() {
log.info("定时任务,现在时间是: {}", dateFormat.format(new Date()));
}
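// Hedged sketch: two further variants illustrating the annotations documented above.
// The method names and 5-second values are illustrative, not part of the original task.
@Scheduled(fixedDelay = 5000)
public void reportAfterPreviousRunCompletes() {
    log.info("fixedDelay task, current time: {}", dateFormat.format(new Date()));
}
@Scheduled(cron = "0/5 * * * * *")
public void reportEveryFiveSecondsViaCron() {
    log.info("cron task, current time: {}", dateFormat.format(new Date()));
}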
}
|
package org.ds.algos.practice.ds.algos.sorting;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class MergeSortTest {
MergeSort mergeSort;
public MergeSortTest() {
mergeSort = new MergeSort();
}
@Test
void shouldSortAnUnsortedArray() {
int[] arr = {1,5,8,1,9};
int[] sortedArray = {1,1,5,8,9};
assertArrayEquals(sortedArray, mergeSort.sort(arr));
}
@Test
void shouldSortAnAlreadySortedArray() {
int[] sortedArray = {1,1,5,8,9};
assertArrayEquals(sortedArray, mergeSort.sort(sortedArray));
}
@Test
void shouldHandleEmptyArrayWithoutError() {
int[] sortedArray = {};
assertArrayEquals(sortedArray, mergeSort.sort(sortedArray));
}
@Test
void shouldThrowExceptionForNullArray() {
assertThrows(IllegalArgumentException.class, () -> mergeSort.sort(null));
}
}
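// A minimal MergeSort sketch satisfying the contract the tests above assume
// (returns a sorted array, throws IllegalArgumentException on null). The actual
// implementation is not included in this dump; this version is illustrative only.
class MergeSort {
    public int[] sort(int[] arr) {
        if (arr == null) throw new IllegalArgumentException("array must not be null");
        if (arr.length < 2) return arr; // empty or single-element arrays are already sorted
        int mid = arr.length / 2;
        int[] left = sort(java.util.Arrays.copyOfRange(arr, 0, mid));
        int[] right = sort(java.util.Arrays.copyOfRange(arr, mid, arr.length));
        int[] merged = new int[arr.length];
        int i = 0, j = 0, k = 0;
        while (i < left.length && j < right.length) // merge the two sorted halves
            merged[k++] = left[i] <= right[j] ? left[i++] : right[j++];
        while (i < left.length) merged[k++] = left[i++];
        while (j < right.length) merged[k++] = right[j++];
        return merged;
    }
}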
|
/*
* Copyright 2019 WeBank
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.webank.wedatasphere.dss.appconn.eventchecker.adapter;
import org.apache.log4j.Logger;
import java.util.Properties;
public interface EventCheckAdapter {
boolean sendMsg(int jobId, Properties props, Logger log);
boolean receiveMsg(int jobId, Properties props, Logger log);
}
|
/*
* Copyright 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.fedoracoin.core;
import java.util.List;
/**
* The "getheaders" command is structurally identical to "getblocks", but has different meaning. On receiving this
* message a Bitcoin node returns matching blocks up to the limit, but without the bodies. It is useful as an
* optimization: when your wallet does not contain any keys created before a particular time, you don't have to download
* the bodies for those blocks because you know there are no relevant transactions.
*/
public class GetHeadersMessage extends GetBlocksMessage {
public GetHeadersMessage(NetworkParameters params, List<Sha256Hash> locator, Sha256Hash stopHash) {
super(params, locator, stopHash);
}
public GetHeadersMessage(NetworkParameters params, byte[] msg) throws ProtocolException {
super(params, msg);
}
@Override
public String toString() {
StringBuilder b = new StringBuilder();
b.append("getheaders: ");
for (Sha256Hash hash : locator) {
b.append(hash.toString());
b.append(" ");
}
return b.toString();
}
/**
* Compares two getheaders messages. Note that even though they are structurally identical a GetHeadersMessage
* will not compare equal to a GetBlocksMessage containing the same data.
*/
@Override
public boolean equals(Object o) {
if (o == null || o.getClass() != getClass()) return false;
GetHeadersMessage other = (GetHeadersMessage) o;
return (other.version == version &&
locator.size() == other.locator.size() && locator.containsAll(other.locator) &&
stopHash.equals(other.stopHash));
}
@Override
public int hashCode() {
int hashCode = (int) version ^ "getheaders".hashCode();
for (Sha256Hash aLocator : locator) hashCode ^= aLocator.hashCode();
hashCode ^= stopHash.hashCode();
return hashCode;
}
}
|
import java.util.*;
public class Solution {
public static void main(String[] args) {
Scanner sc = new Scanner(System.in);
int n = 0;
while (sc.hasNext()) {
System.out.println(++n + " " + sc.nextLine());
}
}
}
|
package com.example.foodorderiing.activity.setting;
import android.os.Bundle;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
import androidx.appcompat.app.AppCompatActivity;
import com.example.foodorderiing.R;
import com.example.foodorderiing.database.DatabaseHelper;
import com.example.foodorderiing.database.dao.UserDao;
import com.example.foodorderiing.helper.App;
import com.google.android.material.bottomsheet.BottomSheetDialog;
public class SettingActivity extends AppCompatActivity {
private LinearLayout linearName, linearPass;
private DatabaseHelper db;
private UserDao userDao;
private TextView defaultName;
private EditText name;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_setting);
setDataBase();
initID();
setDefaultName();
setLinearPass();
// setLinearName();
}
@Override
protected void onResume() {
super.onResume();
setLinearName();
// if(userDao.getUserName() != null) name.setText(userDao.getUserName());
}
private void setDataBase(){
db = App.getDatabase();
userDao = db.userDao();
}
private void initID(){
linearName = findViewById(R.id.linear_name_sitting);
linearPass = findViewById(R.id.linear_pass_sitting);
defaultName = findViewById(R.id.tv_name_setting);
}
private void setDefaultName(){
defaultName.setText(userDao.getUserName());
}
private void setLinearName(){
linearName.setOnClickListener(v -> {
BottomSheetDialog btn = new BottomSheetDialog(this);
btn.setContentView(R.layout.bottom_sheet_name_setting);
name = (EditText) btn.findViewById(R.id.et_name_setting);
TextView save = (TextView) btn.findViewById(R.id.tv_saveName_setting);
TextView cancel = (TextView) btn.findViewById(R.id.tv_cancelName_setting);
name.setText(userDao.getUserName());
save.setOnClickListener(v1 -> {
String nameUser = name.getText().toString();
userDao.updateUserName(nameUser);
Toast.makeText(getApplicationContext(), "با موفقیت تغییر کرد", Toast.LENGTH_SHORT).show();
btn.dismiss();
});
cancel.setOnClickListener(v1 -> {
btn.dismiss();
});
btn.show();
});
}
private void setLinearPass(){
linearPass.setOnClickListener(v -> {
BottomSheetDialog btn = new BottomSheetDialog(this);
btn.setContentView(R.layout.bottom_sheet_pass_setting);
EditText oldPass = (EditText) btn.findViewById(R.id.et_oldPass_setting);
EditText newPass = (EditText) btn.findViewById(R.id.et_newPass_setting);
EditText confirmPass = (EditText) btn.findViewById(R.id.et_confirmPass_setting);
TextView save = (TextView) btn.findViewById(R.id.tv_saveName_setting);
TextView cancel = (TextView) btn.findViewById(R.id.tv_cancelName_setting);
save.setOnClickListener(v1 -> {
if(oldPass.getText().toString().equals(userDao.getUserPass())){
if(newPass.getText().toString().equals(confirmPass.getText().toString())){
userDao.updateUserPass(newPass.getText().toString());
Toast.makeText(getApplicationContext(), "Changed successfully", Toast.LENGTH_SHORT).show();
btn.dismiss();
}else Toast.makeText(getApplicationContext(), "Passwords do not match", Toast.LENGTH_SHORT).show();
}else {
Toast.makeText(getApplicationContext(), "The entered password is incorrect", Toast.LENGTH_SHORT).show();
}
});
cancel.setOnClickListener(v1 -> {
btn.dismiss();
});
btn.show();
});
}
}
|
/*
Copyright [2020] [https://www.stylefeng.cn]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Guns is released under the APACHE LICENSE 2.0. When using it, please note the following:
1. Do not delete or modify the LICENSE file in the root directory.
2. Do not delete or modify the copyright notice at the head of the Guns source files.
3. Retain the project origin and author attribution in the source code and related description files.
4. When distributing the source code, credit the project origin: https://gitee.com/stylefeng/guns-separation
5. When changing package names, module names, or project code, credit the project origin: https://gitee.com/stylefeng/guns-separation
6. If your project cannot satisfy the above, you may apply for a commercial license at the official site: https://www.stylefeng.cn
*/
package cn.stylefeng.guns.core.enums;
/**
 * Logical operator enum
*
* @author xuyuxiang
* @date 2020/4/5 10:23
*/
public enum LogicTypeEnum {
/**
 * Logical AND
*/
AND,
/**
 * Logical OR
*/
OR
}
|
// Targeted by JavaCPP version 1.5.2: DO NOT EDIT THIS FILE
package org.bytedeco.tensorflow;
import org.bytedeco.tensorflow.Allocator;
import java.nio.*;
import org.bytedeco.javacpp.*;
import org.bytedeco.javacpp.annotation.*;
import static org.bytedeco.tensorflow.global.tensorflow.*;
/** Computes the gradients of depthwise convolution with respect to the input.
*
* Arguments:
* * scope: A Scope object
* * input_sizes: An integer vector representing the shape of {@code input}, based
* on {@code data_format}. For example, if {@code data_format} is 'NHWC' then
* {@code input} is a 4-D {@code [batch, height, width, channels]} tensor.
* * filter: 4-D with shape
* {@code [filter_height, filter_width, in_channels, depthwise_multiplier]}.
* * out_backprop: 4-D with shape based on {@code data_format}.
* For example, if {@code data_format} is 'NHWC' then
* out_backprop shape is {@code [batch, out_height, out_width, out_channels]}.
* Gradients w.r.t. the output of the convolution.
* * strides: The stride of the sliding window for each dimension of the input
* of the convolution.
* * padding: The type of padding algorithm to use.
*
* Optional attributes (see {@code Attrs}):
* * data_format: Specify the data format of the input and output data. With the
* default format "NHWC", the data is stored in the order of:
* [batch, height, width, channels].
* Alternatively, the format could be "NCHW", the data storage order of:
* [batch, channels, height, width].
* * dilations: 1-D tensor of length 4. The dilation factor for each dimension of
* {@code input}. If set to k > 1, there will be k-1 skipped cells between each filter
* element on that dimension. The dimension order is determined by the value of
* {@code data_format}, see above for details. Dilations in the batch and depth
* dimensions must be 1.
*
* Returns:
* * {@code Output}: 4-D with shape according to {@code data_format}. For example, if
* {@code data_format} is 'NHWC', output shape is {@code [batch, in_height,
* in_width, in_channels]}. Gradient w.r.t. the input of the
* convolution. */
@Namespace("tensorflow::ops") @NoOffset @Properties(inherit = org.bytedeco.tensorflow.presets.tensorflow.class)
public class DepthwiseConv2dNativeBackpropInput extends Pointer {
static { Loader.load(); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public DepthwiseConv2dNativeBackpropInput(Pointer p) { super(p); }
/** Optional attribute setters for DepthwiseConv2dNativeBackpropInput */
public static class Attrs extends Pointer {
static { Loader.load(); }
/** Default native constructor. */
public Attrs() { super((Pointer)null); allocate(); }
/** Native array allocator. Access with {@link Pointer#position(long)}. */
public Attrs(long size) { super((Pointer)null); allocateArray(size); }
/** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */
public Attrs(Pointer p) { super(p); }
private native void allocate();
private native void allocateArray(long size);
@Override public Attrs position(long position) {
return (Attrs)super.position(position);
}
/** Specify the data format of the input and output data. With the
* default format "NHWC", the data is stored in the order of:
* [batch, height, width, channels].
* Alternatively, the format could be "NCHW", the data storage order of:
* [batch, channels, height, width].
*
* Defaults to "NHWC" */
///
public native @ByVal Attrs DataFormat(@StringPiece BytePointer x);
public native @ByVal Attrs DataFormat(@StringPiece String x);
/** 1-D tensor of length 4. The dilation factor for each dimension of
* {@code input}. If set to k > 1, there will be k-1 skipped cells between each filter
* element on that dimension. The dimension order is determined by the value of
* {@code data_format}, see above for details. Dilations in the batch and depth
* dimensions must be 1.
*
* Defaults to [1, 1, 1, 1] */
public native @ByVal Attrs Dilations(@ArraySlice IntPointer x);
public native @ByVal Attrs Dilations(@ArraySlice IntBuffer x);
public native @ByVal Attrs Dilations(@ArraySlice int... x);
public native @StringPiece BytePointer data_format_(); public native Attrs data_format_(BytePointer setter);
public native @ArraySlice IntPointer dilations_(); public native Attrs dilations_(IntPointer setter);
}
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece BytePointer padding) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece BytePointer padding);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece String padding) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece String padding);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece BytePointer padding) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece BytePointer padding);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece String padding) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece String padding);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece BytePointer padding) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece BytePointer padding);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece String padding) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece String padding);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece BytePointer padding, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding, attrs); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece BytePointer padding, @Const @ByRef Attrs attrs);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece String padding, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding, attrs); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece String padding, @Const @ByRef Attrs attrs);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece BytePointer padding, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding, attrs); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece BytePointer padding, @Const @ByRef Attrs attrs);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece String padding, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding, attrs); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntPointer strides, @StringPiece String padding, @Const @ByRef Attrs attrs);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece BytePointer padding, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding, attrs); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice IntBuffer strides, @StringPiece BytePointer padding, @Const @ByRef Attrs attrs);
public DepthwiseConv2dNativeBackpropInput(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece String padding, @Const @ByRef Attrs attrs) { super((Pointer)null); allocate(scope, input_sizes, filter, out_backprop, strides, padding, attrs); }
private native void allocate(@Const @ByRef Scope scope,
@ByVal Input input_sizes,
@ByVal Input filter,
@ByVal Input out_backprop, @ArraySlice int[] strides, @StringPiece String padding, @Const @ByRef Attrs attrs);
public native @ByVal @Name("operator tensorflow::Output") Output asOutput();
public native @ByVal @Name("operator tensorflow::Input") Input asInput();
public native Node node();
public static native @ByVal Attrs DataFormat(@StringPiece BytePointer x);
public static native @ByVal Attrs DataFormat(@StringPiece String x);
public static native @ByVal Attrs Dilations(@ArraySlice IntPointer x);
public static native @ByVal Attrs Dilations(@ArraySlice IntBuffer x);
public static native @ByVal Attrs Dilations(@ArraySlice int... x);
public native @ByRef Operation operation(); public native DepthwiseConv2dNativeBackpropInput operation(Operation setter);
public native @ByRef Output output(); public native DepthwiseConv2dNativeBackpropInput output(Output setter);
}
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.ci.di;
import com.google.common.base.Preconditions;
import com.google.inject.AbstractModule;
import com.google.inject.Injector;
import com.google.inject.internal.SingletonScope;
import com.google.inject.matcher.Matchers;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import javax.inject.Provider;
import org.apache.ignite.Ignite;
import org.apache.ignite.ci.db.Ignite1Init;
import org.apache.ignite.ci.di.cache.GuavaCachedModule;
import org.apache.ignite.ci.di.scheduler.SchedulerModule;
import org.apache.ignite.ci.github.ignited.GitHubIgnitedModule;
import org.apache.ignite.ci.jira.ignited.JiraIgnitedModule;
import org.apache.ignite.ci.observer.BuildObserver;
import org.apache.ignite.ci.observer.ObserverTask;
import org.apache.ignite.ci.tcbot.TcBotBusinessServicesModule;
import org.apache.ignite.ci.tcbot.issue.IssueDetector;
import org.apache.ignite.ci.teamcity.ignited.TeamcityIgnitedModule;
import org.apache.ignite.ci.util.ExceptionUtil;
import org.apache.ignite.ci.web.TcUpdatePool;
import org.apache.ignite.ci.web.model.hist.VisasHistoryStorage;
import org.apache.ignite.ci.web.rest.exception.ServiceStartingException;
/**
*
*/
public class IgniteTcBotModule extends AbstractModule {
/** Ignite future. */
private Future<Ignite> igniteFut;
/** {@inheritDoc} */
@Override protected void configure() {
install(new GuavaCachedModule());
configProfiling();
configTaskMonitor();
bind(Ignite.class).toProvider((Provider<Ignite>)() -> {
Preconditions.checkNotNull(igniteFut, "Ignite future is not yet initialized");
try {
return igniteFut.get(10, TimeUnit.SECONDS);
}
catch (TimeoutException e) {
throw new ServiceStartingException(e);
}
catch (Exception e) {
e.printStackTrace();
throw ExceptionUtil.propagateException(e);
}
});
bind(TcUpdatePool.class).in(new SingletonScope());
bind(IssueDetector.class).in(new SingletonScope());
bind(ObserverTask.class).in(new SingletonScope());
bind(BuildObserver.class).in(new SingletonScope());
bind(VisasHistoryStorage.class).in(new SingletonScope());
install(new TeamcityIgnitedModule());
install(new JiraIgnitedModule());
install(new GitHubIgnitedModule());
install(new SchedulerModule());
install(new TcBotBusinessServicesModule());
}
private void configProfiling() {
AutoProfilingInterceptor profilingInterceptor = new AutoProfilingInterceptor();
bindInterceptor(Matchers.any(),
Matchers.annotatedWith(AutoProfiling.class),
profilingInterceptor);
bind(AutoProfilingInterceptor.class).toInstance(profilingInterceptor);
}
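    // Hedged illustration: with the binding above, any method on a Guice-managed object
    // annotated with @AutoProfiling is routed through the interceptor, e.g.:
    //
    //   @AutoProfiling
    //   public String loadData() { ... }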
private void configTaskMonitor() {
MonitoredTaskInterceptor profilingInterceptor = new MonitoredTaskInterceptor();
bindInterceptor(Matchers.any(),
Matchers.annotatedWith(MonitoredTask.class),
profilingInterceptor);
bind(MonitoredTaskInterceptor.class).toInstance(profilingInterceptor);
}
public void setIgniteFut(Future<Ignite> igniteFut) {
this.igniteFut = igniteFut;
}
public Injector startIgniteInit(Injector injector) {
final Ignite1Init instance = injector.getInstance(Ignite1Init.class);
final Future<Ignite> submit = instance.getIgniteFuture();
setIgniteFut(submit);
return injector;
}
}
|
/*
* Copyright (C) 2009 University of Washington
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.odk.collect.android.listeners;
import java.util.HashMap;
import android.net.Uri;
/**
* @author Carl Hartung (carlhartung@gmail.com)
*/
public interface InstanceUploaderListener {
void uploadingComplete(HashMap<String, String> result);
void progressUpdate(int progress, int total);
void authRequest(Uri url, HashMap<String, String> doneSoFar);
}
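// Hedged sketch of an implementing class (illustrative; not part of the original file):
//
//   class LoggingUploaderListener implements InstanceUploaderListener {
//       public void uploadingComplete(HashMap<String, String> result) { /* show a summary */ }
//       public void progressUpdate(int progress, int total) { /* update a progress bar */ }
//       public void authRequest(Uri url, HashMap<String, String> doneSoFar) { /* prompt for credentials */ }
//   }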
|
/*
* Copyright 2014 Adam Mackler
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.galilelj.utils;
import static org.galilelj.core.Coin.SMALLEST_UNIT_EXPONENT;
import com.google.common.collect.ImmutableList;
import java.math.BigInteger;
import static java.math.BigDecimal.ONE;
import static java.math.BigDecimal.ZERO;
import java.math.BigDecimal;
import static java.math.RoundingMode.HALF_UP;
import java.text.DecimalFormat;
import java.text.DecimalFormatSymbols;
import java.text.NumberFormat;
import java.util.Locale;
/**
* <p>This class, a concrete extension of {@link BtcFormat}, is distinguished by its
* accommodation of multiple denominational units as follows:
*
* <p>When formatting Bitcoin monetary values, an instance of this class automatically adjusts
* the denominational units in which it represents a given value so as to minimize the number
* of consecutive zeros in the number that is displayed, and includes either a currency code or
* symbol in the formatted value to indicate which denomination was chosen.
*
* <p>When parsing <code>String</code> representations of Bitcoin monetary values, instances of
* this class automatically recognize units indicators consisting of currency codes and
 * symbols, including those containing currency or metric prefixes such as
* <code>"¢"</code> or <code>"c"</code> to indicate hundredths, and interpret each number being
* parsed in accordance with the recognized denominational units.
*
* <p>A more detailed explanation, including examples, is in the documentation for the {@link
* BtcFormat} class, and further information beyond that is in the documentation for the {@link
* java.text.Format} class, from which this class descends.
* @see java.text.Format
* @see java.text.NumberFormat
* @see java.text.DecimalFormat
* @see DecimalFormatSymbols
* @see org.galilelj.core.Coin
*/
public final class BtcAutoFormat extends BtcFormat {
/**
 * Enum for specifying the style of currency indicators that are used
 * when formatting, either codes or symbols.
*/
public enum Style {
/* Notes:
* 1) The odd-looking character in the replacements below, named "currency sign," is used in
* the patterns recognized by Java's number formatter. A single occurrence of this
* character specifies a currency symbol, while two adjacent occurrences indicate an
* international currency code.
* 2) The positive and negative patterns each have three parts: prefix, number, suffix.
* The number characters are limited to digits, zero, decimal-separator, group-separator, and
* scientific-notation specifier: [#0.,E]
* All number characters besides 'E' must be single-quoted in order to appear as
* literals in either the prefix or suffix.
* These patterns are explained in the documentation for java.text.DecimalFormat.
*/
/** Constant for the formatting style that uses a currency code, e.g., "GALI". */
CODE {
@Override void apply(DecimalFormat decimalFormat) {
/* To switch to using codes from symbols, we replace each single occurrence of the
* currency-sign character with two such characters in a row.
 * We also insert a space character between every occurrence of this character and an
* adjacent numerical digit or negative sign (that is, between the currency-sign and
* the signed-number). */
decimalFormat.applyPattern(
negify(decimalFormat.toPattern()).replaceAll("¤","¤¤").
replaceAll("([#0.,E-])¤¤","$1 ¤¤").
replaceAll("¤¤([0#.,E-])","¤¤ $1")
);
}
},
/** Constant for the formatting style that uses a currency symbol, e.g., "G". */
SYMBOL {
@Override void apply(DecimalFormat decimalFormat) {
/* To make certain we are using symbols rather than codes, we replace
* each double occurrence of the currency sign character with a single. */
decimalFormat.applyPattern(negify(decimalFormat.toPattern()).replaceAll("¤¤","¤"));
}
};
/** Effect a style corresponding to an enum value on the given number formatter object. */
abstract void apply(DecimalFormat decimalFormat);
}
/** Constructor */
protected BtcAutoFormat(Locale locale, Style style, int fractionPlaces) {
super((DecimalFormat)NumberFormat.getCurrencyInstance(locale), fractionPlaces, ImmutableList.<Integer>of());
style.apply(this.numberFormat);
}
/**
* Calculate the appropriate denomination for the given Bitcoin monetary value. This
* method takes a BigInteger representing a quantity of satoshis, and returns the
* number of places that value's decimal point is to be moved when formatting said value
* in order that the resulting number represents the correct quantity of denominational
* units.
*
* <p>As a side-effect, this sets the units indicators of the underlying NumberFormat object.
* Only invoke this from a synchronized method, and be sure to put the DecimalFormatSymbols
* back to its proper state, otherwise immutability, equals() and hashCode() fail.
*/
@Override
protected int scale(BigInteger satoshis, int fractionPlaces) {
/* The algorithm is as follows. TODO: is there a way to optimize step 4?
1. Can we use coin denomination w/ no rounding? If yes, do it.
2. Else, can we use millicoin denomination w/ no rounding? If yes, do it.
3. Else, can we use micro denomination w/ no rounding? If yes, do it.
4. Otherwise we must round:
(a) round to nearest coin + decimals
(b) round to nearest millicoin + decimals
(c) round to nearest microcoin + decimals
Subtract each of (a), (b) and (c) from the true value, and choose the
       denomination that gives the smallest absolute difference. In case of a tie, use the
       smaller denomination.
*/
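/* Hedged worked example, assuming the usual scale constants
 * (COIN_SCALE = 0, MILLICOIN_SCALE = 3, MICROCOIN_SCALE = 6,
 * SMALLEST_UNIT_EXPONENT = 8) and fractionPlaces = 0:
 * satoshis = 150000 -> 0.0015 coins (fractional) -> 1.5 millicoins
 * (fractional) -> 1500 microcoins (exact), so places = MICROCOIN_SCALE. */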
int places;
int coinOffset = Math.max(SMALLEST_UNIT_EXPONENT - fractionPlaces, 0);
BigDecimal inCoins = new BigDecimal(satoshis).movePointLeft(coinOffset);
if (inCoins.remainder(ONE).compareTo(ZERO) == 0) {
places = COIN_SCALE;
} else {
BigDecimal inMillis = inCoins.movePointRight(MILLICOIN_SCALE);
if (inMillis.remainder(ONE).compareTo(ZERO) == 0) {
places = MILLICOIN_SCALE;
} else {
BigDecimal inMicros = inCoins.movePointRight(MICROCOIN_SCALE);
if (inMicros.remainder(ONE).compareTo(ZERO) == 0) {
places = MICROCOIN_SCALE;
} else {
// no way to avoid rounding: so what denomination gives smallest error?
BigDecimal a = inCoins.subtract(inCoins.setScale(0, HALF_UP)).
movePointRight(coinOffset).abs();
BigDecimal b = inMillis.subtract(inMillis.setScale(0, HALF_UP)).
movePointRight(coinOffset - MILLICOIN_SCALE).abs();
BigDecimal c = inMicros.subtract(inMicros.setScale(0, HALF_UP)).
movePointRight(coinOffset - MICROCOIN_SCALE).abs();
if (a.compareTo(b) < 0)
if (a.compareTo(c) < 0) places = COIN_SCALE;
else places = MICROCOIN_SCALE;
else if (b.compareTo(c) < 0) places = MILLICOIN_SCALE;
else places = MICROCOIN_SCALE;
}
}
}
prefixUnitsIndicator(numberFormat, places);
return places;
}
/** Returns the <code>int</code> value indicating coin denomination. This is what causes
* the number in a parsed value that lacks a units indicator to be interpreted as a quantity
* of bitcoins. */
@Override
protected int scale() { return COIN_SCALE; }
/** Return the number of decimal places in the fraction part of numbers formatted by this
* instance. This is the maximum number of fraction places that will be displayed;
* the actual number used is limited to a precision of satoshis. */
public int fractionPlaces() { return minimumFractionDigits; }
/** Return true if the other instance is equivalent to this one.
* Formatters for different locales will never be equal, even
* if they behave identically. */
@Override public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof BtcAutoFormat)) return false;
return super.equals(o);
}
/**
* Return a brief description of this formatter. The exact details of the representation
* are unspecified and subject to change, but will include some representation of the
* pattern and the number of fractional decimal places.
*/
@Override
public String toString() { return "Auto-format " + pattern(); }
}
|